diff --git a/package-lock.json b/package-lock.json
index bdf57a8..d9e78d0 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,9 +9,11 @@
"version": "1.0.0",
"license": "ISC",
"dependencies": {
+ "better-sqlite3": "^12.2.0",
"body-parser": "^2.2.0",
"compression": "^1.8.0",
"cookie-parser": "^1.4.7",
+ "crypto": "^1.0.1",
"csurf": "^1.11.0",
"dotenv": "^16.5.0",
"express": "^5.1.0",
@@ -640,6 +642,20 @@
"node": ">=10.0.0"
}
},
+ "node_modules/better-sqlite3": {
+ "version": "12.2.0",
+ "resolved": "https://registry.npmjs.org/better-sqlite3/-/better-sqlite3-12.2.0.tgz",
+ "integrity": "sha512-eGbYq2CT+tos1fBwLQ/tkBt9J5M3JEHjku4hbvQUePCckkvVf14xWj+1m7dGoK81M/fOjFT7yM9UMeKT/+vFLQ==",
+ "hasInstallScript": true,
+ "license": "MIT",
+ "dependencies": {
+ "bindings": "^1.5.0",
+ "prebuild-install": "^7.1.1"
+ },
+ "engines": {
+ "node": "20.x || 22.x || 23.x || 24.x"
+ }
+ },
"node_modules/binary-extensions": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
@@ -1257,6 +1273,13 @@
"node": ">= 8"
}
},
+ "node_modules/crypto": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz",
+ "integrity": "sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==",
+ "deprecated": "This package is no longer supported. It's now a built-in Node module. If you've depended on crypto, you should switch to the one that's built-in.",
+ "license": "ISC"
+ },
"node_modules/csrf": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/csrf/-/csrf-3.1.0.tgz",
diff --git a/package.json b/package.json
index 9ece88e..fc1f559 100644
--- a/package.json
+++ b/package.json
@@ -18,9 +18,11 @@
"license": "ISC",
"type": "commonjs",
"dependencies": {
+ "better-sqlite3": "^12.2.0",
"body-parser": "^2.2.0",
"compression": "^1.8.0",
"cookie-parser": "^1.4.7",
+ "crypto": "^1.0.1",
"csurf": "^1.11.0",
"dotenv": "^16.5.0",
"express": "^5.1.0",
diff --git a/public/css/logs.css b/public/css/logs.css
new file mode 100644
index 0000000..761068f
--- /dev/null
+++ b/public/css/logs.css
@@ -0,0 +1,32 @@
+ body {
+ font-family: monospace, monospace;
+ margin: 20px;
+ }
+
+ table {
+ border-collapse: collapse;
+ width: 100%;
+ }
+
+ th,
+ td {
+ border: 1px solid #ccc;
+ padding: 8px;
+ text-align: left;
+ }
+
+ th {
+ background: #eee;
+ }
+
+ textarea {
+ width: 100%;
+ height: 100px;
+ font-family: monospace;
+ white-space: pre-wrap;
+ }
+
+ select,
+ button {
+ margin: 5px 0;
+ }
diff --git a/public/js/logs.js b/public/js/logs.js
new file mode 100644
index 0000000..2e0a92c
--- /dev/null
+++ b/public/js/logs.js
@@ -0,0 +1,60 @@
+const form = document.getElementById("filterForm");
+const theadRow = document.getElementById("logsTableHeaderRow");
+const tbody = document.querySelector("#logsTable tbody");
+
+form.addEventListener("submit", async (e) => {
+ e.preventDefault();
+ theadRow.innerHTML = "";
+ tbody.innerHTML = "";
+ const params = new URLSearchParams(new FormData(form));
+
+ try {
+ const res = await fetch("/logs", {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/x-www-form-urlencoded",
+ Accept: "application/json",
+ },
+ body: params.toString(),
+ });
+
+ if (!res.ok) throw new Error(await res.text());
+ const logs = await res.json();
+
+ if (logs.length === 0) {
+      tbody.innerHTML = '<tr><td>No logs found</td></tr>';
+ return;
+ }
+
+ const columnSet = new Set();
+ for (const log of logs) {
+ Object.keys(log).forEach((key) => columnSet.add(key));
+ }
+ const columns = Array.from(columnSet);
+
+ for (const col of columns) {
+ const th = document.createElement("th");
+ th.textContent = col;
+ theadRow.appendChild(th);
+ }
+
+ for (const log of logs) {
+ const tr = document.createElement("tr");
+ for (const col of columns) {
+ const td = document.createElement("td");
+ const value = log[col];
+ if (col === "stack" && typeof value === "string") {
+          td.innerHTML = `<pre>${value
+            .replace(/&/g, "&amp;")
+            .replace(/</g, "&lt;")
+            .replace(/>/g, "&gt;")}</pre>`;
+ } else {
+ td.textContent = value ?? "";
+ }
+ tr.appendChild(td);
+ }
+ tbody.appendChild(tr);
+ }
+ } catch (error) {
+    tbody.innerHTML = `<tr><td>Error loading logs: ${error.message}</td></tr>`;
+ }
+});
diff --git a/src/middleware/errorHandler.js b/src/middleware/errorHandler.js
index 0f79951..589d825 100644
--- a/src/middleware/errorHandler.js
+++ b/src/middleware/errorHandler.js
@@ -1,23 +1,34 @@
-const path = require("path");
-
+const crypto = require("crypto");
+const getBaseContext = require("../utils/baseContext");
module.exports = async (err, req, res, next) => {
const statusCode = err.statusCode ?? 500;
const message = err.message ?? "Internal Server Error";
+ const stack = err.stack ?? "No stack trace available";
+ const code = err.code ?? null;
+ const requestId = crypto.randomUUID?.() ?? Date.now().toString(36);
+
+ const logEntry = {
+ timestamp: new Date().toISOString(),
+ level: "error",
+ requestId,
+ method: req.method,
+ url: req.originalUrl || req.url,
+ statusCode,
+ code,
+ message,
+ stack,
+ headers: req.headers,
+ query: req.query,
+ body: req.body,
+ ip: req.ip || req.connection?.remoteAddress,
+ };
if (req?.log?.error) {
- req.log.error(
- JSON.stringify({
- message,
- stack: err.stack || "No stack trace available",
- method: req.method,
- url: req.originalUrl || req.url,
- statusCode,
- code: err.code || null,
- })
- );
+ req.log.error(logEntry);
} else {
- console.error(err);
+ console.error(JSON.stringify(logEntry, null, 2));
}
+
const errorContextMap = {
EBADCSRFTOKEN: {
title: "Forbidden",
@@ -30,13 +41,35 @@
statusCode: 404,
},
};
- const errorKey = err.code || err.statusCode;
- const defaultErrorContext = {
+
+ const errorContext = errorContextMap[code || statusCode] || {
title: `Error ${statusCode}`,
message: "An unexpected error occurred. Please try again later.",
statusCode,
};
- const errorContext = errorContextMap[errorKey] || defaultErrorContext;
+  const isProd = process.env.NODE_ENV === "production";
+ const context = {
+ title: errorContext.title,
+ message: isProd ? errorContext.message : message,
+ content: isProd
+ ? ""
+ : {
+ requestId,
+ method: req.method,
+ url: req.originalUrl || req.url,
+ statusCode,
+ headers: req.headers,
+ query: req.query,
+ body: req.body,
+ ip: req.ip || req.connection?.remoteAddress,
+ stack,
+ },
+ };
- res.redirect(`/error?code=${errorContext.statusCode}`);
+ if (process.env.NODE_ENV === "production") {
+ res.redirect(`/error?code=${errorContext.statusCode}`);
+ } else {
+ const errorPageContext = await getBaseContext(context);
+ res.status(errorContext.statusCode).render("pages/error", errorPageContext);
+ }
};
diff --git a/src/middleware/logging.js b/src/middleware/logging.js
index e19cb7f..f8ac029 100644
--- a/src/middleware/logging.js
+++ b/src/middleware/logging.js
@@ -1,58 +1,9 @@
-// src/middleware/logging.js
-const morgan = require("morgan");
-const { winstonLogger, manualLogger } = require("../utils/logging");
+const { winstonLogger } = require("../utils/logging");
+const structuredLogger = require("../utils/structuredLogger");
-const logFormat =
- ":method :url :status :response-time ms - :res[content-length]";
-
-// function createMorgan(stream, skip) {
-// return morgan(logFormat, { stream, skip });
-// }
-
-// const morganInfo = createMorgan(
-// manualLogger.streams.info,
-// (req, res) => res.statusCode >= 400
-// );
-
-// const morganWarn = createMorgan(
-// manualLogger.streams.warn,
-// (req, res) => res.statusCode < 400 || res.statusCode >= 500
-// );
-
-// const morganError = createMorgan(
-// manualLogger.streams.error,
-// (req, res) => res.statusCode < 500
-// );
-
-// const loggingMiddleware = (req, res, next) => {
-// req.log = manualLogger;
-// next();
-// };
-
-// Define write streams for morgan using Winston's transports
-const createStreamWriter = (level) => {
- return {
- write: (message) => {
- winstonLogger.log({ level, message: message.trim() });
- },
- };
-};
-
-// Morgan instances by log level
-const morganInfo = morgan(logFormat, {
- stream: createStreamWriter("info"),
- skip: (req, res) => res.statusCode >= 400,
-});
-
-const morganWarn = morgan(logFormat, {
- stream: createStreamWriter("warn"),
- skip: (req, res) => res.statusCode < 400 || res.statusCode >= 500,
-});
-
-const morganError = morgan(logFormat, {
- stream: createStreamWriter("error"),
- skip: (req, res) => res.statusCode < 500,
-});
+const morganInfo = structuredLogger("info");
+const morganWarn = structuredLogger("warn");
+const morganError = structuredLogger("error");
// Middleware to inject logger into req
const loggingMiddleware = (req, res, next) => {
diff --git a/src/routes/index.js b/src/routes/index.js
index 829777d..bb6efd3 100644
--- a/src/routes/index.js
+++ b/src/routes/index.js
@@ -13,10 +13,17 @@
const post = require("./post");
const pages = require("./pages");
const rssFeed = require("./rssFeed");
+const logs = require("./logs");
router.get("/error", errorPage); // Landing page after error is logged
router.get("/favicon.ico", (req, res) => res.status(204).end());
+
+if (process.env.NODE_ENV != "production") {
+  router.use(logs);
+}
+
router.post("/track", analytics);
router.use(
"/static",
@@ -32,8 +39,6 @@
);
router.get("/favicon.ico", (req, res) => res.status(204).end());
-router.post("/track", analytics);
-
router.use(blog_index);
router.use(robots);
router.use(contact, csrfToken);
diff --git a/src/routes/logs.js b/src/routes/logs.js
new file mode 100644
index 0000000..201d5b3
--- /dev/null
+++ b/src/routes/logs.js
@@ -0,0 +1,108 @@
+const express = require("express");
+const router = express.Router();
+const Database = require("better-sqlite3");
+const path = require("path");
+const fs = require("fs");
+
+const allowedLevels = ["warn", "error", "info", "debug", "functions", "notice"];
+const allowedTypes = ["testing", "live", "dev"];
+
+const dbPath = path.resolve(__dirname, "../../data/logs.sqlite3");
+console.log(dbPath);
+if (!fs.existsSync(dbPath)) {
+ // Create empty file to allow readonly open later
+ fs.closeSync(fs.openSync(dbPath, "w"));
+ // Optionally initialize schema here or open writable once for setup
+}
+
+const db = new Database(dbPath, { readonly: true });
+
+router.get("/logs", (req, res) => {
+ res.render("pages/logs", { layout: "logs" });
+});
+
+router.post("/logs", (req, res) => {
+  const log_type = req.body?.log_type || req.query.log_type || "*";
+  const log_level = req.body?.log_level || req.query.log_level || "*";
+  const date = req.body?.date || req.query.date || "*";
+
+ if (log_level !== "*" && !allowedLevels.includes(log_level)) {
+ return res.status(400).json({ error: "Invalid log_level" });
+ }
+ if (log_type !== "*" && !allowedTypes.includes(log_type)) {
+ return res.status(400).json({ error: "Invalid log_type" });
+ }
+
+ const conditions = [];
+ const params = [];
+
+ if (log_level !== "*") {
+ conditions.push("l.level = ?");
+ params.push(log_level);
+ }
+
+ if (date !== "*") {
+ conditions.push("date(l.timestamp) = ?");
+ params.push(date);
+ }
+
+ if (log_type !== "*") {
+ conditions.push(`EXISTS (
+ SELECT 1 FROM log_metadata m
+ JOIN keys k ON k.id = m.key_id
+ WHERE m.log_id = l.id AND k.key = 'type' AND m.value = ?
+ )`);
+ params.push(log_type);
+ }
+
+ const whereClause = conditions.length
+ ? "WHERE " + conditions.join(" AND ")
+ : "";
+
+ const query = `
+ SELECT
+ l.id,
+ l.timestamp,
+ l.level,
+ GROUP_CONCAT(k.key || '=' || m.value, '||') AS meta_kv
+ FROM logs l
+ LEFT JOIN log_metadata m ON m.log_id = l.id
+ LEFT JOIN keys k ON k.id = m.key_id
+ ${whereClause}
+ GROUP BY l.id
+ ORDER BY l.timestamp DESC
+ LIMIT 500
+ `;
+
+ try {
+ const rows = db.prepare(query).all(...params);
+
+ const logs = rows.map((row) => {
+ const meta = {};
+ if (row.meta_kv) {
+ for (const pair of row.meta_kv.split("||")) {
+ const [k, v] = pair.split("=");
+ if (k && v !== undefined) {
+ try {
+ meta[k] = JSON.parse(v);
+ } catch {
+ meta[k] = v;
+ }
+ }
+ }
+ }
+ return {
+ id: row.id,
+ timestamp: row.timestamp,
+ level: row.level,
+ ...meta,
+ };
+ });
+
+ res.json(logs);
+ } catch {
+ res.status(500).json({ error: "Failed to query logs" });
+ }
+});
+
+module.exports = router;
diff --git a/src/utils/SQLiteTransport.js b/src/utils/SQLiteTransport.js
new file mode 100644
index 0000000..69c797b
--- /dev/null
+++ b/src/utils/SQLiteTransport.js
@@ -0,0 +1,114 @@
+const Transport = require("winston-transport");
+const Database = require("better-sqlite3");
+const path = require("path");
+
+class SQLiteTransport extends Transport {
+ constructor(opts) {
+ super(opts);
+ this.db = new Database(path.resolve(__dirname, "../../data/logs.sqlite3"));
+
+ this.db.exec(`
+ CREATE TABLE IF NOT EXISTS logs (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ timestamp TEXT NOT NULL,
+ level TEXT NOT NULL
+ );
+ CREATE TABLE IF NOT EXISTS keys (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ key TEXT UNIQUE NOT NULL
+ );
+ CREATE TABLE IF NOT EXISTS log_metadata (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ log_id INTEGER NOT NULL,
+ key_id INTEGER NOT NULL,
+ value TEXT NOT NULL,
+ FOREIGN KEY(log_id) REFERENCES logs(id) ON DELETE CASCADE,
+ FOREIGN KEY(key_id) REFERENCES keys(id) ON DELETE CASCADE
+ );
+ CREATE INDEX IF NOT EXISTS idx_logs_timestamp ON logs(timestamp);
+ CREATE INDEX IF NOT EXISTS idx_logs_level ON logs(level);
+ CREATE INDEX IF NOT EXISTS idx_log_metadata_logid_keyid ON log_metadata(log_id, key_id);
+ CREATE INDEX IF NOT EXISTS idx_log_metadata_keyid_value ON log_metadata(key_id, value);
+ CREATE UNIQUE INDEX IF NOT EXISTS idx_keys_key ON keys(key);
+ `);
+
+ this.insertLog = this.db.prepare(
+ `INSERT INTO logs (timestamp, level) VALUES (?, ?)`
+ );
+ this.getKeyId = this.db.prepare(`SELECT id FROM keys WHERE key = ?`);
+ this.insertKey = this.db.prepare(`INSERT INTO keys (key) VALUES (?)`);
+ this.insertMetadata = this.db.prepare(
+ `INSERT INTO log_metadata (log_id, key_id, value) VALUES (?, ?, ?)`
+ );
+ }
+
+ getOrCreateKeyId(key) {
+ let row = this.getKeyId.get(key);
+ if (!row) {
+ const info = this.insertKey.run(key);
+ return info.lastInsertRowid;
+ }
+ return row.id;
+ }
+
+ // Helper function to safely stringify values
+ safeStringify(value) {
+ if (value === null || value === undefined) {
+ return "";
+ }
+ if (typeof value === "string") {
+ return value;
+ }
+ if (typeof value === "object") {
+ try {
+ return JSON.stringify(value);
+ } catch (err) {
+ return "[Circular Reference]";
+ }
+ }
+ return String(value);
+ }
+
+ log(info, callback) {
+ setImmediate(() => this.emit("logged", info));
+
+ const {
+ timestamp = new Date().toISOString(),
+ level,
+ message,
+ ...meta
+ } = info;
+
+ const insertLogTxn = this.db.transaction(() => {
+ const result = this.insertLog.run(timestamp, level);
+ const logId = result.lastInsertRowid;
+
+ // Store message
+ if (message) {
+ const messageKeyId = this.getOrCreateKeyId("message");
+ this.insertMetadata.run(
+ logId,
+ messageKeyId,
+ this.safeStringify(message)
+ );
+ }
+
+ // Store all metadata
+ for (const [key, value] of Object.entries(meta)) {
+ const keyId = this.getOrCreateKeyId(key);
+ this.insertMetadata.run(logId, keyId, this.safeStringify(value));
+ }
+ });
+
+ try {
+ insertLogTxn();
+ } catch (error) {
+ console.error("SQLite logging error:", error);
+ // Don't fail silently - this could hide important issues
+ }
+
+ callback();
+ }
+}
+
+module.exports = SQLiteTransport;
diff --git a/src/utils/logging.js b/src/utils/logging.js
index ebd3a09..1d26504 100644
--- a/src/utils/logging.js
+++ b/src/utils/logging.js
@@ -4,6 +4,8 @@
const util = require("util");
const { createLogger, format, transports } = require("winston");
const DailyRotateFile = require("winston-daily-rotate-file");
+const SQLiteTransport = require("../utils/SQLiteTransport");
+const sqliteTransport = new SQLiteTransport();
// Define the root log directory
const logDir = path.join(__dirname, "..", "..", "logs");
@@ -131,8 +133,27 @@
debug: (...args) =>
writeLog("DEBUG", logStreams.debug, console.debug, ...args),
};
-// Winston logger
+// // Winston logger
+// const winstonLogger = createLogger({
+// transports: [
+// buildTransport("info", "info"),
+// buildTransport("error", "error"),
+// buildTransport("warn", "warn"),
+// buildTransport("debug", "debug"),
+// buildTransport("notice", "notice"),
+// new transports.Console({
+// level: "debug",
+// format: format.combine(format.colorize(), format.simple()),
+// }),
+// ],
+// });
const winstonLogger = createLogger({
+ format: format.combine(
+ format.timestamp(),
+ format.printf(
+ ({ timestamp, level, message }) => `[${timestamp}] [${level}] ${message}`
+ )
+ ),
transports: [
buildTransport("info", "info"),
buildTransport("error", "error"),
@@ -143,6 +164,7 @@
level: "debug",
format: format.combine(format.colorize(), format.simple()),
}),
+ sqliteTransport,
],
});
diff --git a/src/utils/sqlite3.js b/src/utils/sqlite3.js
index 373c4e3..0b0c3eb 100644
--- a/src/utils/sqlite3.js
+++ b/src/utils/sqlite3.js
@@ -15,5 +15,18 @@
js_enabled INTEGER
)
`);
-
+db.run(`
+CREATE VIEW IF NOT EXISTS analytics_view AS
+SELECT
+ id,
+ datetime(timestamp / 1000, 'unixepoch') AS timestamp_human,
+ url,
+ referrer,
+ user_agent,
+ viewport,
+ load_time,
+ event,
+ ip,
+ js_enabled
+FROM analytics;`);
module.exports = db;
diff --git a/src/utils/structuredLogger.js b/src/utils/structuredLogger.js
new file mode 100644
index 0000000..27cbfa0
--- /dev/null
+++ b/src/utils/structuredLogger.js
@@ -0,0 +1,50 @@
+const { winstonLogger } = require("./logging");
+
+module.exports = (level) => (req, res, next) => {
+ const start = process.hrtime();
+
+ res.on("finish", () => {
+ const [s, ns] = process.hrtime(start);
+ const ms = (s * 1e3 + ns / 1e6).toFixed(3);
+ const { method, url, headers, query, body, ip } = req;
+ const { statusCode } = res;
+
+ if (
+ (level === "info" && statusCode < 400) ||
+ (level === "warn" && statusCode >= 400 && statusCode < 500) ||
+ (level === "error" && statusCode >= 500)
+ ) {
+ // Flatten nested objects into key-value pairs for metadata
+ const flatten = (obj, prefix = "") => {
+ const res = {};
+ for (const [k, v] of Object.entries(obj)) {
+ const key = prefix ? `${prefix}.${k}` : k;
+ if (v !== null && typeof v === "object") {
+ Object.assign(res, flatten(v, key));
+ } else {
+ res[key] = String(v);
+ }
+ }
+ return res;
+ };
+
+ const meta = {
+ statusCode: String(statusCode),
+ ip: String(ip),
+ responseTime: `${ms}ms`,
+ contentLength: String(res.getHeader("content-length") || "0"),
+ ...flatten(headers, "headers"),
+ ...flatten(query, "query"),
+ ...flatten(body, "body"),
+ };
+
+ winstonLogger.log({
+ level,
+ message: `${method} ${url}`,
+ ...meta,
+ });
+ }
+ });
+
+ next();
+};
diff --git a/src/views/layouts/logs.handlebars b/src/views/layouts/logs.handlebars
new file mode 100644
index 0000000..42aef0f
--- /dev/null
+++ b/src/views/layouts/logs.handlebars
@@ -0,0 +1,15 @@
+{{!-- views/logs.hbs --}}
+<!DOCTYPE html>
+<html lang="en">
+
+<head>
+  <meta charset="UTF-8" />
+  <title>Logs Viewer</title>
+  <link rel="stylesheet" href="/static/css/logs.css" />
+</head>
+
+<body>
+  {{{ body }}}
+  <script src="/static/js/logs.js"></script>
+</body>
+
+</html>
diff --git a/src/views/pages/logs.handlebars b/src/views/pages/logs.handlebars
new file mode 100644
index 0000000..01734cb
--- /dev/null
+++ b/src/views/pages/logs.handlebars
@@ -0,0 +1,37 @@
+{{!-- pages/logs.hbs --}}
+Log Viewer
+
+
+
+
+
+
diff --git a/yarn.lock b/yarn.lock
index 2b29754..fbe8367 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -306,6 +306,14 @@
resolved "https://registry.npmjs.org/basic-ftp/-/basic-ftp-5.0.5.tgz"
integrity sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==
+better-sqlite3@^12.2.0:
+ version "12.2.0"
+ resolved "https://registry.npmjs.org/better-sqlite3/-/better-sqlite3-12.2.0.tgz"
+ integrity sha512-eGbYq2CT+tos1fBwLQ/tkBt9J5M3JEHjku4hbvQUePCckkvVf14xWj+1m7dGoK81M/fOjFT7yM9UMeKT/+vFLQ==
+ dependencies:
+ bindings "^1.5.0"
+ prebuild-install "^7.1.1"
+
binary-extensions@^2.0.0:
version "2.3.0"
resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz"
@@ -642,6 +650,11 @@
shebang-command "^2.0.0"
which "^2.0.1"
+crypto@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz"
+ integrity sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==
+
csrf@3.1.0:
version "3.1.0"
resolved "https://registry.npmjs.org/csrf/-/csrf-3.1.0.tgz"