diff --git a/src/controllers/admin/diskSpaceController.js b/src/controllers/admin/diskSpaceController.js new file mode 100644 index 0000000..118bd14 --- /dev/null +++ b/src/controllers/admin/diskSpaceController.js @@ -0,0 +1,97 @@ +const fs = require("fs").promises; +const path = require("path"); +const { diskSpaceMonitor } = require("../../utils/logging"); + +exports.requireAdmin = (req, res, next) => { + if (req.session && req.session.isAdmin) { + next(); + } else { + res.status(403).json({ error: "Admin access required" }); + } +}; + +exports.getDiskSpaceStatus = diskSpaceMonitor.getStatusEndpoint(); +exports.manualDiskCleanup = diskSpaceMonitor.manualCleanupEndpoint(); + +exports.getDiskSpaceConfig = (req, res) => { + res.json({ + success: true, + data: { + thresholds: { + warning: diskSpaceMonitor.options.warningThreshold, + critical: diskSpaceMonitor.options.criticalThreshold, + emergency: diskSpaceMonitor.options.emergencyThreshold, + }, + cleanup: { + normalCleanupDays: diskSpaceMonitor.options.normalCleanupDays, + warningCleanupDays: diskSpaceMonitor.options.warningCleanupDays, + criticalCleanupDays: diskSpaceMonitor.options.criticalCleanupDays, + emergencyCleanupDays: diskSpaceMonitor.options.emergencyCleanupDays, + }, + monitoring: { + interval: diskSpaceMonitor.options.monitoringInterval, + maxLogDirectorySize: diskSpaceMonitor.options.maxLogDirectorySize, + }, + }, + }); +}; + +exports.updateDiskSpaceConfig = (req, res) => { + try { + const { thresholds, cleanup, monitoring } = req.body; + + if (thresholds) Object.assign(diskSpaceMonitor.options, thresholds); + if (cleanup) Object.assign(diskSpaceMonitor.options, cleanup); + if (monitoring) { + Object.assign(diskSpaceMonitor.options, monitoring); + diskSpaceMonitor.startMonitoring(); + } + + res.json({ + success: true, + message: "Configuration updated successfully", + data: diskSpaceMonitor.options, + }); + } catch (error) { + res.status(500).json({ + success: false, + error: "Failed to update 
configuration", + details: error.message, + }); + } +}; + +exports.getLogDirectoryContents = async (req, res) => { + try { + const logDir = path.join(__dirname, "..", "..", "logs"); + + const getDirectoryInfo = async (dir) => { + const items = await fs.readdir(dir); + const info = []; + + for (const item of items) { + const itemPath = path.join(dir, item); + const stats = await fs.stat(itemPath); + + info.push({ + name: item, + type: stats.isDirectory() ? "directory" : "file", + size: stats.size, + modified: stats.mtime, + relativePath: path.relative(logDir, itemPath), + }); + } + + return info.sort((a, b) => b.modified - a.modified); + }; + + const contents = await getDirectoryInfo(logDir); + res.json({ success: true, data: contents }); + } catch (error) { + res.status(500).json({ + success: false, + error: "Failed to get log directory contents", + details: error.message, + }); + } +}; diff --git a/src/controllers/adminTokenController.js b/src/controllers/adminTokenController.js new file mode 100644 index 0000000..3dc8f98 --- /dev/null +++ b/src/controllers/adminTokenController.js @@ -0,0 +1,32 @@ +const { validateToken, cleanupTokens } = require("../utils/adminToken"); +const HttpError = require("../utils/HttpError"); + +exports.cleanupTokensMiddleware = (req, res, next) => { + if (Math.random() < 0.1) { + cleanupTokens(); + } + next(); +}; + +exports.handleTokenRedirect = (req, res, next) => { + const { token } = req.params; + if (!token) return next(); + + if (!validateToken(token)) { + const error = new HttpError("Invalid or expired token", 401, { token }); + req.log.warn({ err: error, token }, "Token validation failed"); + return next(); + } + + const scheme = req.protocol; + const host = req.get("host"); + const referrer = req.get("Referer") || req.get("Referrer") || ""; + + const redirectTo = referrer.startsWith("http") + ? 
referrer + : `${scheme}://${host}${referrer}`; + + const adminLoginUrl = `${process.env.AUTH_LOGIN}${redirectTo}`; + res.set("Content-Type", "text/html"); + res.customRedirect(adminLoginUrl, 301); +}; diff --git a/src/controllers/analyticsControllers.js b/src/controllers/analyticsControllers.js new file mode 100644 index 0000000..6c8f12a --- /dev/null +++ b/src/controllers/analyticsControllers.js @@ -0,0 +1,35 @@ +const db = require("../utils/sqlite3"); + +// Route: JavaScript-enabled tracking +module.exports = (req, res) => { + const { + url = "", + referrer = "", + userAgent = "", + viewport = "", + loadTime = 0, + event = "", + } = req.body; + + const forwardedIp = req.ip; + const directIp = req.connection.remoteAddress; + const timestamp = Date.now(); + + db.run( + `INSERT INTO analytics (timestamp, url, referrer, user_agent, viewport, load_time, event, forwardedIp, directIp, js_enabled) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, + [ + timestamp, + url, + referrer, + userAgent, + viewport, + loadTime, + event, + forwardedIp, + directIp, + 1, + ] + ); + res.sendStatus(204); +}; diff --git a/src/controllers/blogControllers.js b/src/controllers/blogControllers.js new file mode 100644 index 0000000..9415501 --- /dev/null +++ b/src/controllers/blogControllers.js @@ -0,0 +1,89 @@ +// src/routes/post.js +const { marked } = require("marked"); +const fs = require("fs").promises; +const path = require("path"); +const matter = require("gray-matter"); +const { getAllPosts } = require("../utils/postFileUtils"); + +const HttpError = require("../utils/HttpError"); + +exports.blogPost = async (req, res, next) => { + const { year, month, name } = req.params; + + // Validate year: 4 digits only + if (!/^\d{4}$/.test(year)) { + return next(new HttpError("Invalid year parameter.", 400)); + } + + // Validate month: 01-12 only + if (!/^(0[1-9]|1[0-2])$/.test(month)) { + return next(new HttpError("Invalid month parameter.", 400)); + } + + // Validate name: allow alphanumeric, dash, 
underscore only (no dots, no slashes) + if (!/^[a-zA-Z0-9_-]+$/.test(name)) { + return next(new HttpError("Invalid post name parameter.", 400)); + } + + const mdPath = path.join( + __dirname, + "../../content/posts", + year, + month, + `${name}.md` + ); + + try { + const fileContent = await fs.readFile(mdPath, "utf8"); + const { data: frontmatter, content } = matter(fileContent); + if ( + !frontmatter.published && + (process.env.NODE_ENV === "production" || + process.env.NODE_ENV === "testing") + ) { + throw new Error("Attempted to access an unpublished page in production"); + } + const htmlContent = marked(content); + const context = { + title: frontmatter.title, + date: frontmatter.date, + author: frontmatter.author, + content: htmlContent, + }; + res.renderWithBaseContext("pages/post", context); + } catch (err) { + next(new HttpError("The requested blog post could not be found.", 404)); + } +}; + +exports.blogIndex = async (req, res) => { + const postsDir = path.join(__dirname, "../../content/posts"); + const allPosts = await getAllPosts(postsDir, { + includeUnpublished: req.query.drafts === "true", + }); + + // Show a post when it is published, or when we are NOT in production/testing + // (mirrors the unpublished-post guard in blogPost above). + const publishedPosts = allPosts.filter( + (post) => + post.published || + (process.env.NODE_ENV !== "production" && + process.env.NODE_ENV !== "testing") + ); + // Sort posts descending by date + publishedPosts.sort((a, b) => new Date(b.date) - new Date(a.date)); + + // Prepare context compatible with the blog-index.hbs layout + // Add `templateContent` as excerpt or limited content if needed here + // For now, use a simple excerpt from markdown or placeholder + const posts = publishedPosts.map((post) => ({ + url: post.url, + data: { + title: post.title, + date: post.date, + tags: post.tags, + published: post.published, // add this + }, + templateContent: post.excerpt || "", + })); + + res.renderWithBaseContext("pages/blog_index", { collections: { posts } }); +}; diff --git a/src/controllers/contactControllers.js b/src/controllers/contactControllers.js new 
file mode 100644 index 0000000..6871156 --- /dev/null +++ b/src/controllers/contactControllers.js @@ -0,0 +1,125 @@ +// src/routes/contact.js +const sendContactMail = require("../utils/sendContactMail"); + +const verifyHCaptcha = require("../utils/verifyHCaptcha"); +const { qualifyLink } = require("../utils/qualifyLinks"); +const { + captureSecurityData, + analyzeThreatLevel, + logSecurityEvent, +} = require("../utils/securityForensics"); +const { validateAndSanitizeEmail } = require("../utils/emailValidator"); + +const { + isValidInput, + handleInvalidInput, + buildSecurityData, + logSubmission, + handleCaptchaFailure, + blockHighThreat, + prepareEmail, + logSuccess, + logUnhandledError, +} = require("./heplers/contactHelpers"); + +module.exports.handleContactFormPost = async (req, res, next) => { + try { + const { name, email, message, subject, clientData } = req.body; + const hcaptchaToken = + req.body.hcaptchaToken || req.body["g-recaptcha-response"]; + const emailResult = validateAndSanitizeEmail(email); + + if (!isValidInput(name, subject, message, emailResult)) { + return await handleInvalidInput( + req, + next, + { name, email, subject, message }, + emailResult + ); + } + + const securityData = buildSecurityData(req, { + formData: { name, email, message, subject }, + captchaProvided: !!hcaptchaToken, + clientData, + step: "initial_validation", + }); + + const threatAnalysis = analyzeThreatLevel( + { name, email, message, subject }, + securityData + ); + + await logSubmission(securityData, threatAnalysis, { + name, + email, + message, + subject, + }); + + if (!hcaptchaToken) + return await handleCaptchaFailure( + securityData, + threatAnalysis, + next, + "missing_captcha" + ); + + const captchaValid = await verifyHCaptcha(hcaptchaToken); + if (!captchaValid) + return await handleCaptchaFailure( + securityData, + threatAnalysis, + next, + "captcha_failed" + ); + + if (threatAnalysis.level === "high") { + await blockHighThreat(securityData, threatAnalysis); + 
res.customRedirect("/contact/thankyou"); + return; + } + + const emailData = prepareEmail( + { name, email, message, subject }, + threatAnalysis + ); + await sendContactMail(emailData); + + await logSuccess(securityData, threatAnalysis); + res.customRedirect("/contact/thankyou"); + } catch (err) { + await logUnhandledError(req, err); + next(err); + } +}; +module.exports.renderContactForm = async (req, res) => { + const securityData = captureSecurityData(req, { + pageAccess: "contact_form", + processingStep: "page_render", + }); + + await logSecurityEvent(securityData, "page_access"); + + const context = { + csrfToken: res.locals.csrfToken, + title: "Contact", + formAction: qualifyLink("/contact"), + formMethod: "POST", + }; + res.renderWithBaseContext("pages/contact.handlebars", context); +}; +module.exports.renderThankYouPage = async (req, res) => { + const securityData = captureSecurityData(req, { + pageAccess: "thankyou_page", + processingStep: "page_render", + }); + + await logSecurityEvent(securityData, "thankyou_access"); + + res.renderGenericMessage({ + title: "Thank You", + message: + "Your message has been sent successfully. 
We will get back to you shortly.", + }); +}; diff --git a/src/controllers/docsControllers.js b/src/controllers/docsControllers.js new file mode 100644 index 0000000..8789d30 --- /dev/null +++ b/src/controllers/docsControllers.js @@ -0,0 +1,105 @@ +const fs = require("fs/promises"); +const { qualifyLink } = require("../utils/qualifyLinks"); +const { baseUrl } = require("../utils/baseUrl"); +const HttpError = require("../utils/HttpError"); +const docsContext = require("../utils/docsContext"); +const { + loadDocFile, + filterModuleSecurityKeys, + getYamlFileNames, + docsDir, +} = require("../services/docsService"); + +exports.renderDocsIndex = async (req, res, next) => { + try { + const yamlFiles = await getYamlFileNames(); + const context = await docsContext(req.isAuthenticated, { + layout: "docs", + docPath: "/docs", + docModule: null, + }); + + res.render("docs/index", { + ...context, + docsPaths: yamlFiles.map((name) => `${req.baseUrl || ""}/${name}`), + }); + } catch (err) { + req.log.error(err.stack); + next(new HttpError("Failed to read docs directory", 500)); + } +}; + +exports.renderDocsSummary = async (req, res, next) => { + try { + const summaries = []; + const yamlFiles = await getYamlFileNames(); + + for (const file of yamlFiles) { + const doc = await loadDocFile(file); + if (doc?.crossCuttingSummary) { + summaries.push({ path: file, summary: doc.crossCuttingSummary }); + } + } + + const context = await docsContext(req.isAuthenticated, { + layout: "docs", + docPath: baseUrl + "/docs/summary", + docModule: null, + }); + + res.render("docs/summary", { + ...context, + summaries, + }); + } catch (err) { + next(err); + } +}; + +exports.renderDocsByType = async (req, res, next) => { + const { moduleType: docPath } = req.params; + const doc = await loadDocFile(docPath); + + const context = await docsContext(req.isAuthenticated, { + layout: "docs", + docPath: baseUrl + "/docs" + docPath, + docModule: null, + }); + + const modulesWithLinks = 
Object.entries(doc.modules).map(([key]) => ({ + name: key, + url: `${baseUrl}/docs/${docPath}/${key}`, + })); + + res.render("docs/path", { + ...context, + docsHome: qualifyLink("/docs"), + pathName: docPath, + crossCuttingSummary: doc.crossCuttingSummary, + modules: modulesWithLinks, + }); +}; + +exports.renderDocsModule = async (req, res, next) => { + const { moduleType: docPath, module } = req.params; + const doc = await loadDocFile(docPath); + if (!doc) return next(new HttpError("Documentation not found", 404)); + const moduleDoc = doc.modules[module]; + if (!moduleDoc) + return next(new HttpError("Module documentation not found", 404)); + + const context = await docsContext(req.isAuthenticated, { + layout: "docs", + docPath, + docModule: module, + }); + + res.render("docs/module", { + ...context, + docsHome: qualifyLink("/docs"), + pathUrl: qualifyLink("/docs/" + docPath), + pathName: docPath, + module, + moduleDoc: filterModuleSecurityKeys(moduleDoc), + }); +}; diff --git a/src/controllers/errorPageController.js b/src/controllers/errorPageController.js new file mode 100644 index 0000000..19f49c0 --- /dev/null +++ b/src/controllers/errorPageController.js @@ -0,0 +1,17 @@ +// src/routes/errorPage +const { getErrorContext } = require("../utils/errorContext"); + +module.exports = async (req, res) => { + const code = parseInt(req.query.code, 10) || 500; + const errorContext = getErrorContext(code); + + const context = { + title: errorContext.title, + message: errorContext.message, + statusCode: errorContext.statusCode, + content: "", + }; + + res.status(errorContext.statusCode); + res.renderGenericMessage(context); +}; diff --git a/src/controllers/filteredLogsController.js b/src/controllers/filteredLogsController.js new file mode 100644 index 0000000..44c8f6d --- /dev/null +++ b/src/controllers/filteredLogsController.js @@ -0,0 +1,26 @@ +const fs = require("fs"); +const { getExcludeRoutes, shouldExclude } = require("../lib/routeFilter"); +const { parseLogLine } = 
require("../lib/logParser"); + +function getFilteredLogs(req, res) { + const excludeRoutes = getExcludeRoutes(req.app._router); + const logPath = "/var/log/nginx/access.log"; + + try { + const raw = fs.readFileSync(logPath, "utf8"); + const lines = raw.split("\n"); + + const filtered = lines.filter((line) => { + if (!line.trim()) return false; + const parsed = parseLogLine(line); + if (!parsed) return false; + return !shouldExclude(parsed.ip, parsed.url, excludeRoutes); + }); + + res.type("text/plain").status(200).send(filtered.join("\n")); + } catch { + res.sendStatus(500); + } +} + +module.exports = { getFilteredLogs }; diff --git a/src/controllers/heplers/contactHelpers.js b/src/controllers/heplers/contactHelpers.js new file mode 100644 index 0000000..3686830 --- /dev/null +++ b/src/controllers/heplers/contactHelpers.js @@ -0,0 +1,131 @@ +// src/routes/helpers/contactHelpers.js +const HttpError = require("../../utils/HttpError"); +const { + captureSecurityData, + logSecurityEvent, +} = require("../../utils/securityForensics"); + +function isReasonableLength(str, maxLen) { + return ( + typeof str === "string" && str.trim().length > 0 && str.length <= maxLen + ); +} + +function isValidInput(name, subject, message, emailResult) { + return ( + emailResult.valid && + isReasonableLength(name, 100) && + isReasonableLength(subject, 150) && + isReasonableLength(message, 2000) + ); +} +async function handleInvalidInput(req, next, formData, emailResult) { + const invalidData = captureSecurityData(req, { + formData, + failureReason: emailResult.message || "invalid_input", + processingStep: "validation", + }); + await logSecurityEvent(invalidData, "validation_failure"); + next(new HttpError("Invalid input", 400, invalidData)); +} + +function buildSecurityData( + req, + { formData, captchaProvided, clientData, step } +) { + return captureSecurityData(req, { + formData, + captchaProvided, + clientData, + processingStep: step, + }); +} + +async function 
logSubmission(securityData, threatAnalysis, formData) { + await logSecurityEvent( + { + ...securityData, + threatAnalysis, + formData: { + name: formData.name, + email: formData.email, + hasMessage: !!formData.message, + hasSubject: !!formData.subject, + }, + }, + "contact_submission" + ); +} + +async function handleCaptchaFailure( + securityData, + threatAnalysis, + next, + reason +) { + await logSecurityEvent( + { + ...securityData, + threatAnalysis, + validationResult: "failed", + failureReason: reason, + }, + "validation_failure" + ); + next(new HttpError("Captcha verification failed", 400)); +} + +async function blockHighThreat(securityData, threatAnalysis) { + await logSecurityEvent( + { + ...securityData, + threatAnalysis, + action: "blocked_high_threat", + }, + "threat_blocked" + ); +} + +function prepareEmail({ name, email, message, subject }, threatAnalysis) { + const base = { name, email, message, subject }; + if (threatAnalysis.level === "medium") { + base.securityFlag = `[SECURITY REVIEW REQUIRED - Score: ${threatAnalysis.score}]`; + } + return base; +} + +async function logSuccess(securityData, threatAnalysis) { + await logSecurityEvent( + { + ...securityData, + threatAnalysis, + processingResult: "success", + emailSent: true, + }, + "contact_success" + ); +} + +async function logUnhandledError(req, err) { + const errorData = captureSecurityData(req, { + error: { + message: err.message, + stack: err.stack, + name: err.name, + }, + processingStep: "error_handling", + }); + await logSecurityEvent(errorData, "contact_error"); +} + +module.exports = { + isValidInput, + handleInvalidInput, + buildSecurityData, + logSubmission, + handleCaptchaFailure, + blockHighThreat, + prepareEmail, + logSuccess, + logUnhandledError, +}; diff --git a/src/controllers/newsletterController.js b/src/controllers/newsletterController.js new file mode 100644 index 0000000..2e20993 --- /dev/null +++ b/src/controllers/newsletterController.js @@ -0,0 +1,68 @@ +const HttpError = 
require("../utils/HttpError"); +const { + saveEmail, + unsubscribeEmail, +} = require("../services/newsletterService"); +const sendNewsletterSubscriptionMail = require("../utils/sendNewsletterSubscriptionMail"); +const { validateAndSanitizeEmail } = require("../utils/emailValidator"); +const { ERRORS } = require("../constants/newsletterConstants"); +const { qualifyLink } = require("../utils/qualifyLinks"); + +exports.renderNewsletterForm = async (req, res) => { + res.renderWithBaseContext("pages/newsletter.handlebars", { + csrfToken: res.locals.csrfToken, + title: "Newsletter", + formAction: qualifyLink("/newsletter"), + formMethod: "POST", + }); +}; + +exports.renderSubscriptionSuccess = async (req, res) => { + res.renderWithBaseContext("pages/newsletter-success.handlebars", { + title: "Subscribed", + message: + "You’ve successfully subscribed to my newsletter. Stay tuned for updates.", + }); +}; + +exports.handleNewsletterSubscribe = async (req, res, next) => { + const { email: rawEmail } = req.body; + const result = validateAndSanitizeEmail(rawEmail); + + if (!result.valid) { + return next(new HttpError(result.message, 400)); + } + + const sanitizedEmail = result.email; + + try { + await saveEmail(sanitizedEmail); + await sendNewsletterSubscriptionMail({ email: sanitizedEmail }); + res.customRedirect("/newsletter/success"); + } catch (err) { + req.log.error("Newsletter subscription error:", err); + if (err.code === "DUPLICATE_EMAIL") { + return res.customRedirect("/newsletter/success"); + } + next(err); + } +}; + +exports.handleUnsubscribe = async (req, res, next) => { + const { valid, email, message } = validateAndSanitizeEmail(req.query.email); + + if (!valid) { + return next(new HttpError(message || ERRORS.INVALID_EMAIL, 400)); + } + + try { + await unsubscribeEmail(email); + res.renderGenericMessage({ + title: "Thank You", + message: + "You’ve been successfully removed from the newsletter mailing list.", + }); + } catch (err) { + next(new HttpError({ 
error: "Failed to unsubscribe" }, 500)); + } +}; diff --git a/src/controllers/presentationController.js b/src/controllers/presentationController.js new file mode 100644 index 0000000..97382a6 --- /dev/null +++ b/src/controllers/presentationController.js @@ -0,0 +1,45 @@ +const path = require("path"); +const fs = require("fs/promises"); +const yaml = require("js-yaml"); + +const HttpError = require("../utils/HttpError"); +const { qualifyLink } = require("../utils/qualifyLinks"); +const { baseUrl } = require("../utils/baseUrl"); + +const yamlPath = path.resolve("content/presentation.yaml"); + +async function renderPresentation(req, res, next) { + try { + const fileContent = await fs.readFile(yamlPath, "utf8"); + const data = yaml.load(fileContent); + + if (data.slides) { + for (const slide of data.slides) { + if (slide.images) { + slide.images = slide.images.map((img) => { + if (img.src && !img.src.match(/^https?:\/\//)) { + img.src = qualifyLink(img.src); + } + return img; + }); + } + } + } + + res.render("pages/presentation", { + layout: "presentation", + slides: data.slides, + title: data.title, + baseUrl, + returnUrl: req.returnUrl, + nonce: res.locals.nonce, + }); + } catch (err) { + req.log.error(err.stack); + next(new HttpError("Failed to load presentation data", 500)); + } +} + +module.exports = { + renderPresentation, +}; diff --git a/src/controllers/robotsController.js b/src/controllers/robotsController.js new file mode 100644 index 0000000..95804f0 --- /dev/null +++ b/src/controllers/robotsController.js @@ -0,0 +1,11 @@ +module.exports = (req, res) => { + const robotsTxt = ` +User-agent: * +Disallow: + +Sitemap: ${req.protocol}://${req.get("host")}/sitemap.xml +`.trim(); + + res.type("text/plain"); + res.send(robotsTxt); +}; diff --git a/src/controllers/rssFeedController.js b/src/controllers/rssFeedController.js new file mode 100644 index 0000000..772b238 --- /dev/null +++ b/src/controllers/rssFeedController.js @@ -0,0 +1,9 @@ +// routes/rss.js +const 
generateRSSFeed = require("../services/rssFeedService"); + +module.exports = async (req, res) => { + const domain = process.env.DOMAIN; + const xml = await generateRSSFeed("content/posts", `https://${domain}`); + res.set("Content-Type", "application/rss+xml"); + res.send(xml); +}; diff --git a/src/controllers/secured/logsController.js b/src/controllers/secured/logsController.js new file mode 100644 index 0000000..5faac9c --- /dev/null +++ b/src/controllers/secured/logsController.js @@ -0,0 +1,184 @@ +const path = require("path"); +const fs = require("fs"); +const Database = require("better-sqlite3"); +const { winstonLogger } = require("../../utils/logging"); +const analyticsDb = require("../../utils/sqlite3"); + +const allowedLevels = ["warn", "error", "info", "debug", "functions", "notice"]; +const logsDbPath = path.resolve(__dirname, "../../../data/logs.sqlite3"); + +if (!fs.existsSync(logsDbPath)) { + fs.closeSync(fs.openSync(logsDbPath, "w")); +} + +const logsDb = new Database(logsDbPath, { readonly: true }); + +exports.renderLogsPage = (req, res) => { + res.renderWithBaseContext("admin-pages/logs", { + showSidebar: false, + showFooter: false, + }); +}; + +exports.fetchLogs = (req, res) => { + const log_level = req.query.log_level || "*"; + const date = req.query.date || "*"; + const page = parseInt(req.query.page) || 1; + const limit = parseInt(req.query.limit) || 50; + const offset = (page - 1) * limit; + + if (log_level !== "*" && !allowedLevels.includes(log_level)) { + return res.status(400).json({ error: "Invalid log_level" }); + } + + const conditions = []; + const params = []; + + if (log_level !== "*") { + conditions.push("level = ?"); + params.push(log_level); + } + + if (date !== "*") { + conditions.push("date(timestamp) = ?"); + params.push(date); + } + + const whereClause = conditions.length + ? 
"WHERE " + conditions.join(" AND ") + : ""; + const countQuery = `SELECT COUNT(*) as total FROM logs ${whereClause}`; + const totalResult = logsDb.prepare(countQuery).get(...params); + const total = totalResult.total; + + const logQuery = ` + SELECT id, timestamp, level + FROM logs + ${whereClause} + ORDER BY timestamp DESC + LIMIT ? OFFSET ? + `; + + try { + const logRows = logsDb.prepare(logQuery).all(...params, limit, offset); + if (logRows.length === 0) { + return res.json({ + logs: [], + pagination: { page, limit, total, totalPages: 0, hasMore: false }, + }); + } + + const logIds = logRows.map((row) => row.id); + const placeholders = logIds.map(() => "?").join(","); + const metadataQuery = ` + SELECT m.log_id, k.key, m.value + FROM log_metadata m + JOIN keys k ON k.id = m.key_id + WHERE m.log_id IN (${placeholders}) + `; + const metadataRows = logsDb.prepare(metadataQuery).all(...logIds); + + const metadataMap = {}; + metadataRows.forEach((row) => { + if (!metadataMap[row.log_id]) metadataMap[row.log_id] = {}; + try { + metadataMap[row.log_id][row.key] = JSON.parse(row.value); + } catch { + metadataMap[row.log_id][row.key] = row.value; + } + }); + + const logs = logRows.map((row) => ({ + id: row.id, + timestamp: row.timestamp, + level: row.level, + ...(metadataMap[row.id] || {}), + })); + + const totalPages = Math.ceil(total / limit); + + res.json({ + logs, + pagination: { + page, + limit, + total, + totalPages, + hasMore: page < totalPages, + }, + }); + } catch (error) { + winstonLogger.error("Query error:", error); + res.status(500).json({ error: "Failed to query logs" }); + } +}; + +exports.fetchAnalyticsLogs = (req, res) => { + const page = parseInt(req.query.page) || 1; + const limit = parseInt(req.query.limit) || 50; + const offset = (page - 1) * limit; + + if (page < 1 || limit < 1) { + return res.status(400).json({ error: "Invalid pagination parameters" }); + } + + const conditions = []; + const params = []; + const whereClause = conditions.length + ? 
"WHERE " + conditions.join(" AND ") + : ""; + + try { + const countQuery = `SELECT COUNT(*) as total FROM analytics_view ${whereClause}`; + analyticsDb.get(countQuery, params, (err, totalResult) => { + if (err) { + winstonLogger.error("Count query error:", err); + return res.status(500).json({ error: "Failed to query logs" }); + } + + const total = totalResult.total; + const queryParams = [...params, limit, offset]; + + const logsQuery = ` + SELECT + id, + timestamp_human AS timestamp, + url, + referrer, + user_agent, + viewport, + load_time, + event, + forwardedIp, + directIp, + js_enabled + FROM analytics_view + ${whereClause} + ORDER BY timestamp DESC + LIMIT ? OFFSET ? + `; + + analyticsDb.all(logsQuery, queryParams, (err, logs) => { + if (err) { + winstonLogger.error("Logs query error:", err); + return res.status(500).json({ error: "Failed to query logs" }); + } + + const totalPages = Math.ceil(total / limit); + res.json({ + logs, + pagination: { + page, + limit, + total, + totalPages, + hasMore: page < totalPages, + }, + }); + }); + }); + } catch (error) { + winstonLogger.error("Query error:", error); + res.status(500).json({ error: "Failed to query logs" }); + } +}; diff --git a/src/controllers/sitemapControllers.js b/src/controllers/sitemapControllers.js new file mode 100644 index 0000000..0e5c42d --- /dev/null +++ b/src/controllers/sitemapControllers.js @@ -0,0 +1,56 @@ +const fs = require("fs"); +const path = require("path"); +const Handlebars = require("handlebars"); +const sitemapService = require("../services/sitemapService"); +const { qualifyLink } = require("../utils/qualifyLinks"); + +// Precompile XML template once +const xmlTplSrc = fs.readFileSync( + path.resolve(__dirname, "../views/pages/sitemap-xml.handlebars"), + "utf-8" +); +const xmlTpl = Handlebars.compile(xmlTplSrc); + +async function getSitemapHtml(req, res, next) { + try { + const sitemap = await sitemapService.getCompleteSitemap(); + const context = { + title: "Site Map", + sitemap, + 
}; + res.renderWithBaseContext("pages/sitemap", context); + } catch (err) { + next(err); + } +} + +async function getSitemapJson(req, res, next) { + try { + const sitemap = await sitemapService.getCompleteSitemap(); + res.json({ title: "Site Map", sitemap }); + } catch (err) { + next(err); + } +} + +async function getSitemapXml(req, res, next) { + try { + const urls = await sitemapService.getAllUrls(); + const formattedUrls = urls.map((url) => ({ + loc: qualifyLink(url.loc), + lastmod: url.lastmod, + changefreq: url.changefreq, + priority: url.priority, + })); + const xml = xmlTpl({ urls: formattedUrls }); + res.type("application/xml").send(xml); + } catch (err) { + next(err); + } +} + +module.exports = { + getSitemapHtml, + getSitemapJson, + getSitemapXml, +}; diff --git a/src/controllers/tagsController.js b/src/controllers/tagsController.js new file mode 100644 index 0000000..31ba732 --- /dev/null +++ b/src/controllers/tagsController.js @@ -0,0 +1,32 @@ +const { getPostsByTag } = require("../services/tagsService"); +const { getAllTags } = require("../services/sitemapService"); +const HttpError = require("../utils/HttpError"); +const { normalizeTag } = require("../utils/normalize"); + +exports.renderTagsPage = async (req, res, next) => { + try { + const tags = await getAllTags(); + res.renderWithBaseContext("pages/tags", { tags }); + } catch (err) { + next(err); + } +}; + +exports.renderTagPostsPage = async (req, res, next) => { + try { + const rawTag = req.params.tag; + const normalizedTag = normalizeTag(rawTag); + const posts = await getPostsByTag(rawTag); + + if (!posts || posts.length === 0) { + return next(new HttpError("No posts found for this tag.", 404)); + } + + res.renderWithBaseContext("pages/tag-posts", { + tag: normalizedTag, + posts, + }); + } catch (err) { + next(err); + } +}; diff --git a/src/controllers/techkStackController.js b/src/controllers/techkStackController.js new file mode 100644 index 0000000..a1826ef --- /dev/null +++ 
b/src/controllers/techkStackController.js @@ -0,0 +1,21 @@ +// controllers/techStackController.js + +const HttpError = require("../utils/HttpError"); // Adjust path as needed +const techStack = require("../../content/techStack.json"); // JSON file from previous message +const { baseUrl } = require("../utils/baseUrl"); +module.exports = (req, res, next) => { + try { + const techWithBase = techStack.map((item) => ({ + ...item, + png: baseUrl + item.png, + svg: baseUrl + item.svg, + })); + res.renderWithBaseContext("pages/stack", { + tech: techWithBase, + }); + } catch (err) { + next( + new HttpError("Failed to load tech stack", 500, { originalError: err }) + ); + } +}; diff --git a/src/lib/logParser.js b/src/lib/logParser.js new file mode 100644 index 0000000..81988bb --- /dev/null +++ b/src/lib/logParser.js @@ -0,0 +1,15 @@ +function parseLogLine(line) { + const parts = line.split(" "); + if (parts.length < 1) return null; + + const ip = parts[0]; + const match = line.match(/"([^"]*)"/); + if (!match) return null; + + const request = match[1].split(" "); + if (request.length < 2) return null; + + return { ip, url: request[1] }; +} + +module.exports = { parseLogLine }; diff --git a/src/lib/routeFilter.js b/src/lib/routeFilter.js new file mode 100644 index 0000000..c1ea3a8 --- /dev/null +++ b/src/lib/routeFilter.js @@ -0,0 +1,55 @@ +const excludeIps = new Set(["192.168.1.50", "73.19.173.54"]); + +function flattenRouterLayers(stack, acc = []) { + for (const layer of stack) { + acc.push(layer); + const h = layer.handle; + if (typeof h === "function") { + if (h.stack && Array.isArray(h.stack)) { + flattenRouterLayers(h.stack, acc); + } else if (h.handle && h.handle.stack && Array.isArray(h.handle.stack)) { + flattenRouterLayers(h.handle.stack, acc); + } + } + } + return acc; +} + +function getExcludeRoutes(router) { + const rootStack = router.stack; + const flat = flattenRouterLayers(rootStack); + const routes = []; + + for (const layer of flat) { + if (layer.route && 
layer.route.path) { + routes.push(layer.route.path); + } + } + + return routes; +} + +function shouldExclude(ip, url, excludeRoutes) { + if (excludeIps.has(ip)) return true; + + for (const route of excludeRoutes) { + if ( + route.includes(":token") || + route.includes(":year") || + route.includes(":month") || + route.includes(":name") + ) { + const routePrefix = route.split(":")[0]; + if (url.startsWith(routePrefix)) return true; + } else { + if (url === route || url.startsWith(route)) return true; + } + } + + return false; +} + +module.exports = { + getExcludeRoutes, + shouldExclude, +}; diff --git a/src/middleware/resolveReturnUrl.js b/src/middleware/resolveReturnUrl.js new file mode 100644 index 0000000..6448e05 --- /dev/null +++ b/src/middleware/resolveReturnUrl.js @@ -0,0 +1,29 @@ +const { baseUrl } = require("../utils/baseUrl"); + +function resolveReturnUrl(req, res, next) { + const myDomain = "jasonpoage.com"; + const fallbackUrl = baseUrl; + const referrer = req.body?.referrer; + + req.returnUrl = fallbackUrl; + + if (typeof referrer !== "string") return next(); + + try { + const url = new URL(referrer); + const isSameDomain = url.hostname.endsWith(myDomain); + const isNotPresentation = !url.pathname.includes( + "/projects/website-presentation" + ); + + if (isSameDomain && isNotPresentation) { + req.returnUrl = referrer; + } + } catch { + // noop + } + + next(); +} + +module.exports = resolveReturnUrl; diff --git a/src/routes/admin.js b/src/routes/admin.js index 2c4dd97..bf38e8f 100644 --- a/src/routes/admin.js +++ b/src/routes/admin.js @@ -1,43 +1,12 @@ -// src/routes/admin.js +// src/routes/admin. 
// NOTE: this span of the patch rewrites two route files; each is shown
// below under its path banner (block-scoped to keep requires separate).

// ── src/routes/admin.js ─────────────────────────────────────────────────
{
  const express = require("express");
  const router = express.Router();
  const controller = require("../controllers/adminTokenController");

  // Token-gated admin redirect: expired tokens are cleaned up
  // probabilistically first, then the token is validated and the request
  // forwarded to the auth login URL (see adminTokenController).
  router.get(
    "/:token",
    controller.cleanupTokensMiddleware,
    controller.handleTokenRedirect
  );

  module.exports = router;
}

// ── src/routes/admin/dskMonitor.js ──────────────────────────────────────
{
  const express = require("express");
  const router = express.Router();
  // FIX: this file sits in src/routes/admin/, so src/controllers is two
  // levels up — "../controllers/..." would resolve to the non-existent
  // src/routes/controllers/. The sibling "../../utils/logging" require
  // confirms the correct depth.
  const controller = require("../../controllers/admin/diskSpaceController");
  const { diskSpaceMonitor } = require("../../utils/logging");

  // Every disk-monitor endpoint requires an admin session.
  router.use(controller.requireAdmin);
  router.use(diskSpaceMonitor.adminNotificationMiddleware());

  router.get("/disk-space/status", controller.getDiskSpaceStatus);
  router.post("/disk-space/cleanup", controller.manualDiskCleanup);
  router.get("/disk-space/config", controller.getDiskSpaceConfig);
  router.put("/disk-space/config", controller.updateDiskSpaceConfig);
  router.get("/logs/directory", controller.getLogDirectoryContents);

  module.exports = router;
}
forwardedIp, directIp, js_enabled) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, - [ - timestamp, - url, - referrer, - userAgent, - viewport, - loadTime, - event, - forwardedIp, - directIp, - 1, - ] - ); - res.sendStatus(204); -}; diff --git a/src/routes/blog_index.js b/src/routes/blog_index.js deleted file mode 100644 index 1cfb114..0000000 --- a/src/routes/blog_index.js +++ /dev/null @@ -1,38 +0,0 @@ -const express = require("express"); -const path = require("path"); -const { getAllPosts } = require("../utils/postFileUtils"); -const router = express.Router(); - -router.get("/blog", async (req, res) => { - const postsDir = path.join(__dirname, "../../content/posts"); - const allPosts = await getAllPosts(postsDir, { - includeUnpublished: req.query.drafts === "true", - }); - - const publishedPosts = allPosts.filter( - (post) => - post.published || - process.env.NODE_ENV === "production" || - process.env.NODE_ENV === "testing" - ); - // Sort posts descending by date - publishedPosts.sort((a, b) => new Date(b.date) - new Date(a.date)); - - // Prepare context compatible with the blog-index.hbs layout - // Add `templateContent` as excerpt or limited content if needed here - // For now, use a simple excerpt from markdown or placeholder - const posts = publishedPosts.map((post) => ({ - url: post.url, - data: { - title: post.title, - date: post.date, - tags: post.tags, - published: post.published, // add this - }, - templateContent: post.excerpt || "", - })); - - res.renderWithBaseContext("pages/blog_index", { collections: { posts } }); -}); - -module.exports = router; diff --git a/src/routes/contact.js b/src/routes/contact.js index 16ac487..4a4fda7 100644 --- a/src/routes/contact.js +++ b/src/routes/contact.js @@ -1,183 +1,17 @@ // src/routes/contact.js const express = require("express"); const router = express.Router(); -const sendContactMail = require("../utils/sendContactMail"); - const formLimiter = require("../utils/formLimiter"); -const verifyHCaptcha = 
require("../utils/verifyHCaptcha"); -const crypto = require("crypto"); -const HttpError = require("../utils/HttpError"); -const { qualifyLink } = require("../utils/qualifyLinks"); const { - captureSecurityData, - analyzeThreatLevel, - logSecurityEvent, -} = require("../utils/securityForensics"); -const { validateAndSanitizeEmail } = require("../utils/emailValidator"); + handleContactFormPost, + renderContactForm, + renderThankYouPage, +} = require("../controllers/contactControllers"); -function isReasonableLength(str, maxLen) { - return ( - typeof str === "string" && str.trim().length > 0 && str.length <= maxLen - ); -} -router.post("/contact", formLimiter, async (req, res, next) => { - try { - const { name, email, message, subject, clientData } = req.body; - const hcaptchaToken = - req.body.hcaptchaToken || req.body["g-recaptcha-response"]; +router.post("/contact", formLimiter, handleContactFormPost); - const emailResult = validateAndSanitizeEmail(email); +router.get("/contact", renderContactForm); - if ( - !emailResult.valid || - !isReasonableLength(name, 100) || - !isReasonableLength(subject, 150) || - !isReasonableLength(message, 2000) - ) { - const invalidData = captureSecurityData(req, { - formData: { name, email, subject, message }, - failureReason: emailResult.message || "invalid_input", - processingStep: "validation", - }); - - await logSecurityEvent(invalidData, "validation_failure"); - return next(new HttpError("Invalid input", 400, invalidData)); - } - // Capture security data - const securityData = captureSecurityData(req, { - formData: { name, email, message, subject }, - captchaProvided: !!hcaptchaToken, - clientData: clientData, // From client-side - processingStep: "initial_validation", - }); - - // Analyze threat level - const threatAnalysis = analyzeThreatLevel( - { name, email, message, subject }, - securityData - ); - - // Enhanced logging with threat analysis - await logSecurityEvent( - { - ...securityData, - threatAnalysis: threatAnalysis, - 
formData: { name, email, hasMessage: !!message, hasSubject: !!subject }, - }, - "contact_submission" - ); - - // CAPTCHA validation - if (!hcaptchaToken) { - await logSecurityEvent( - { - ...securityData, - threatAnalysis: threatAnalysis, - validationResult: "failed", - failureReason: "missing_captcha", - }, - "validation_failure" - ); - - return next(new HttpError("Captcha token missing", 400)); - } - - const valid = await verifyHCaptcha(hcaptchaToken); - if (!valid) { - await logSecurityEvent( - { - ...securityData, - threatAnalysis: threatAnalysis, - validationResult: "failed", - failureReason: "captcha_failed", - }, - "validation_failure" - ); - - return next(new HttpError("Captcha verification failed", 400)); - } - - // High threat handling - if (threatAnalysis.level === "high") { - await logSecurityEvent( - { - ...securityData, - threatAnalysis: threatAnalysis, - action: "blocked_high_threat", - }, - "threat_blocked" - ); - - // Still redirect to thank you to not reveal detection - res.customRedirect("/contact/thankyou"); - return; - } - - // Send email (but flag for review if medium threat) - const emailData = { name, email, message, subject }; - if (threatAnalysis.level === "medium") { - emailData.securityFlag = `[SECURITY REVIEW REQUIRED - Score: ${threatAnalysis.score}]`; - } - - await sendContactMail(emailData); - - // Log successful completion - await logSecurityEvent( - { - ...securityData, - threatAnalysis: threatAnalysis, - processingResult: "success", - emailSent: true, - }, - "contact_success" - ); - - res.customRedirect("/contact/thankyou"); - } catch (err) { - const errorData = captureSecurityData(req, { - error: { - message: err.message, - stack: err.stack, - name: err.name, - }, - processingStep: "error_handling", - }); - - await logSecurityEvent(errorData, "contact_error"); - next(err); - } -}); - -router.get("/contact", async (req, res) => { - const securityData = captureSecurityData(req, { - pageAccess: "contact_form", - processingStep: 
"page_render", - }); - - await logSecurityEvent(securityData, "page_access"); - - const context = { - csrfToken: res.locals.csrfToken, - title: "Contact", - formAction: qualifyLink("/contact"), - formMethod: "POST", - }; - res.renderWithBaseContext("pages/contact.handlebars", context); -}); - -router.get("/contact/thankyou", async (req, res) => { - const securityData = captureSecurityData(req, { - pageAccess: "thankyou_page", - processingStep: "page_render", - }); - - await logSecurityEvent(securityData, "thankyou_access"); - - res.renderGenericMessage({ - title: "Thank You", - message: - "Your message has been sent successfully. We will get back to you shortly.", - }); -}); +router.get("/contact/thankyou", renderThankYouPage); module.exports = router; diff --git a/src/routes/docs.js b/src/routes/docs.js index 65bc861..934d7e9 100644 --- a/src/routes/docs.js +++ b/src/routes/docs.js @@ -1,145 +1,16 @@ -// src/routes/docs/index.js +// src/routes/docs.js const express = require("express"); -const path = require("path"); -const fs = require("fs/promises"); -const yaml = require("js-yaml"); - const router = express.Router(); -const docsContext = require("../utils/docsContext"); -const HttpError = require("../utils/HttpError"); -const { baseUrl } = require("../utils/baseUrl"); -const { qualifyLink } = require("../utils/qualifyLinks"); +const { + renderDocsIndex, + renderDocsSummary, + renderDocsByType, + renderDocsModule, +} = require("../controllers/docsControllers"); -const { winstonLogger, manualLogger } = require("../utils/logging"); - -const docsDir = path.join(__dirname, "../../content/docs"); -let docsCache = {}; // { [path]: { modules: {}, crossCuttingSummary: {} } } -async function loadDocFile(filePath) { - if (docsCache[filePath]) return docsCache[filePath]; - try { - const fullPath = path.join(docsDir, filePath + ".yaml"); - const fileContent = await fs.readFile(fullPath, "utf-8"); - const parsed = yaml.load(fileContent); - const crossCuttingSummary = 
parsed["Cross Cutting Summary"] || null; - docsCache[filePath] = { - modules: parsed, - crossCuttingSummary, - }; - return docsCache[filePath]; - } catch (e) { - manualLogger.error(e.stack); - return null; - } -} - -// Helper function to filter security from a single module -function filterModuleSecurityKeys(moduleDoc) { - if (!moduleDoc || typeof moduleDoc !== "object") { - return moduleDoc; - } - - const { securityAndStability, ...moduleWithoutSecurity } = moduleDoc; - return moduleWithoutSecurity; -} -// /docs/summary - aggregate crossCuttingSummary from all cached docs -router.get("/summary", async (req, res, next) => { - const summaries = []; - const files = await fs.readdir(docsDir); - const yamlFiles = files - .filter((f) => f.endsWith(".yaml")) - .map((f) => f.slice(0, -5)); - for (const file of yamlFiles) { - const doc = await loadDocFile(file); - if (doc?.crossCuttingSummary) { - summaries.push({ path: file, summary: doc.crossCuttingSummary }); - } - } - - const context = await docsContext(req.isAuthenticated, { - layout: "docs", - docPath: baseUrl + "/docs/summary", - docModule: null, - }); - - res.render("docs/summary", { - ...context, - summaries, - }); -}); - -// /docs/:path - show all modules in a YAML file -router.get("/:moduleType", async (req, res, next) => { - const { moduleType: docPath } = req.params; - const doc = await loadDocFile(docPath); - - const context = await docsContext(req.isAuthenticated, { - layout: "docs", - docPath: baseUrl + "/docs" + docPath, - docModule: null, - }); - - // Precompute links - const modulesWithLinks = Object.entries(doc.modules).map(([key, value]) => ({ - name: key, - url: `${baseUrl}/docs/${docPath}/${key}`, - })); - - res.render("docs/path", { - ...context, - docsHome: qualifyLink("/docs"), - pathName: docPath, - crossCuttingSummary: doc.crossCuttingSummary, - modules: modulesWithLinks, - }); -}); - -// /docs/:path/:module - show single module from YAML file -router.get("/:moduleType/:module", async (req, res, 
next) => { - const { moduleType: docPath, module } = req.params; - const doc = await loadDocFile(docPath); - if (!doc) return next(new HttpError("Documentation not found", 404)); - const moduleDoc = doc.modules[module]; - if (!moduleDoc) - return next(new HttpError("Module documentation not found", 404)); - - const context = await docsContext(req.isAuthenticated, { - layout: "docs", - docPath, - docModule: module, - }); - - res.render("docs/module", { - ...context, - docsHome: qualifyLink("/docs"), - pathUrl: qualifyLink("/docs/" + docPath), - pathName: docPath, - module, - moduleDoc: filterModuleSecurityKeys(moduleDoc), - }); -}); - -// /docs - list all doc files -router.get("/", async (req, res, next) => { - try { - const files = await fs.readdir(docsDir); - const yamlFiles = files - .filter((f) => f.endsWith(".yaml")) - .map((f) => f.slice(0, -5)); - - const context = await docsContext(req.isAuthenticated, { - layout: "docs", - docPath: "/docs", - docModule: null, - }); - - res.render("docs/index", { - ...context, - docsPaths: yamlFiles.map((name) => `${req.baseUrl || ""}/${name}`), - }); - } catch (err) { - req.log.error(err.stack); - next(new HttpError("Failed to read docs directory", 500)); - } -}); +router.get("/", renderDocsIndex); +router.get("/summary", renderDocsSummary); +router.get("/:moduleType", renderDocsByType); +router.get("/:moduleType/:module", renderDocsModule); module.exports = router; diff --git a/src/routes/errorPage.js b/src/routes/errorPage.js deleted file mode 100644 index 19f49c0..0000000 --- a/src/routes/errorPage.js +++ /dev/null @@ -1,17 +0,0 @@ -// src/routes/errorPage -const { getErrorContext } = require("../utils/errorContext"); - -module.exports = async (req, res) => { - const code = parseInt(req.query.code, 10) || 500; - const errorContext = getErrorContext(code); - - const context = { - title: errorContext.title, - message: errorContext.message, - statusCode: errorContext.statusCode, - content: "", - }; - - 
res.status(errorContext.statusCode); - res.renderGenericMessage(context); -}; diff --git a/src/routes/index.js b/src/routes/index.js index 2ede9c2..c1d50a7 100644 --- a/src/routes/index.js +++ b/src/routes/index.js @@ -3,27 +3,25 @@ const router = express.Router(); const path = require("path"); -const analytics = require("./analytics"); -const robots = require("./robots"); -const blog_index = require("./blog_index"); +const analytics = require("../controllers/analyticsControllers"); +const robots = require("../controllers/robotsController"); const csrfToken = require("../middleware/csrfToken"); -const errorPage = require("./errorPage"); +const errorPage = require("../controllers/errorPageController"); const admin = require("./admin"); const tags = require("./tags"); const presentation = require("./presentation"); const contact = require("./contact"); const sitemap = require("./sitemap"); -const post = require("./post"); +const { blogPost, blogIndex } = require("../controllers/blogControllers"); const pages = require("./pages"); const docs = require("./docs"); -const rssFeed = require("./rssFeed"); -const { qualifyLink } = require("../utils/qualifyLinks"); +const rssFeedController = require("../controllers/rssFeedController"); const HttpError = require("../utils/HttpError"); const securedMiddleware = require("../middleware/secured"); const securedRoutes = require("./secured"); -const stack = require("./stack"); +const stack = require("../controllers/techkStackController"); const favicon = require("serve-favicon"); const faviconsPath = path.join(__dirname, "..", "..", "public", "favicons"); @@ -38,7 +36,6 @@ router.get("/error", errorPage); // Landing page after error is logged router.use(admin); -router.use(stack); router.post("/track", analytics); router.post("/analytics", analytics); @@ -84,17 +81,20 @@ router.use("/favicons", express.static(faviconsPath)); router.use(favicon(faviconFile)); -router.use(blog_index); -router.use(robots); router.use(contact, 
csrfToken); router.use(sitemap); router.use(pages); -router.use(rssFeed); router.use(tags); router.use("/projects/website-presentation", presentation); router.use("/docs", docs); -router.get("/blog/:year/:month/:name", post); +router.get("/blog/:year/:month/:name", blogPost); +router.get("/blog", blogIndex); + +router.get("/stack", stack); + +router.get("/robots.txt", robots); +router.get("/rss-feed.xml", rssFeedController); router.get("/", (req, res) => { res.customRedirect("/blog", 301); diff --git a/src/routes/newsletter.js b/src/routes/newsletter.js index 48c6aa6..b6f369e 100644 --- a/src/routes/newsletter.js +++ b/src/routes/newsletter.js @@ -1,78 +1,16 @@ const express = require("express"); const router = express.Router(); -const sendNewsletterSubscriptionMail = require("../utils/sendNewsletterSubscriptionMail"); -const { saveEmail } = require("../services/newsletterService"); const formLimiter = require("../utils/formLimiter"); -const { unsubscribeEmail } = require("../services/newsletterService"); -const { ERRORS } = require("../constants/newsletterConstants"); - const { - validateAndSanitizeEmail, - MESSAGES, -} = require("../utils/emailValidator"); + renderNewsletterForm, + renderSubscriptionSuccess, + handleNewsletterSubscribe, + handleUnsubscribe, +} = require("../controllers/newsletterController"); -const { qualifyLink } = require("../utils/qualifyLinks"); - -router.get("/newsletter", async (req, res) => { - const context = { - csrfToken: res.locals.csrfToken, - title: "Newsletter", - formAction: qualifyLink("/newsletter"), - formMethod: "POST", - }; - res.renderWithBaseContext("pages/newsletter.handlebars", context); -}); - -router.get("/newsletter/success", async (req, res) => { - const context = { - title: "Unsubscribed", - message: - "You’ve successfully subscribed to my newsletter. 
Stay tuned for updates.", - }; - res.renderWithBaseContext("pages/newsletter-success.handlebars", context); -}); - -router.post("/newsletter", formLimiter, async (req, res, next) => { - const { email: rawEmail } = req.body; - const result = validateAndSanitizeEmail(rawEmail); - - if (!result.valid) { - return next(new HttpError(result.message, 400)); - } - - const sanitizedEmail = result.email; - - try { - await saveEmail(sanitizedEmail); - await sendNewsletterSubscriptionMail({ email: sanitizedEmail }); - res.customRedirect("/newsletter/success"); - } catch (err) { - req.log.error("Newsletter subscription error:", err); - if (err.code === "DUPLICATE_EMAIL") { - return res.customRedirect("/newsletter/success"); - } - next(err); - } -}); - -router.get("/unsubscribe", async (req, res) => { - const { valid, email, message } = validateAndSanitizeEmail(req.query.email); - - if (!valid) { - return next(new HttpError(message || ERRORS.INVALID_EMAIL, 400)); - } - - try { - await unsubscribeEmail(email); - const context = { - title: "Thank You", - message: - "You’ve been successfully removed from the newsletter mailing list.", - }; - res.renderGenericMessage(context); - } catch (err) { - next(new HttpError({ error: "Failed to unsubscribe" }, 500)); - } -}); +router.get("/newsletter", renderNewsletterForm); +router.get("/newsletter/success", renderSubscriptionSuccess); +router.post("/newsletter", formLimiter, handleNewsletterSubscribe); +router.get("/unsubscribe", handleUnsubscribe); module.exports = router; diff --git a/src/routes/pages.js b/src/routes/pages.js index 07499bf..21e6217 100644 --- a/src/routes/pages.js +++ b/src/routes/pages.js @@ -24,8 +24,6 @@ construction.register("/changelog", "Changelog"); construction.register("/archive", "Archive"); -// construction.register("/tags", "Tags"); -// construction.register("/contact", "Contact Me"); markdown.register("/tools", "tools", "tools"); markdown.register("/about/me", "about-me"); diff --git a/src/routes/post.js 
b/src/routes/post.js deleted file mode 100644 index 3ad9680..0000000 --- a/src/routes/post.js +++ /dev/null @@ -1,56 +0,0 @@ -// src/routes/post.js -const { marked } = require("marked"); -const fs = require("fs").promises; -const path = require("path"); -const matter = require("gray-matter"); - -const HttpError = require("../utils/HttpError"); - -module.exports = async (req, res, next) => { - const { year, month, name } = req.params; - - // Validate year: 4 digits only - if (!/^\d{4}$/.test(year)) { - return next(new HttpError("Invalid year parameter.", 400)); - } - - // Validate month: 01-12 only - if (!/^(0[1-9]|1[0-2])$/.test(month)) { - return next(new HttpError("Invalid month parameter.", 400)); - } - - // Validate name: allow alphanumeric, dash, underscore only (no dots, no slashes) - if (!/^[a-zA-Z0-9_-]+$/.test(name)) { - return next(new HttpError("Invalid post name parameter.", 400)); - } - - const mdPath = path.join( - __dirname, - "../../content/posts", - year, - month, - `${name}.md` - ); - - try { - const fileContent = await fs.readFile(mdPath, "utf8"); - const { data: frontmatter, content } = matter(fileContent); - if ( - !frontmatter.published && - (process.env.NODE_ENV === "production" || - process.env.NODE_ENV === "testing") - ) { - throw new Error("Attempted to access an unpublished page in production"); - } - const htmlContent = marked(content); - const context = { - title: frontmatter.title, - date: frontmatter.date, - author: frontmatter.author, - content: htmlContent, - }; - res.renderWithBaseContext("pages/post", context); - } catch (err) { - next(new HttpError("The requested blog post could not be found.", 404)); - } -}; diff --git a/src/routes/presentation.js b/src/routes/presentation.js index 3a07f91..ed31eae 100644 --- a/src/routes/presentation.js +++ b/src/routes/presentation.js @@ -1,43 +1,10 @@ -// src/routes/presentation.js const express = require("express"); -const path = require("path"); -const fs = require("fs/promises"); -const 
yaml = require("js-yaml"); - const router = express.Router(); -const HttpError = require("../utils/HttpError"); -const { qualifyLink } = require("../utils/qualifyLinks"); -const { baseUrl } = require("../utils/baseUrl"); -const { CSP_DIRECTIVES } = require("../constants/securityConstants"); + +const { renderPresentation } = require("../controllers/presentationController"); +const resolveReturnUrl = require("../middleware/resolveReturnUrl"); const { securityPolicy } = require("../middleware/applyProductionSecurity"); - -const yamlPath = path.resolve("content/presentation.yaml"); -function resolveReturnUrl(req, res, next) { - const myDomain = "jasonpoage.com"; - const fallbackUrl = baseUrl; - const referrer = req.body?.referrer; - - req.returnUrl = fallbackUrl; - - if (typeof referrer !== "string") return next(); - - try { - const url = new URL(referrer); - - const isSameDomain = url.hostname.endsWith(myDomain); - const isNotPresentation = !url.pathname.includes( - "/projects/website-presentation" - ); - - if (isSameDomain && isNotPresentation) { - req.returnUrl = referrer; - } - } catch { - // Invalid referrer, keep fallback - } - - next(); -} +const { CSP_DIRECTIVES } = require("../constants/securityConstants"); router.get( "/", @@ -46,37 +13,7 @@ scriptSrc: [...CSP_DIRECTIVES.scriptSrc, "'unsafe-eval'"], styleSrc: [...CSP_DIRECTIVES.styleSrc, "'unsafe-inline'"], }), - async (req, res, next) => { - try { - const fileContent = await fs.readFile(yamlPath, "utf8"); - const data = yaml.load(fileContent); - - if (data.slides) { - for (const slide of data.slides) { - if (slide.images) { - slide.images = slide.images.map((img) => { - if (img.src && !img.src.match(/^https?:\/\//)) { - img.src = qualifyLink(img.src); - } - return img; - }); - } - } - } - - res.render("pages/presentation", { - layout: "presentation", - slides: data.slides, - title: data.title, - baseUrl, - returnUrl: req.returnUrl, - nonce: res.locals.nonce, - }); - } catch (err) { - 
// ── src/routes/secured/filteredLogs.js ──────────────────────────────────
const router = require("express").Router();
// FIX: this file sits in src/routes/secured/, so src/controllers is two
// levels up — "../controllers/..." would resolve to the non-existent
// src/routes/controllers/. (Compare the sibling logs.js, which correctly
// uses "../../controllers/secured/logsController".)
const { getFilteredLogs } = require("../../controllers/filteredLogsController");

// Plain-text nginx access log, filtered of excluded IPs and app routes
// (filtering logic lives in the controller / src/lib/routeFilter.js).
router.get("/filtered-logs", getFilteredLogs);

module.exports = router;
a/src/routes/secured/logs.js +++ b/src/routes/secured/logs.js @@ -1,219 +1,9 @@ const express = require("express"); const router = express.Router(); -const Database = require("better-sqlite3"); -const path = require("path"); -const fs = require("fs"); +const controller = require("../../controllers/secured/logsController"); -const { winstonLogger } = require("../../utils/logging"); - -const allowedLevels = ["warn", "error", "info", "debug", "functions", "notice"]; - -const logsDbPath = path.resolve(__dirname, "../../../data/logs.sqlite3"); - -if (!fs.existsSync(logsDbPath)) { - fs.closeSync(fs.openSync(logsDbPath, "w")); -} - -const logsDb = new Database(logsDbPath, { readonly: true }); -const analyticsDb = require("../../utils/sqlite3"); - -router.get("/logs", (req, res) => { - res.renderWithBaseContext("admin-pages/logs", { - showSidebar: false, - showFooter: false, - }); -}); - -router.post("/logs", (req, res) => { - const log_level = req.query.log_level || "*"; - const date = req.query.date || "*"; - const page = parseInt(req.query.page) || 1; - const limit = parseInt(req.query.limit) || 50; - const offset = (page - 1) * limit; - - if (log_level !== "*" && !allowedLevels.includes(log_level)) { - return res.status(400).json({ error: "Invalid log_level" }); - } - - const conditions = []; - const params = []; - - if (log_level !== "*") { - conditions.push("level = ?"); - params.push(log_level); - } - - if (date !== "*") { - conditions.push("date(timestamp) = ?"); - params.push(date); - } - - const whereClause = conditions.length - ? "WHERE " + conditions.join(" AND ") - : ""; - - // Count query - simple and fast - const countQuery = `SELECT COUNT(*) as total FROM logs ${whereClause}`; - const totalResult = logsDb.prepare(countQuery).get(...params); - const total = totalResult.total; - - // STEP 1: Get just the log records we need (fast!) - const logQuery = ` - SELECT id, timestamp, level - FROM logs - ${whereClause} - ORDER BY timestamp DESC - LIMIT ? OFFSET ? 
- `; - - try { - const logRows = logsDb.prepare(logQuery).all(...params, limit, offset); - - if (logRows.length === 0) { - return res.json({ - logs: [], - pagination: { page, limit, total, totalPages: 0, hasMore: false }, - }); - } - - // STEP 2: Get metadata only for these specific logs - const logIds = logRows.map((row) => row.id); - const placeholders = logIds.map(() => "?").join(","); - - const metadataQuery = ` - SELECT - m.log_id, - k.key, - m.value - FROM log_metadata m - JOIN keys k ON k.id = m.key_id - WHERE m.log_id IN (${placeholders}) - `; - - const metadataRows = logsDb.prepare(metadataQuery).all(...logIds); - - // STEP 3: Build metadata lookup map - const metadataMap = {}; - metadataRows.forEach((row) => { - if (!metadataMap[row.log_id]) { - metadataMap[row.log_id] = {}; - } - try { - metadataMap[row.log_id][row.key] = JSON.parse(row.value); - } catch { - metadataMap[row.log_id][row.key] = row.value; - } - }); - - // STEP 4: Combine logs with their metadata - const logs = logRows.map((row) => ({ - id: row.id, - timestamp: row.timestamp, - level: row.level, - ...(metadataMap[row.id] || {}), - })); - - res.json({ - logs, - pagination: { - page, - limit, - total, - totalPages: Math.ceil(total / limit), - hasMore: page < Math.ceil(total / limit), - }, - }); - } catch (error) { - winstonLogger.error("Query error:", error); - res.status(500).json({ error: "Failed to query logs" }); - } -}); -router.post("/logs/analytics", (req, res) => { - // const event = req.query.event || "*"; - // const date = req.query.date || "*"; - const page = parseInt(req.query.page) || 1; - const limit = parseInt(req.query.limit) || 50; - const offset = (page - 1) * limit; - - if (page < 1 || limit < 1) { - return res.status(400).json({ error: "Invalid pagination parameters" }); - } - - const conditions = []; - const params = []; - - // Uncomment and modify these when you want to add filters - // if (event !== "*") { - // conditions.push("event = ?"); - // params.push(event); - // 
} - // if (date !== "*") { - // conditions.push("date(timestamp_human) = ?"); - // params.push(date); - // } - - const whereClause = conditions.length - ? "WHERE " + conditions.join(" AND ") - : ""; - - try { - // Count total matching rows - const countQuery = `SELECT COUNT(*) as total FROM analytics_view ${whereClause}`; - - analyticsDb.get(countQuery, params, (err, totalResult) => { - if (err) { - winstonLogger.error("Count query error:", err); - return res.status(500).json({ error: "Failed to query logs" }); - } - - const total = totalResult.total; - - // Query logs with pagination - const logsQuery = ` - SELECT - id, - timestamp_human AS timestamp, - url, - referrer, - user_agent, - viewport, - load_time, - event, - forwardedIp, - directIp, - js_enabled - FROM analytics_view - ${whereClause} - ORDER BY timestamp DESC - LIMIT ? OFFSET ? - `; - - const queryParams = [...params, limit, offset]; - - analyticsDb.all(logsQuery, queryParams, (err, logs) => { - if (err) { - winstonLogger.error("Logs query error:", err); - return res.status(500).json({ error: "Failed to query logs" }); - } - - const totalPages = Math.ceil(total / limit); - - res.json({ - logs, - pagination: { - page, - limit, - total, - totalPages, - hasMore: page < totalPages, - }, - }); - }); - }); - } catch (error) { - winstonLogger.error("Query error:", error); - res.status(500).json({ error: "Failed to query logs" }); - } -}); +router.get("/logs", controller.renderLogsPage); +router.post("/logs", controller.fetchLogs); +router.post("/logs/analytics", controller.fetchAnalyticsLogs); module.exports = router; diff --git a/src/routes/secured/routesList.js b/src/routes/secured/routesList.js deleted file mode 100644 index 68e4e8a..0000000 --- a/src/routes/secured/routesList.js +++ /dev/null @@ -1,39 +0,0 @@ -// src/routes/secured/routesList.js -const express = require("express"); -const { getRoutes, refreshRoutes } = require("../../middleware/routesList"); - -const router = express.Router(); - 
-router.get("/routes", (req, res) => { - try { - const routes = getRoutes(); - res.json({ - count: routes.length, - routes: routes, - }); - } catch (error) { - res.status(500).json({ - error: "Failed to retrieve routes", - message: error.message, - }); - } -}); - -// Optional: endpoint to refresh the route cache -router.post("/routes/refresh", (req, res) => { - try { - const routes = refreshRoutes(); - res.json({ - message: "Routes refreshed", - count: routes.length, - routes: routes, - }); - } catch (error) { - res.status(500).json({ - error: "Failed to refresh routes", - message: error.message, - }); - } -}); - -module.exports = router; diff --git a/src/routes/sitemap.js b/src/routes/sitemap.js index 76919a1..5a241ab 100644 --- a/src/routes/sitemap.js +++ b/src/routes/sitemap.js @@ -1,51 +1,10 @@ -// src/routes/sitemap.js +// src/routes/sitemap. const express = require("express"); const router = express.Router(); -const fs = require("fs"); -const path = require("path"); -const Handlebars = require("handlebars"); -const sitemapService = require("../services/sitemapService"); -const { qualifyLink } = require("../utils/qualifyLinks.js"); +const sitemapController = require("../controllers/sitemapControllers"); -// Precompile XML template once -const xmlTplSrc = fs.readFileSync( - path.resolve(__dirname, "../views/pages/sitemap-xml.handlebars"), - "utf-8" -); -const xmlTpl = Handlebars.compile(xmlTplSrc); - -// HTML sitemap page -router.get("/sitemap", async (req, res) => { - const context = { - title: "Site Map", - sitemap: await sitemapService.getCompleteSitemap(), - }; - res.renderWithBaseContext("pages/sitemap", context); -}); - -// HTML sitemap page -router.get("/sitemap.json", async (req, res) => { - const context = { - title: "Site Map", - sitemap: await sitemapService.getCompleteSitemap(), - }; - res.json(context); -}); - -// XML sitemap endpoint -router.get("/sitemap.xml", async (req, res) => { - const urls = await sitemapService.getAllUrls(); - - // Format 
URLs for XML template - const formattedUrls = urls.map((url) => ({ - loc: qualifyLink(url.loc), - lastmod: url.lastmod, - changefreq: url.changefreq, - priority: url.priority, - })); - - const xml = xmlTpl({ urls: formattedUrls }); - res.type("application/xml").send(xml); -}); +router.get("/sitemap", sitemapController.getSitemapHtml); +router.get("/sitemap.json", sitemapController.getSitemapJson); +router.get("/sitemap.xml", sitemapController.getSitemapXml); module.exports = router; diff --git a/src/routes/stack.js b/src/routes/stack.js deleted file mode 100644 index effc74d..0000000 --- a/src/routes/stack.js +++ /dev/null @@ -1,24 +0,0 @@ -// routes/stack.js -const express = require("express"); -const router = express.Router(); -const HttpError = require("../utils/HttpError"); // Adjust path as needed -const techStack = require("../../content/techStack.json"); // JSON file from previous message -const { baseUrl } = require("../utils/baseUrl"); -router.get("/stack", (req, res, next) => { - try { - const techWithBase = techStack.map((item) => ({ - ...item, - png: baseUrl + item.png, - svg: baseUrl + item.svg, - })); - res.renderWithBaseContext("pages/stack", { - tech: techWithBase, - }); - } catch (err) { - next( - new HttpError("Failed to load tech stack", 500, { originalError: err }) - ); - } -}); - -module.exports = router; diff --git a/src/routes/tags.js b/src/routes/tags.js index 54035fd..9301a62 100644 --- a/src/routes/tags.js +++ b/src/routes/tags.js @@ -1,43 +1,12 @@ const express = require("express"); -const { getPostsByTag } = require("../services/tagsService"); -const { getAllTags } = require("../services/sitemapService"); -const HttpError = require("../utils/HttpError"); +const { + renderTagsPage, + renderTagPostsPage, +} = require("../controllers/tagsController"); const router = express.Router(); -router.get("/tags", async (req, res, next) => { - try { - const tags = await getAllTags(); - const context = { tags }; - 
res.renderWithBaseContext("pages/tags", context); - } catch (err) { - next(err); - } -}); -function normalizeTag(tag) { - return tag - .trim() - .toLowerCase() - .replace(/[\s-]+/g, " "); -} - -router.get("/tags/:tag", async (req, res, next) => { - const tag = req.params.tag; - const normalizedTag = normalizeTag(tag); - - // Replace with your data source logic to fetch posts by tag - const posts = await getPostsByTag(tag); - - if (!posts || posts.length === 0) { - return next(new HttpError("No posts found for this tag.", 404)); - } - - const context = { - tag: normalizedTag, - posts, - }; - - res.renderWithBaseContext("pages/tag-posts", context); -}); +router.get("/tags", renderTagsPage); +router.get("/tags/:tag", renderTagPostsPage); module.exports = router; diff --git a/src/routes/testing/index.js b/src/routes/testing/index.js deleted file mode 100644 index 42fc72f..0000000 --- a/src/routes/testing/index.js +++ /dev/null @@ -1,6 +0,0 @@ -const router = require("express").Router(); -const routes = require("./routes"); - -router.use(routes); - -module.exports = router; diff --git a/src/routes/testing/routes.js b/src/routes/testing/routes.js deleted file mode 100644 index bcbed8d..0000000 --- a/src/routes/testing/routes.js +++ /dev/null @@ -1,37 +0,0 @@ -// src/routes/index.js -const express = require("express"); -const router = express.Router(); - -function flattenRouterLayers(stack, acc = []) { - for (const layer of stack) { - acc.push(layer); - const h = layer.handle; - console.log(layer); - if (typeof h === "function") { - if (h.stack && Array.isArray(h.stack)) { - flattenRouterLayers(h.stack, acc); - } else if (h.handle && h.handle.stack && Array.isArray(h.handle.stack)) { - flattenRouterLayers(h.handle.stack, acc); - } - } - } - return acc; -} - -router.get("/routes", (req, res) => { - const rootStack = req.app._router?.stack || req.app.router?.stack; - if (!rootStack) return res.sendStatus(500); - const flat = flattenRouterLayers(rootStack); - const routes = 
[]; - flat.forEach((layer) => { - if (layer.route && layer.route.path && layer.route.methods) { - routes.push({ - path: layer.route.path, - methods: Object.keys(layer.route.methods).map((m) => m.toUpperCase()), - }); - } - }); - res.status(200).json(routes); -}); - -module.exports = router; diff --git a/src/services/docsService.js b/src/services/docsService.js new file mode 100644 index 0000000..8441812 --- /dev/null +++ b/src/services/docsService.js @@ -0,0 +1,43 @@ +const fs = require("fs/promises"); +const path = require("path"); +const yaml = require("js-yaml"); +const { winstonLogger } = require("../utils/logging"); + +const docsDir = path.join(__dirname, "../../content/docs"); +const docsCache = {}; // { [path]: { modules: {}, crossCuttingSummary: {} } } + +async function loadDocFile(filePath) { + if (docsCache[filePath]) return docsCache[filePath]; + try { + const fullPath = path.join(docsDir, filePath + ".yaml"); + const fileContent = await fs.readFile(fullPath, "utf-8"); + const parsed = yaml.load(fileContent); + const crossCuttingSummary = parsed["Cross Cutting Summary"] || null; + docsCache[filePath] = { + modules: parsed, + crossCuttingSummary, + }; + return docsCache[filePath]; + } catch (e) { + winstonLogger.error(e.stack); + return null; + } +} + +function filterModuleSecurityKeys(moduleDoc) { + if (!moduleDoc || typeof moduleDoc !== "object") return moduleDoc; + const { securityAndStability, ...moduleWithoutSecurity } = moduleDoc; + return moduleWithoutSecurity; +} + +async function getYamlFileNames() { + const files = await fs.readdir(docsDir); + return files.filter((f) => f.endsWith(".yaml")).map((f) => f.slice(0, -5)); +} + +module.exports = { + docsDir, + loadDocFile, + filterModuleSecurityKeys, + getYamlFileNames, +}; diff --git a/src/utils/normalize.js b/src/utils/normalize.js new file mode 100644 index 0000000..7311295 --- /dev/null +++ b/src/utils/normalize.js @@ -0,0 +1,6 @@ +exports.normalizeTag = function (tag) { + return tag + .trim() + 
.toLowerCase() + .replace(/[\s-]+/g, " "); +};