import crypto from "node:crypto";
import fs from "node:fs";
import path from "node:path";
import url from "node:url";
import { spawn } from "node:child_process";

import express from "express";
import multer from "multer";

const __filename = url.fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const projectRoot = path.resolve(__dirname, "..");
const distDir = path.join(projectRoot, "dist");
const indexHtmlPath = path.join(distDir, "index.html");

const privateUploadToken = process.env.PRIVATE_UPLOAD_TOKEN || "";
const privateUploadMaxBytes = Number(process.env.PRIVATE_UPLOAD_MAX_BYTES || 25 * 1024 * 1024);
const privateUploadStyle = process.env.PRIVATE_UPLOAD_STYLE || "if.dave.v1.2";
const revoiceRepoRoot = path.resolve(projectRoot, "..", "..");

// Escape a value for safe interpolation into HTML text and attribute contexts.
function escapeHtml(value) {
  return String(value || "")
    .replaceAll("&", "&amp;")
    .replaceAll("<", "&lt;")
    .replaceAll(">", "&gt;")
    .replaceAll('"', "&quot;")
    .replaceAll("'", "&#39;");
}

function ensureDir(dirPath) {
  fs.mkdirSync(dirPath, { recursive: true });
}

function looksLikeUuid(value) {
  return /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(String(value || ""));
}

function jobJsonPath(jobsDir, jobId) {
  return path.join(jobsDir, `${jobId}.json`);
}

function readJob(jobsDir, jobId) {
  const p = jobJsonPath(jobsDir, jobId);
  if (!fs.existsSync(p)) return null;
  try {
    return JSON.parse(fs.readFileSync(p, "utf8"));
  } catch {
    return null;
  }
}

function writeJob(jobsDir, job) {
  const p = jobJsonPath(jobsDir, job.id);
  fs.writeFileSync(p, JSON.stringify(job, null, 2) + "\n", "utf8");
}

async function sha256File(filePath) {
  return await new Promise((resolve, reject) => {
    const h = crypto.createHash("sha256");
    const s = fs.createReadStream(filePath);
    s.on("error", reject);
    s.on("data", (chunk) => h.update(chunk));
    s.on("end", () => resolve(h.digest("hex")));
  });
}

// Run a child process and resolve with its exit code plus bounded stdout/stderr.
function runProcess(command, args, { cwd, env }) {
  return new Promise((resolve) => {
    const child = spawn(command, args, {
      cwd,
      env,
      stdio: ["ignore", "pipe", "pipe"],
    });
    let stdout = "";
    let stderr = "";
    child.stdout.on("data", (d) => {
      stdout += d.toString("utf8");
      if (stdout.length > 256_000) stdout = stdout.slice(-256_000);
    });
    child.stderr.on("data", (d) => {
      stderr += d.toString("utf8");
      if (stderr.length > 256_000) stderr = stderr.slice(-256_000);
    });
    child.on("error", (err) => {
      const msg = err?.message ? String(err.message) : String(err);
      resolve({ code: 127, stdout, stderr: (stderr ? `${stderr}\n` : "") + msg });
    });
    child.on("close", (code) => resolve({ code: code ?? 0, stdout, stderr }));
  });
}

// Generate the dossier with the revoice pipeline, then preflight the output.
// A preflight exit code of 2 is treated as warnings rather than a hard failure.
async function generateShadowDossier({ inputPath, outputPath }) {
  const revoiceModule = path.join(revoiceRepoRoot, "src", "revoice");
  if (!fs.existsSync(revoiceModule)) {
    throw new Error(`Missing revoice pipeline at ${revoiceModule}`);
  }
  const baseEnv = {
    ...process.env,
    PYTHONPATH: path.join(revoiceRepoRoot, "src"),
  };
  const gen = await runProcess(
    "python3",
    ["-m", "revoice", "generate", "--style", privateUploadStyle, "--input", inputPath, "--output", outputPath],
    { cwd: revoiceRepoRoot, env: baseEnv }
  );
  if (gen.code !== 0) {
    throw new Error(`revoice generate failed (code ${gen.code}): ${gen.stderr || gen.stdout}`);
  }
  const preflight = await runProcess(
    "python3",
    ["-m", "revoice", "preflight", "--style", privateUploadStyle, "--input", outputPath, "--source", inputPath],
    { cwd: revoiceRepoRoot, env: baseEnv }
  );
  const warnings = preflight.code === 0 ?
"" : preflight.stderr || preflight.stdout; if (preflight.code !== 0 && preflight.code !== 2) { throw new Error(`revoice preflight failed (code ${preflight.code}): ${preflight.stderr || preflight.stdout}`); } return { warnings }; } function pickPhrases(input) { const text = String(input || "").replace(/\r\n?/g, "\n"); const lines = text .split("\n") .map((l) => l.trim()) .filter(Boolean) .slice(0, 200); const interesting = []; const needles = ["must", "should", "require", "required", "ensure", "enforce", "policy", "control", "audit", "compliance"]; for (const line of lines) { const lower = line.toLowerCase(); if (needles.some((n) => lower.includes(n))) interesting.push(line); if (interesting.length >= 4) break; } if (interesting.length) return interesting; return lines.slice(0, 3); } function generateRoastText(content) { const trimmed = String(content || "").trim(); const phrases = pickPhrases(trimmed); const bullets = phrases.map((p) => `- ${p.length > 120 ? `${p.slice(0, 117)}…` : p}`).join("\n"); return [ "We love the ambition here and are directionally aligned with the idea of \"secure rollout\" as long as we define secure as \"documented\" and rollout as \"phased.\"", "", "Key risk: this reads like a control narrative optimized for sign-off, not for the Friday-afternoon pull request that actually ships the code.", "", "Observed control theater (excerpt):", bullets ? bullets : "- (no extractable claims detected)", "", "Recommendation: convert every \"should\" into an owner, a gate (PR/CI/access), and a stop condition. Otherwise this becomes an alignment session that reproduces itself indefinitely.", ].join("\n"); } function main() { const port = Number(process.env.PORT || 8080); const app = express(); const dataDir = path.join(projectRoot, "data"); const uploadsDir = path.join(dataDir, "uploads"); const outputsDir = path.join(dataDir, "outputs"); const jobsDir = path.join(dataDir, "jobs"); ensureDir(uploadsDir); ensureDir(outputsDir); ensureDir(jobsDir); app.disable("x-powered-by"); app.use(express.json({ limit: "256kb" })); app.get("/healthz", (_req, res) => { res.status(200).json({ ok: true }); }); app.post("/api/roast", (req, res) => { const content = String(req.body?.content ?? ""); if (!content.trim()) return res.status(400).json({ text: "Missing content" }); if (content.length > 20_000) return res.status(413).json({ text: "Content too large" }); return res.status(200).json({ text: generateRoastText(content) }); }); const privateUploadEnabled = Boolean(privateUploadToken.trim()); const privateGuard = (req, res, next) => { if (!privateUploadEnabled) return res.status(404).type("text/plain").send("Not found"); if (req.params?.token !== privateUploadToken) return res.status(404).type("text/plain").send("Not found"); return next(); }; const upload = multer({ storage: multer.diskStorage({ destination: (_req, _file, cb) => cb(null, uploadsDir), filename: (req, file, cb) => { const id = crypto.randomUUID(); req._jobId = id; const ext = path.extname(file.originalname || "").slice(0, 12).toLowerCase(); cb(null, `${id}${ext}`); }, }), limits: { fileSize: privateUploadMaxBytes, files: 1 }, }); app.get("/private/:token", privateGuard, (req, res) => { const token = req.params.token; res .status(200) .type("text/html; charset=utf-8") .send( [ "", "", "", "", "Private Upload · Shadow Dossier", "", "", "

Private dossier upload

", `

Style: ${escapeHtml(privateUploadStyle)} · Max: ${escapeHtml( String(privateUploadMaxBytes) )} bytes

`, "
", "
", "

", "

", "

Supported: PDF/MD/TXT. Output: Markdown shadow dossier.

", "
", "
", "", ].join("") ); }); app.get("/private/:token/job/:jobId", privateGuard, (req, res) => { const jobId = String(req.params.jobId || ""); if (!looksLikeUuid(jobId)) return res.status(404).type("text/plain").send("Not found"); const job = readJob(jobsDir, jobId); if (!job) return res.status(404).type("text/plain").send("Not found"); const status = String(job.status || "unknown"); const isDone = status === "done" || status === "done_with_warnings"; const isError = status === "error"; const token = req.params.token; const refresh = isDone || isError ? "" : ""; const downloadLink = isDone ? `

  // Job status page; refreshes itself until the job finishes or errors.
  app.get("/private/:token/job/:jobId", privateGuard, (req, res) => {
    const jobId = String(req.params.jobId || "");
    if (!looksLikeUuid(jobId)) return res.status(404).type("text/plain").send("Not found");
    const job = readJob(jobsDir, jobId);
    if (!job) return res.status(404).type("text/plain").send("Not found");
    const status = String(job.status || "unknown");
    const isDone = status === "done" || status === "done_with_warnings";
    const isError = status === "error";
    const token = req.params.token;
    const refresh = isDone || isError ? "" : '<meta http-equiv="refresh" content="3" />';
    const downloadLink = isDone
      ? `<p><a href="/private/${encodeURIComponent(token)}/download/${encodeURIComponent(jobId)}">Download shadow dossier</a></p>`
      : "";
    const sourceLink = job.sourcePath
      ? `<p><a href="/private/${encodeURIComponent(token)}/source/${encodeURIComponent(jobId)}">Download source</a></p>`
      : "";
    const warnings = job.warnings ? `<pre>${escapeHtml(job.warnings)}</pre>` : "";
    const error = job.error ? `<pre>${escapeHtml(job.error)}</pre>` : "";
    res
      .status(200)
      .type("text/html; charset=utf-8")
      .send(
        [
          "<!doctype html>",
          "<html>",
          "<head>",
          '<meta charset="utf-8" />',
          refresh,
          "<title>Job · Shadow Dossier</title>",
          "</head>",
          "<body>",
          "<h1>Shadow dossier job</h1>",
          `<p>Status: ${escapeHtml(status)}</p>`,
          `<p>Job ID: ${escapeHtml(jobId)}</p>`,
          job.originalFilename ? `<p>Source: ${escapeHtml(job.originalFilename)}</p>` : "",
          job.sourceSha256 ? `<p>Source sha256: ${escapeHtml(job.sourceSha256)}</p>` : "",
          job.outputSha256 ? `<p>Output sha256: ${escapeHtml(job.outputSha256)}</p>` : "",
          downloadLink,
          sourceLink,
          warnings ? "<h2>Warnings</h2>" + warnings : "",
          isError ? "<h2>Error</h2>" + error : "",
          `<p><a href="/private/${encodeURIComponent(token)}">Back to upload</a></p>`,
          "</body>",
          "</html>",
        ].join("")
      );
  });

  app.get("/private/:token/download/:jobId", privateGuard, (req, res) => {
    const jobId = String(req.params.jobId || "");
    if (!looksLikeUuid(jobId)) return res.status(404).type("text/plain").send("Not found");
    const job = readJob(jobsDir, jobId);
    if (!job) return res.status(404).type("text/plain").send("Not found");
    if (!job.outputPath) return res.status(409).type("text/plain").send("Not ready");
    const abs = path.resolve(projectRoot, job.outputPath);
    // Refuse paths that resolve outside the outputs directory.
    if (!abs.startsWith(outputsDir + path.sep)) return res.status(400).type("text/plain").send("Bad path");
    if (!fs.existsSync(abs)) return res.status(404).type("text/plain").send("Not found");
    const baseName = (job.originalFilename || "dossier").replace(/[^A-Za-z0-9._-]+/g, "-").slice(0, 60);
    res.download(abs, `${baseName}.shadow.dave.md`);
  });

  app.get("/private/:token/source/:jobId", privateGuard, (req, res) => {
    const jobId = String(req.params.jobId || "");
    if (!looksLikeUuid(jobId)) return res.status(404).type("text/plain").send("Not found");
    const job = readJob(jobsDir, jobId);
    if (!job) return res.status(404).type("text/plain").send("Not found");
    if (!job.sourcePath) return res.status(404).type("text/plain").send("Not found");
    const abs = path.resolve(projectRoot, job.sourcePath);
    if (!abs.startsWith(uploadsDir + path.sep)) return res.status(400).type("text/plain").send("Bad path");
    if (!fs.existsSync(abs)) return res.status(404).type("text/plain").send("Not found");
    const baseName = (job.originalFilename || "source").replace(/[^A-Za-z0-9._-]+/g, "-").slice(0, 80);
    res.download(abs, baseName);
  });

  app.post("/api/private/:token/upload", privateGuard, upload.single("file"), async (req, res) => {
    const jobId = req._jobId || crypto.randomUUID();
    const file = req.file;
    if (!file?.path) return res.status(400).type("text/plain").send("Missing file");
    const relSourcePath = path.relative(projectRoot, file.path);
    const relOutputPath = path.join("data", "outputs", `${jobId}.shadow.dave.md`);
    const absOutputPath = path.resolve(projectRoot, relOutputPath);
    const now = new Date().toISOString();
    const job = {
      id: jobId,
      status: "processing",
      createdAt: now,
      originalFilename: file.originalname || "",
      sourcePath: relSourcePath,
      outputPath: relOutputPath,
      style: privateUploadStyle,
      sourceBytes: Number(file.size || 0),
      sourceSha256: "",
      outputSha256: "",
      warnings: "",
      error: "",
    };
    try {
      job.sourceSha256 = await sha256File(file.path);
    } catch (e) {
      job.status = "error";
      job.error = String(e?.message || e || "hash_failed");
      writeJob(jobsDir, job);
      return res.status(500).type("text/plain").send("Failed to hash upload");
    }
    writeJob(jobsDir, job);
    // Generate in the background; the redirect below returns immediately.
    void (async () => {
      try {
        const { warnings } = await generateShadowDossier({ inputPath: file.path, outputPath: absOutputPath });
        job.warnings = warnings ? warnings.trim() : "";
        job.outputSha256 = await sha256File(absOutputPath);
        job.status = job.warnings ? "done_with_warnings" : "done";
        writeJob(jobsDir, job);
      } catch (e) {
        job.status = "error";
        job.error = String(e?.message || e || "generation_failed");
        writeJob(jobsDir, job);
      }
    })();
    res.redirect(303, `/private/${encodeURIComponent(req.params.token)}/job/${encodeURIComponent(jobId)}`);
  });

  if (fs.existsSync(distDir) && fs.existsSync(indexHtmlPath)) {
    app.use(express.static(distDir, { fallthrough: true }));
    app.get("*", (_req, res) => {
      res.setHeader("Content-Type", "text/html; charset=utf-8");
      res.status(200).sendFile(indexHtmlPath);
    });
  } else {
    app.get("*", (_req, res) => {
      res
        .status(503)
        .type("text/plain")
        .send("red-team site is not built yet. Run `npm install` then `npm run build`.");
    });
  }

  app.listen(port, "0.0.0.0", () => {
    // eslint-disable-next-line no-console
    console.log(`red-team site listening on http://0.0.0.0:${port}`);
  });
}

main();