Skip to content

Commit 2cef2d0

Browse files
author
The No Hands Company
committed
feat: form backend, build pipeline, 2FA, site transfer, bulk export/import, staging UI
Form submission backend (routes/forms.ts) - POST /api/forms/:domain/:formName — receive form POSTs from any HTML form Accepts application/x-www-form-urlencoded and application/json CORS headers allow cross-origin submissions from the site domain Honeypot + spam scoring (URL density, spam keywords, invalid email) _gotcha, _redirect, _subject meta-fields stripped before storage - GET /api/sites/:id/forms — list submissions with form breakdown - GET /api/sites/:id/forms/:name/export — CSV download - PATCH/DELETE /api/sites/:id/forms/:id — mark read/flag/delete - Email notification to site owner on non-spam submission - schema/forms.ts + migration SQL Build pipeline (routes/builds.ts) - POST /api/sites/:id/builds — clone git repo, install deps, run build, deploy output Auto-detects npm/yarn/pnpm from lock files Runs in isolated temp dir, cleaned up after build Full build log stored in DB, non-blocking (202 response, poll for status) Sends deploy success/failed email on completion - GET /api/sites/:id/builds — list builds (log size only in list) - GET /api/sites/:id/builds/:id — full build details + log - DELETE /api/sites/:id/builds/:id — cancel queued build - schema/builds.ts + migration SQL Two-factor authentication (routes/twoFactor.ts) - POST /auth/2fa/setup — generate TOTP secret + QR code (base64 data URL) Compatible with Google Authenticator, Authy, 1Password, Bitwarden - POST /auth/2fa/verify — confirm TOTP code and enable 2FA Returns 10 backup codes (hashed in DB, shown once) - POST /auth/2fa/validate — verify TOTP or backup code (for login flow) Backup codes are consumed on use (one-time) - POST /auth/2fa/disable — disable 2FA (requires valid TOTP) - POST /auth/2fa/backup — regenerate backup codes (requires valid TOTP) - GET /auth/2fa/status — check if 2FA is enabled - schema/totp.ts + migration SQL Site transfer (routes/transfer.ts) - POST /api/sites/:id/transfer — initiate transfer to email address Recipient must have a FedHost account Returns signed 
24-hour token - POST /api/sites/:id/transfer/accept — accept transfer (authenticated) Email-address matching enforced Bulk export (routes/transfer.ts) - GET /api/sites/:id/export — export JSON manifest with: Site metadata, active deployment files (presigned 2-hour download URLs), redirect rules, custom headers Bulk import (routes/transfer.ts) - POST /api/sites/import — recreate site from export manifest Downloads and re-uploads all files to local storage Imports redirect rules and custom headers Returns count of imported/skipped files Staging deploy UI (DeploySite.tsx) - Environment selector (Production / Staging) above deploy button - Selected environment sent with deploy request - Different colour coding per environment Schema + migrations - forms.ts: formSubmissionsTable with JSONB data, spam_score, flagged, read - builds.ts: buildJobsTable with git_url, build_command, output_dir, log - totp.ts: totpCredentialsTable with secret, backup_codes JSONB - Migration SQL updated with all three tables + build_status enum
1 parent bd7f716 commit 2cef2d0

File tree

13 files changed

+1190
-2
lines changed

13 files changed

+1190
-2
lines changed

artifacts/api-server/package.json

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,10 +31,13 @@
3131
"ioredis": "^5.6.1",
3232
"nodemailer": "^6.9.16",
3333
"openid-client": "^6.8.2",
34+
"otplib": "^12.0.1",
3435
"pino": "^10.3.1",
3536
"pino-http": "^11.0.0",
3637
"prom-client": "^15.1.3",
38+
"qrcode": "^1.5.4",
3739
"rate-limit-redis": "^4.2.0",
40+
"simple-git": "^3.27.0",
3841
"uuid": "^13.0.0"
3942
},
4043
"devDependencies": {
@@ -49,6 +52,7 @@
4952
"@types/ioredis": "^4.28.10",
5053
"vitest": "^3.2.4",
5154
"@vitest/coverage-v8": "^3.2.0",
52-
"@types/nodemailer": "^6.4.17"
55+
"@types/nodemailer": "^6.4.17",
56+
"@types/qrcode": "^1.5.5"
5357
}
5458
}

artifacts/api-server/src/lib/email.ts

Lines changed: 26 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -235,7 +235,32 @@ export async function emailInvitation(opts: {
235235
return sendMail({ to: opts.to, subject, html, text });
236236
}
237237

238-
export async function emailSiteDeleted(opts: {
238+
export async function emailFormSubmission(opts: {
239+
to: string;
240+
siteName: string;
241+
domain: string;
242+
formName: string;
243+
data: Record<string, string>;
244+
}) {
245+
const subject = `📬 New ${opts.formName} submission on ${opts.siteName}`;
246+
const rows = Object.entries(opts.data)
247+
.filter(([k]) => !k.startsWith("_"))
248+
.map(([k, v]) => `<tr><td style="padding:6px 12px;color:#9ca3af;border-right:1px solid rgba(255,255,255,.06)">${k}</td><td style="padding:6px 12px;color:#e4e4f0">${v}</td></tr>`)
249+
.join("");
250+
251+
const html = layout(`
252+
<div class="card">
253+
<h1>New form submission</h1>
254+
<p>You received a new <strong style="color:#fff">${opts.formName}</strong> submission on <strong style="color:#fff">${opts.domain}</strong>.</p>
255+
<table style="width:100%;border-collapse:collapse;border:1px solid rgba(255,255,255,.06);border-radius:8px;overflow:hidden;margin-top:16px">
256+
${rows}
257+
</table>
258+
</div>
259+
`, subject);
260+
const text = `New ${opts.formName} submission on ${opts.domain}:\n` +
261+
Object.entries(opts.data).filter(([k]) => !k.startsWith("_")).map(([k,v]) => `${k}: ${v}`).join("\n");
262+
return sendMail({ to: opts.to, subject, html, text });
263+
}
239264
to: string;
240265
siteName: string;
241266
domain: string;
Lines changed: 303 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,303 @@
1+
/**
2+
* Build pipeline — run npm/yarn/pnpm build from a Git repository.
3+
*
4+
* Operators can connect a Git repo and trigger builds that automatically:
5+
* 1. Clone / pull the repo at the specified branch
6+
* 2. Install dependencies (npm/yarn/pnpm auto-detected)
7+
* 3. Run the build command
8+
* 4. Deploy the output directory as a new site deployment
9+
*
10+
* Builds run in an isolated temp directory and are cleaned up after.
11+
* The full build log is streamed to the build_jobs table.
12+
*
13+
* Routes:
14+
* POST /api/sites/:id/builds — trigger a build
15+
* GET /api/sites/:id/builds — list build history
16+
* GET /api/sites/:id/builds/:buildId — get build details + log
17+
* DELETE /api/sites/:id/builds/:buildId — cancel a queued build
18+
*/
19+
20+
import { execFile } from "child_process";
import crypto from "crypto";
import fs from "fs";
import os from "os";
import path from "path";
import { promisify } from "util";

import { Router, type IRouter, type Request, type Response } from "express";
import { z } from "zod/v4";
import { eq, and, desc, count, sql } from "drizzle-orm";
import mime from "mime-types";

import { db, sitesTable, buildJobsTable, siteFilesTable, siteDeploymentsTable } from "@workspace/db";
import { asyncHandler, AppError } from "../lib/errors";
import { writeLimiter, deployLimiter } from "../middleware/rateLimiter";
import { storage } from "../lib/storageProvider";
import { emailDeploySuccess, emailDeployFailed } from "../lib/email";
import { invalidateSiteCache } from "../lib/domainCache";
import logger from "../lib/logger";
37+
38+
// Express sub-router; mounted by the main app under /api.
const router: IRouter = Router();
// Promisified execFile — runs without a shell, so arguments are passed verbatim.
const execFileAsync = promisify(execFile);

// Request body for POST /sites/:id/builds.
// gitUrl may be omitted when the site already has one configured
// (fallback handled in the route handler).
const BuildTriggerBody = z.object({
  gitUrl: z.string().url().optional(),
  gitBranch: z.string().default("main"),
  buildCommand: z.string().max(500).default("npm run build"),
  outputDir: z.string().max(200).default("dist"),
  environment: z.enum(["production", "staging", "preview"]).default("production"),
});
48+
49+
// ── Build runner ──────────────────────────────────────────────────────────────
50+
51+
async function appendLog(buildId: number, text: string): Promise<void> {
52+
await db.execute(
53+
`UPDATE build_jobs SET log = COALESCE(log, '') || $1 WHERE id = $2`
54+
.replace("$1", `'${text.replace(/'/g, "''")}'`)
55+
.replace("$2", String(buildId))
56+
);
57+
}
58+
59+
async function runBuild(buildId: number, siteId: number, opts: {
60+
gitUrl: string;
61+
gitBranch: string;
62+
buildCommand: string;
63+
outputDir: string;
64+
environment: string;
65+
userId: string;
66+
userEmail?: string;
67+
siteName: string;
68+
siteDomain: string;
69+
}): Promise<void> {
70+
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "fedhost-build-"));
71+
const log = (msg: string) => {
72+
process.stdout.write(msg + "\n");
73+
appendLog(buildId, msg + "\n").catch(() => {});
74+
};
75+
76+
try {
77+
await db.update(buildJobsTable).set({ status: "running", startedAt: new Date() }).where(eq(buildJobsTable.id, buildId));
78+
79+
// ── Step 1: Clone ────────────────────────────────────────────────────────
80+
log(`[build] Cloning ${opts.gitUrl}@${opts.gitBranch}...`);
81+
await execFileAsync("git", ["clone", "--depth=1", "--branch", opts.gitBranch, opts.gitUrl, tmpDir], {
82+
timeout: 120_000,
83+
env: { ...process.env, GIT_TERMINAL_PROMPT: "0" },
84+
});
85+
log("[build] Clone complete");
86+
87+
// ── Step 2: Install ──────────────────────────────────────────────────────
88+
const hasYarnLock = fs.existsSync(path.join(tmpDir, "yarn.lock"));
89+
const hasPnpmLock = fs.existsSync(path.join(tmpDir, "pnpm-lock.yaml"));
90+
const installCmd = hasPnpmLock ? ["pnpm", ["install", "--frozen-lockfile"]]
91+
: hasYarnLock ? ["yarn", ["install", "--frozen-lockfile"]]
92+
: ["npm", ["ci", "--prefer-offline"]];
93+
94+
log(`[build] Installing dependencies (${installCmd[0]})...`);
95+
await execFileAsync(installCmd[0] as string, installCmd[1] as string[], {
96+
cwd: tmpDir, timeout: 300_000, env: { ...process.env, NODE_ENV: "production" },
97+
});
98+
99+
// ── Step 3: Build ────────────────────────────────────────────────────────
100+
const [cmd, ...args] = opts.buildCommand.split(" ");
101+
log(`[build] Running: ${opts.buildCommand}`);
102+
const { stdout, stderr } = await execFileAsync(cmd!, args, {
103+
cwd: tmpDir, timeout: 600_000,
104+
env: { ...process.env, NODE_ENV: "production", CI: "true" },
105+
});
106+
if (stdout) log(stdout);
107+
if (stderr) log(stderr);
108+
109+
// ── Step 4: Upload output files ─────────────────────────────────────────
110+
const outDir = path.join(tmpDir, opts.outputDir);
111+
if (!fs.existsSync(outDir)) throw new Error(`Output directory '${opts.outputDir}' not found after build`);
112+
113+
const allFiles = walkDir(outDir);
114+
log(`[build] Uploading ${allFiles.length} files...`);
115+
116+
// Create deployment record
117+
const [site] = await db.select().from(sitesTable).where(eq(sitesTable.id, siteId));
118+
const [latestDep] = await db.select({ version: siteDeploymentsTable.version })
119+
.from(siteDeploymentsTable).where(eq(siteDeploymentsTable.siteId, siteId))
120+
.orderBy(desc(siteDeploymentsTable.version)).limit(1);
121+
const version = (latestDep?.version ?? 0) + 1;
122+
123+
const deployment = await db.transaction(async (tx) => {
124+
// Mark existing active deployment
125+
await tx.update(siteDeploymentsTable)
126+
.set({ status: "failed" }) // temporarily mark as previous
127+
.where(and(eq(siteDeploymentsTable.siteId, siteId), eq(siteDeploymentsTable.status, "active")));
128+
129+
const [dep] = await tx.insert(siteDeploymentsTable).values({
130+
siteId, version, deployedBy: `build:${buildId}`,
131+
environment: opts.environment, status: "pending",
132+
fileCount: allFiles.length, totalSizeMb: 0,
133+
}).returning();
134+
return dep;
135+
});
136+
137+
// Upload files
138+
let totalBytes = 0;
139+
for (const relPath of allFiles) {
140+
const absPath = path.join(outDir, relPath);
141+
const stat = fs.statSync(absPath);
142+
const ct = (mime.lookup(relPath) || "application/octet-stream") as string;
143+
const hash = crypto.createHash("sha256").update(fs.readFileSync(absPath)).digest("hex");
144+
145+
const { uploadUrl, objectPath } = await storage.getUploadUrl({ contentType: ct, ttlSec: 900 });
146+
await fetch(uploadUrl, { method: "PUT", headers: { "Content-Type": ct }, body: fs.readFileSync(absPath) });
147+
148+
await db.insert(siteFilesTable).values({
149+
siteId, filePath: relPath, objectPath, contentType: ct,
150+
sizeBytes: stat.size, contentHash: hash, deploymentId: deployment.id,
151+
});
152+
totalBytes += stat.size;
153+
}
154+
155+
// Activate deployment
156+
await db.transaction(async (tx) => {
157+
await tx.update(siteDeploymentsTable)
158+
.set({ status: "active" })
159+
.where(eq(siteDeploymentsTable.id, deployment.id));
160+
await tx.update(sitesTable)
161+
.set({ storageUsedMb: totalBytes / (1024 * 1024) })
162+
.where(eq(sitesTable.id, siteId));
163+
});
164+
165+
invalidateSiteCache(siteId);
166+
167+
await db.update(buildJobsTable)
168+
.set({ status: "success", finishedAt: new Date() })
169+
.where(eq(buildJobsTable.id, buildId));
170+
171+
log(`[build] ✓ Deployed ${allFiles.length} files (${(totalBytes / 1024 / 1024).toFixed(1)}MB) as v${version}`);
172+
173+
if (opts.userEmail) {
174+
emailDeploySuccess({ to: opts.userEmail, siteName: opts.siteName, domain: opts.siteDomain, version, fileCount: allFiles.length, deployedAt: new Date().toUTCString() }).catch(() => {});
175+
}
176+
177+
} catch (err: any) {
178+
log(`[build] ✗ Build failed: ${err.message}`);
179+
await db.update(buildJobsTable)
180+
.set({ status: "failed", finishedAt: new Date() })
181+
.where(eq(buildJobsTable.id, buildId));
182+
183+
if (opts.userEmail) {
184+
emailDeployFailed({ to: opts.userEmail, siteName: opts.siteName, domain: opts.siteDomain, error: err.message }).catch(() => {});
185+
}
186+
} finally {
187+
fs.rmSync(tmpDir, { recursive: true, force: true });
188+
}
189+
}
190+
191+
function walkDir(dir: string, base = dir): string[] {
192+
const results: string[] = [];
193+
for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
194+
const full = path.join(dir, entry.name);
195+
if (entry.isDirectory()) {
196+
results.push(...walkDir(full, base));
197+
} else {
198+
results.push(path.relative(base, full));
199+
}
200+
}
201+
return results;
202+
}
203+
204+
// ── Routes ────────────────────────────────────────────────────────────────────
205+
206+
router.post("/sites/:id/builds", deployLimiter, asyncHandler(async (req: Request, res: Response) => {
207+
if (!req.isAuthenticated()) throw AppError.unauthorized();
208+
const siteId = parseInt(req.params.id as string, 10);
209+
if (isNaN(siteId)) throw AppError.badRequest("Invalid site ID");
210+
211+
const [site] = await db.select().from(sitesTable).where(eq(sitesTable.id, siteId));
212+
if (!site) throw AppError.notFound("Site not found");
213+
if (site.ownerId !== req.user.id) throw AppError.forbidden();
214+
215+
const parsed = BuildTriggerBody.safeParse(req.body);
216+
if (!parsed.success) throw AppError.badRequest(parsed.error.message);
217+
218+
const gitUrl = parsed.data.gitUrl ?? (site as any).gitUrl;
219+
if (!gitUrl) throw AppError.badRequest("No Git URL configured for this site. Provide gitUrl in request body.", "NO_GIT_URL");
220+
221+
// Check for already-running build
222+
const [running] = await db.select({ id: buildJobsTable.id }).from(buildJobsTable)
223+
.where(and(eq(buildJobsTable.siteId, siteId), eq(buildJobsTable.status, "running")));
224+
if (running) throw AppError.conflict("A build is already running for this site");
225+
226+
const [job] = await db.insert(buildJobsTable).values({
227+
siteId, triggeredBy: req.user.id,
228+
gitUrl, gitBranch: parsed.data.gitBranch,
229+
buildCommand: parsed.data.buildCommand,
230+
outputDir: parsed.data.outputDir,
231+
status: "queued",
232+
}).returning();
233+
234+
// Run build asynchronously — respond immediately
235+
res.status(202).json({ buildId: job.id, status: "queued", message: "Build started. Poll GET /api/sites/:id/builds/:buildId for status." });
236+
237+
runBuild(job.id, siteId, {
238+
gitUrl, gitBranch: parsed.data.gitBranch,
239+
buildCommand: parsed.data.buildCommand,
240+
outputDir: parsed.data.outputDir,
241+
environment: parsed.data.environment,
242+
userId: req.user.id, userEmail: req.user.email,
243+
siteName: site.name, siteDomain: site.domain,
244+
}).catch(err => logger.error({ err, buildId: job.id }, "[build] Unhandled error"));
245+
}));
246+
247+
router.get("/sites/:id/builds", asyncHandler(async (req: Request, res: Response) => {
248+
if (!req.isAuthenticated()) throw AppError.unauthorized();
249+
const siteId = parseInt(req.params.id as string, 10);
250+
if (isNaN(siteId)) throw AppError.badRequest("Invalid site ID");
251+
252+
const [site] = await db.select({ ownerId: sitesTable.ownerId }).from(sitesTable).where(eq(sitesTable.id, siteId));
253+
if (!site) throw AppError.notFound("Site not found");
254+
if (site.ownerId !== req.user.id) throw AppError.forbidden();
255+
256+
const limit = Math.min(50, Math.max(1, parseInt((req.query.limit as string) || "20", 10)));
257+
const page = Math.max(1, parseInt((req.query.page as string) || "1", 10));
258+
259+
const [{ total }] = await db.select({ total: count() }).from(buildJobsTable).where(eq(buildJobsTable.siteId, siteId));
260+
const builds = await db.select().from(buildJobsTable)
261+
.where(eq(buildJobsTable.siteId, siteId))
262+
.orderBy(desc(buildJobsTable.createdAt))
263+
.limit(limit).offset((page - 1) * limit);
264+
265+
// Strip log from list view for bandwidth
266+
res.json({ data: builds.map(b => ({ ...b, log: b.log ? `${b.log.length} chars` : null })), meta: { total: Number(total), page, limit } });
267+
}));
268+
269+
router.get("/sites/:id/builds/:buildId", asyncHandler(async (req: Request, res: Response) => {
270+
if (!req.isAuthenticated()) throw AppError.unauthorized();
271+
const siteId = parseInt(req.params.id as string, 10);
272+
const buildId = parseInt(req.params.buildId as string, 10);
273+
if (isNaN(siteId) || isNaN(buildId)) throw AppError.badRequest("Invalid ID");
274+
275+
const [site] = await db.select({ ownerId: sitesTable.ownerId }).from(sitesTable).where(eq(sitesTable.id, siteId));
276+
if (!site) throw AppError.notFound("Site not found");
277+
if (site.ownerId !== req.user.id) throw AppError.forbidden();
278+
279+
const [build] = await db.select().from(buildJobsTable)
280+
.where(and(eq(buildJobsTable.id, buildId), eq(buildJobsTable.siteId, siteId)));
281+
if (!build) throw AppError.notFound("Build not found");
282+
283+
res.json(build);
284+
}));
285+
286+
router.delete("/sites/:id/builds/:buildId", writeLimiter, asyncHandler(async (req: Request, res: Response) => {
287+
if (!req.isAuthenticated()) throw AppError.unauthorized();
288+
const siteId = parseInt(req.params.id as string, 10);
289+
const buildId = parseInt(req.params.buildId as string, 10);
290+
if (isNaN(siteId) || isNaN(buildId)) throw AppError.badRequest("Invalid ID");
291+
292+
const [site] = await db.select({ ownerId: sitesTable.ownerId }).from(sitesTable).where(eq(sitesTable.id, siteId));
293+
if (!site) throw AppError.notFound("Site not found");
294+
if (site.ownerId !== req.user.id) throw AppError.forbidden();
295+
296+
await db.update(buildJobsTable)
297+
.set({ status: "cancelled", finishedAt: new Date() })
298+
.where(and(eq(buildJobsTable.id, buildId), eq(buildJobsTable.siteId, siteId), eq(buildJobsTable.status, "queued")));
299+
300+
res.sendStatus(204);
301+
}));
302+
303+
export default router;

0 commit comments

Comments
 (0)