|
| 1 | +/** |
| 2 | + * Build pipeline — run npm/yarn/pnpm build from a Git repository. |
| 3 | + * |
| 4 | + * Operators can connect a Git repo and trigger builds that automatically: |
| 5 | + * 1. Clone / pull the repo at the specified branch |
| 6 | + * 2. Install dependencies (npm/yarn/pnpm auto-detected) |
| 7 | + * 3. Run the build command |
| 8 | + * 4. Deploy the output directory as a new site deployment |
| 9 | + * |
| 10 | + * Builds run in an isolated temp directory and are cleaned up after. |
| 11 | + * The full build log is streamed to the build_jobs table. |
| 12 | + * |
| 13 | + * Routes: |
| 14 | + * POST /api/sites/:id/builds — trigger a build |
| 15 | + * GET /api/sites/:id/builds — list build history |
| 16 | + * GET /api/sites/:id/builds/:buildId — get build details + log |
| 17 | + * DELETE /api/sites/:id/builds/:buildId — cancel a queued build |
| 18 | + */ |
| 19 | + |
// Node built-ins
import { execFile } from "child_process";
import crypto from "crypto";
import fs from "fs";
import os from "os";
import path from "path";
import { promisify } from "util";

// Third-party
import { Router, type IRouter, type Request, type Response } from "express";
import mime from "mime-types";
import { z } from "zod/v4";

// Workspace / internal
import { db, sitesTable, buildJobsTable, siteFilesTable, siteDeploymentsTable } from "@workspace/db";
import { and, count, desc, eq, sql } from "drizzle-orm";
import { invalidateSiteCache } from "../lib/domainCache";
import { emailDeploySuccess, emailDeployFailed } from "../lib/email";
import { asyncHandler, AppError } from "../lib/errors";
import logger from "../lib/logger";
import { storage } from "../lib/storageProvider";
import { writeLimiter, deployLimiter } from "../middleware/rateLimiter";
| 37 | + |
| 38 | +const router: IRouter = Router(); |
| 39 | +const execFileAsync = promisify(execFile); |
| 40 | + |
| 41 | +const BuildTriggerBody = z.object({ |
| 42 | + gitUrl: z.string().url().optional(), |
| 43 | + gitBranch: z.string().default("main"), |
| 44 | + buildCommand: z.string().max(500).default("npm run build"), |
| 45 | + outputDir: z.string().max(200).default("dist"), |
| 46 | + environment: z.enum(["production", "staging", "preview"]).default("production"), |
| 47 | +}); |
| 48 | + |
| 49 | +// ── Build runner ────────────────────────────────────────────────────────────── |
| 50 | + |
| 51 | +async function appendLog(buildId: number, text: string): Promise<void> { |
| 52 | + await db.execute( |
| 53 | + `UPDATE build_jobs SET log = COALESCE(log, '') || $1 WHERE id = $2` |
| 54 | + .replace("$1", `'${text.replace(/'/g, "''")}'`) |
| 55 | + .replace("$2", String(buildId)) |
| 56 | + ); |
| 57 | +} |
| 58 | + |
| 59 | +async function runBuild(buildId: number, siteId: number, opts: { |
| 60 | + gitUrl: string; |
| 61 | + gitBranch: string; |
| 62 | + buildCommand: string; |
| 63 | + outputDir: string; |
| 64 | + environment: string; |
| 65 | + userId: string; |
| 66 | + userEmail?: string; |
| 67 | + siteName: string; |
| 68 | + siteDomain: string; |
| 69 | +}): Promise<void> { |
| 70 | + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "fedhost-build-")); |
| 71 | + const log = (msg: string) => { |
| 72 | + process.stdout.write(msg + "\n"); |
| 73 | + appendLog(buildId, msg + "\n").catch(() => {}); |
| 74 | + }; |
| 75 | + |
| 76 | + try { |
| 77 | + await db.update(buildJobsTable).set({ status: "running", startedAt: new Date() }).where(eq(buildJobsTable.id, buildId)); |
| 78 | + |
| 79 | + // ── Step 1: Clone ──────────────────────────────────────────────────────── |
| 80 | + log(`[build] Cloning ${opts.gitUrl}@${opts.gitBranch}...`); |
| 81 | + await execFileAsync("git", ["clone", "--depth=1", "--branch", opts.gitBranch, opts.gitUrl, tmpDir], { |
| 82 | + timeout: 120_000, |
| 83 | + env: { ...process.env, GIT_TERMINAL_PROMPT: "0" }, |
| 84 | + }); |
| 85 | + log("[build] Clone complete"); |
| 86 | + |
| 87 | + // ── Step 2: Install ────────────────────────────────────────────────────── |
| 88 | + const hasYarnLock = fs.existsSync(path.join(tmpDir, "yarn.lock")); |
| 89 | + const hasPnpmLock = fs.existsSync(path.join(tmpDir, "pnpm-lock.yaml")); |
| 90 | + const installCmd = hasPnpmLock ? ["pnpm", ["install", "--frozen-lockfile"]] |
| 91 | + : hasYarnLock ? ["yarn", ["install", "--frozen-lockfile"]] |
| 92 | + : ["npm", ["ci", "--prefer-offline"]]; |
| 93 | + |
| 94 | + log(`[build] Installing dependencies (${installCmd[0]})...`); |
| 95 | + await execFileAsync(installCmd[0] as string, installCmd[1] as string[], { |
| 96 | + cwd: tmpDir, timeout: 300_000, env: { ...process.env, NODE_ENV: "production" }, |
| 97 | + }); |
| 98 | + |
| 99 | + // ── Step 3: Build ──────────────────────────────────────────────────────── |
| 100 | + const [cmd, ...args] = opts.buildCommand.split(" "); |
| 101 | + log(`[build] Running: ${opts.buildCommand}`); |
| 102 | + const { stdout, stderr } = await execFileAsync(cmd!, args, { |
| 103 | + cwd: tmpDir, timeout: 600_000, |
| 104 | + env: { ...process.env, NODE_ENV: "production", CI: "true" }, |
| 105 | + }); |
| 106 | + if (stdout) log(stdout); |
| 107 | + if (stderr) log(stderr); |
| 108 | + |
| 109 | + // ── Step 4: Upload output files ───────────────────────────────────────── |
| 110 | + const outDir = path.join(tmpDir, opts.outputDir); |
| 111 | + if (!fs.existsSync(outDir)) throw new Error(`Output directory '${opts.outputDir}' not found after build`); |
| 112 | + |
| 113 | + const allFiles = walkDir(outDir); |
| 114 | + log(`[build] Uploading ${allFiles.length} files...`); |
| 115 | + |
| 116 | + // Create deployment record |
| 117 | + const [site] = await db.select().from(sitesTable).where(eq(sitesTable.id, siteId)); |
| 118 | + const [latestDep] = await db.select({ version: siteDeploymentsTable.version }) |
| 119 | + .from(siteDeploymentsTable).where(eq(siteDeploymentsTable.siteId, siteId)) |
| 120 | + .orderBy(desc(siteDeploymentsTable.version)).limit(1); |
| 121 | + const version = (latestDep?.version ?? 0) + 1; |
| 122 | + |
| 123 | + const deployment = await db.transaction(async (tx) => { |
| 124 | + // Mark existing active deployment |
| 125 | + await tx.update(siteDeploymentsTable) |
| 126 | + .set({ status: "failed" }) // temporarily mark as previous |
| 127 | + .where(and(eq(siteDeploymentsTable.siteId, siteId), eq(siteDeploymentsTable.status, "active"))); |
| 128 | + |
| 129 | + const [dep] = await tx.insert(siteDeploymentsTable).values({ |
| 130 | + siteId, version, deployedBy: `build:${buildId}`, |
| 131 | + environment: opts.environment, status: "pending", |
| 132 | + fileCount: allFiles.length, totalSizeMb: 0, |
| 133 | + }).returning(); |
| 134 | + return dep; |
| 135 | + }); |
| 136 | + |
| 137 | + // Upload files |
| 138 | + let totalBytes = 0; |
| 139 | + for (const relPath of allFiles) { |
| 140 | + const absPath = path.join(outDir, relPath); |
| 141 | + const stat = fs.statSync(absPath); |
| 142 | + const ct = (mime.lookup(relPath) || "application/octet-stream") as string; |
| 143 | + const hash = crypto.createHash("sha256").update(fs.readFileSync(absPath)).digest("hex"); |
| 144 | + |
| 145 | + const { uploadUrl, objectPath } = await storage.getUploadUrl({ contentType: ct, ttlSec: 900 }); |
| 146 | + await fetch(uploadUrl, { method: "PUT", headers: { "Content-Type": ct }, body: fs.readFileSync(absPath) }); |
| 147 | + |
| 148 | + await db.insert(siteFilesTable).values({ |
| 149 | + siteId, filePath: relPath, objectPath, contentType: ct, |
| 150 | + sizeBytes: stat.size, contentHash: hash, deploymentId: deployment.id, |
| 151 | + }); |
| 152 | + totalBytes += stat.size; |
| 153 | + } |
| 154 | + |
| 155 | + // Activate deployment |
| 156 | + await db.transaction(async (tx) => { |
| 157 | + await tx.update(siteDeploymentsTable) |
| 158 | + .set({ status: "active" }) |
| 159 | + .where(eq(siteDeploymentsTable.id, deployment.id)); |
| 160 | + await tx.update(sitesTable) |
| 161 | + .set({ storageUsedMb: totalBytes / (1024 * 1024) }) |
| 162 | + .where(eq(sitesTable.id, siteId)); |
| 163 | + }); |
| 164 | + |
| 165 | + invalidateSiteCache(siteId); |
| 166 | + |
| 167 | + await db.update(buildJobsTable) |
| 168 | + .set({ status: "success", finishedAt: new Date() }) |
| 169 | + .where(eq(buildJobsTable.id, buildId)); |
| 170 | + |
| 171 | + log(`[build] ✓ Deployed ${allFiles.length} files (${(totalBytes / 1024 / 1024).toFixed(1)}MB) as v${version}`); |
| 172 | + |
| 173 | + if (opts.userEmail) { |
| 174 | + emailDeploySuccess({ to: opts.userEmail, siteName: opts.siteName, domain: opts.siteDomain, version, fileCount: allFiles.length, deployedAt: new Date().toUTCString() }).catch(() => {}); |
| 175 | + } |
| 176 | + |
| 177 | + } catch (err: any) { |
| 178 | + log(`[build] ✗ Build failed: ${err.message}`); |
| 179 | + await db.update(buildJobsTable) |
| 180 | + .set({ status: "failed", finishedAt: new Date() }) |
| 181 | + .where(eq(buildJobsTable.id, buildId)); |
| 182 | + |
| 183 | + if (opts.userEmail) { |
| 184 | + emailDeployFailed({ to: opts.userEmail, siteName: opts.siteName, domain: opts.siteDomain, error: err.message }).catch(() => {}); |
| 185 | + } |
| 186 | + } finally { |
| 187 | + fs.rmSync(tmpDir, { recursive: true, force: true }); |
| 188 | + } |
| 189 | +} |
| 190 | + |
| 191 | +function walkDir(dir: string, base = dir): string[] { |
| 192 | + const results: string[] = []; |
| 193 | + for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { |
| 194 | + const full = path.join(dir, entry.name); |
| 195 | + if (entry.isDirectory()) { |
| 196 | + results.push(...walkDir(full, base)); |
| 197 | + } else { |
| 198 | + results.push(path.relative(base, full)); |
| 199 | + } |
| 200 | + } |
| 201 | + return results; |
| 202 | +} |
| 203 | + |
| 204 | +// ── Routes ──────────────────────────────────────────────────────────────────── |
| 205 | + |
| 206 | +router.post("/sites/:id/builds", deployLimiter, asyncHandler(async (req: Request, res: Response) => { |
| 207 | + if (!req.isAuthenticated()) throw AppError.unauthorized(); |
| 208 | + const siteId = parseInt(req.params.id as string, 10); |
| 209 | + if (isNaN(siteId)) throw AppError.badRequest("Invalid site ID"); |
| 210 | + |
| 211 | + const [site] = await db.select().from(sitesTable).where(eq(sitesTable.id, siteId)); |
| 212 | + if (!site) throw AppError.notFound("Site not found"); |
| 213 | + if (site.ownerId !== req.user.id) throw AppError.forbidden(); |
| 214 | + |
| 215 | + const parsed = BuildTriggerBody.safeParse(req.body); |
| 216 | + if (!parsed.success) throw AppError.badRequest(parsed.error.message); |
| 217 | + |
| 218 | + const gitUrl = parsed.data.gitUrl ?? (site as any).gitUrl; |
| 219 | + if (!gitUrl) throw AppError.badRequest("No Git URL configured for this site. Provide gitUrl in request body.", "NO_GIT_URL"); |
| 220 | + |
| 221 | + // Check for already-running build |
| 222 | + const [running] = await db.select({ id: buildJobsTable.id }).from(buildJobsTable) |
| 223 | + .where(and(eq(buildJobsTable.siteId, siteId), eq(buildJobsTable.status, "running"))); |
| 224 | + if (running) throw AppError.conflict("A build is already running for this site"); |
| 225 | + |
| 226 | + const [job] = await db.insert(buildJobsTable).values({ |
| 227 | + siteId, triggeredBy: req.user.id, |
| 228 | + gitUrl, gitBranch: parsed.data.gitBranch, |
| 229 | + buildCommand: parsed.data.buildCommand, |
| 230 | + outputDir: parsed.data.outputDir, |
| 231 | + status: "queued", |
| 232 | + }).returning(); |
| 233 | + |
| 234 | + // Run build asynchronously — respond immediately |
| 235 | + res.status(202).json({ buildId: job.id, status: "queued", message: "Build started. Poll GET /api/sites/:id/builds/:buildId for status." }); |
| 236 | + |
| 237 | + runBuild(job.id, siteId, { |
| 238 | + gitUrl, gitBranch: parsed.data.gitBranch, |
| 239 | + buildCommand: parsed.data.buildCommand, |
| 240 | + outputDir: parsed.data.outputDir, |
| 241 | + environment: parsed.data.environment, |
| 242 | + userId: req.user.id, userEmail: req.user.email, |
| 243 | + siteName: site.name, siteDomain: site.domain, |
| 244 | + }).catch(err => logger.error({ err, buildId: job.id }, "[build] Unhandled error")); |
| 245 | +})); |
| 246 | + |
| 247 | +router.get("/sites/:id/builds", asyncHandler(async (req: Request, res: Response) => { |
| 248 | + if (!req.isAuthenticated()) throw AppError.unauthorized(); |
| 249 | + const siteId = parseInt(req.params.id as string, 10); |
| 250 | + if (isNaN(siteId)) throw AppError.badRequest("Invalid site ID"); |
| 251 | + |
| 252 | + const [site] = await db.select({ ownerId: sitesTable.ownerId }).from(sitesTable).where(eq(sitesTable.id, siteId)); |
| 253 | + if (!site) throw AppError.notFound("Site not found"); |
| 254 | + if (site.ownerId !== req.user.id) throw AppError.forbidden(); |
| 255 | + |
| 256 | + const limit = Math.min(50, Math.max(1, parseInt((req.query.limit as string) || "20", 10))); |
| 257 | + const page = Math.max(1, parseInt((req.query.page as string) || "1", 10)); |
| 258 | + |
| 259 | + const [{ total }] = await db.select({ total: count() }).from(buildJobsTable).where(eq(buildJobsTable.siteId, siteId)); |
| 260 | + const builds = await db.select().from(buildJobsTable) |
| 261 | + .where(eq(buildJobsTable.siteId, siteId)) |
| 262 | + .orderBy(desc(buildJobsTable.createdAt)) |
| 263 | + .limit(limit).offset((page - 1) * limit); |
| 264 | + |
| 265 | + // Strip log from list view for bandwidth |
| 266 | + res.json({ data: builds.map(b => ({ ...b, log: b.log ? `${b.log.length} chars` : null })), meta: { total: Number(total), page, limit } }); |
| 267 | +})); |
| 268 | + |
| 269 | +router.get("/sites/:id/builds/:buildId", asyncHandler(async (req: Request, res: Response) => { |
| 270 | + if (!req.isAuthenticated()) throw AppError.unauthorized(); |
| 271 | + const siteId = parseInt(req.params.id as string, 10); |
| 272 | + const buildId = parseInt(req.params.buildId as string, 10); |
| 273 | + if (isNaN(siteId) || isNaN(buildId)) throw AppError.badRequest("Invalid ID"); |
| 274 | + |
| 275 | + const [site] = await db.select({ ownerId: sitesTable.ownerId }).from(sitesTable).where(eq(sitesTable.id, siteId)); |
| 276 | + if (!site) throw AppError.notFound("Site not found"); |
| 277 | + if (site.ownerId !== req.user.id) throw AppError.forbidden(); |
| 278 | + |
| 279 | + const [build] = await db.select().from(buildJobsTable) |
| 280 | + .where(and(eq(buildJobsTable.id, buildId), eq(buildJobsTable.siteId, siteId))); |
| 281 | + if (!build) throw AppError.notFound("Build not found"); |
| 282 | + |
| 283 | + res.json(build); |
| 284 | +})); |
| 285 | + |
| 286 | +router.delete("/sites/:id/builds/:buildId", writeLimiter, asyncHandler(async (req: Request, res: Response) => { |
| 287 | + if (!req.isAuthenticated()) throw AppError.unauthorized(); |
| 288 | + const siteId = parseInt(req.params.id as string, 10); |
| 289 | + const buildId = parseInt(req.params.buildId as string, 10); |
| 290 | + if (isNaN(siteId) || isNaN(buildId)) throw AppError.badRequest("Invalid ID"); |
| 291 | + |
| 292 | + const [site] = await db.select({ ownerId: sitesTable.ownerId }).from(sitesTable).where(eq(sitesTable.id, siteId)); |
| 293 | + if (!site) throw AppError.notFound("Site not found"); |
| 294 | + if (site.ownerId !== req.user.id) throw AppError.forbidden(); |
| 295 | + |
| 296 | + await db.update(buildJobsTable) |
| 297 | + .set({ status: "cancelled", finishedAt: new Date() }) |
| 298 | + .where(and(eq(buildJobsTable.id, buildId), eq(buildJobsTable.siteId, siteId), eq(buildJobsTable.status, "queued"))); |
| 299 | + |
| 300 | + res.sendStatus(204); |
| 301 | +})); |
| 302 | + |
| 303 | +export default router; |
0 commit comments