From 5edd132c7612d682c304f12117480e404e2916b1 Mon Sep 17 00:00:00 2001 From: leonardcser <73912641+leonardcser@users.noreply.github.com> Date: Tue, 30 Sep 2025 17:40:40 +0200 Subject: [PATCH 01/29] feat: implement durable objects for editor page --- apps/api/src/context.ts | 2 + apps/api/src/durable-objects/workflow-do.ts | 435 +++++++++++++++++++ apps/api/src/index.ts | 3 + apps/api/src/routes/workflows.ts | 107 ++++- apps/api/src/routes/ws.ts | 31 ++ apps/api/src/runtime/runtime.ts | 48 +- apps/api/src/utils/encryption.test.ts | 1 + apps/api/wrangler.jsonc | 24 + apps/web/src/hooks/use-editable-workflow.ts | 348 ++++++++------- apps/web/src/pages/editor-page.tsx | 139 +++--- apps/web/src/services/workflow-do-service.ts | 184 ++++++++ apps/web/src/utils/utils.ts | 2 +- packages/types/src/workflow.ts | 74 ++++ 13 files changed, 1141 insertions(+), 257 deletions(-) create mode 100644 apps/api/src/durable-objects/workflow-do.ts create mode 100644 apps/api/src/routes/ws.ts create mode 100644 apps/web/src/services/workflow-do-service.ts diff --git a/apps/api/src/context.ts b/apps/api/src/context.ts index 8832023c..f189ecfe 100644 --- a/apps/api/src/context.ts +++ b/apps/api/src/context.ts @@ -1,6 +1,7 @@ import { JWTTokenPayload } from "@dafthunk/types"; import { RuntimeParams } from "./runtime/runtime"; +import { WorkflowDO } from "./durable-objects/workflow-do"; export interface Bindings { DB: D1Database; @@ -9,6 +10,7 @@ export interface Bindings { RATE_LIMIT_AUTH: RateLimit; RATE_LIMIT_EXECUTE: RateLimit; EXECUTE: Workflow; + WORKFLOW_DO: DurableObjectNamespace; RESSOURCES: R2Bucket; DATASETS: R2Bucket; DATASETS_AUTORAG: string; diff --git a/apps/api/src/durable-objects/workflow-do.ts b/apps/api/src/durable-objects/workflow-do.ts new file mode 100644 index 00000000..f567e5a0 --- /dev/null +++ b/apps/api/src/durable-objects/workflow-do.ts @@ -0,0 +1,435 @@ +import { + WorkflowDOAckMessage, + WorkflowDOErrorMessage, + WorkflowDOExecuteMessage, + 
WorkflowDOExecutionUpdateMessage, + WorkflowDOInitMessage, + WorkflowDOMessage, + WorkflowDOState, + WorkflowDOUpdateMessage, + WorkflowExecution, + WorkflowType, +} from "@dafthunk/types"; +import { DurableObject } from "cloudflare:workers"; + +import { Bindings } from "../context"; +import { createDatabase } from "../db/index"; +import { getWorkflow, updateWorkflow } from "../db/queries"; + +export class WorkflowDO extends DurableObject { + private sql: SqlStorage; + private connectedClients: Set = new Set(); + private currentExecution: WorkflowExecution | null = null; + private workflowId: string = ""; + private organizationId: string = ""; + private loaded: boolean = false; + private dirty: boolean = false; + + constructor(ctx: DurableObjectState, env: Bindings) { + super(ctx, env); + this.sql = this.ctx.storage.sql; + this.initDatabase(); + } + + private initDatabase() { + this.sql.exec(` + CREATE TABLE IF NOT EXISTS states ( + id TEXT PRIMARY KEY, + nodes TEXT NOT NULL, + edges TEXT NOT NULL, + timestamp INTEGER NOT NULL + ) + `); + this.sql.exec(` + CREATE TABLE IF NOT EXISTS metadata ( + id TEXT PRIMARY KEY, + workflow_id TEXT NOT NULL, + organization_id TEXT NOT NULL, + workflow_name TEXT NOT NULL, + workflow_handle TEXT NOT NULL, + workflow_type TEXT NOT NULL + ) + `); + } + + /** + * Load workflow from database into DO storage if not already loaded + */ + private async ensureLoaded( + workflowId: string, + organizationId: string + ): Promise { + if (this.loaded) { + return; + } + + this.workflowId = workflowId; + this.organizationId = organizationId; + + // Ensure metadata exists + let metadataRow = this.sql + .exec("SELECT * FROM metadata WHERE id = ?", "default") + .toArray()[0]; + + if (!metadataRow) { + try { + const db = createDatabase(this.env.DB); + const workflow = await getWorkflow(db, workflowId, organizationId); + if (workflow) { + const workflowData = workflow.data as any; + this.sql.exec( + `INSERT INTO metadata (id, workflow_id, 
organization_id, workflow_name, workflow_handle, workflow_type) + VALUES (?, ?, ?, ?, ?, ?) + ON CONFLICT(id) DO UPDATE SET + workflow_id = excluded.workflow_id, + organization_id = excluded.organization_id, + workflow_name = excluded.workflow_name, + workflow_handle = excluded.workflow_handle, + workflow_type = excluded.workflow_type`, + "default", + workflowId, + organizationId, + workflow.name, + workflow.handle, + (workflowData.type || "manual") as WorkflowType + ); + } else { + // Minimal metadata for new workflow + this.sql.exec( + `INSERT INTO metadata (id, workflow_id, organization_id, workflow_name, workflow_handle, workflow_type) + VALUES (?, ?, ?, ?, ?, ?)`, + "default", + workflowId, + organizationId, + "New Workflow", + workflowId, + "manual" as WorkflowType + ); + } + } catch (error) { + console.error("Error loading workflow metadata:", error); + } + } + + // Ensure states entry exists + const statesRow = this.sql + .exec("SELECT * FROM states WHERE id = ?", "default") + .toArray()[0]; + if (!statesRow) { + const timestamp = Date.now(); + this.sql.exec( + `INSERT INTO states (id, nodes, edges, timestamp) + VALUES (?, ?, ?, ?)`, + "default", + JSON.stringify([]), + JSON.stringify([]), + timestamp + ); + } + + this.loaded = true; + } + + /** + * Get state from DO storage (internal use) + */ + private async getStateInternal(): Promise { + const statesCursor = this.sql.exec( + "SELECT nodes, edges, timestamp FROM states WHERE id = ?", + "default" + ); + const statesRow = statesCursor.toArray()[0]; + + const metadataCursor = this.sql.exec( + "SELECT workflow_id as id, workflow_name as name, workflow_handle as handle, workflow_type as type FROM metadata WHERE id = ?", + "default" + ); + const metadataRow = metadataCursor.toArray()[0]; + + if (!statesRow || !metadataRow) { + throw new Error("State or metadata missing; call ensureLoaded first"); + } + + return { + id: metadataRow.id as string, + name: metadataRow.name as string, + handle: metadataRow.handle 
as string, + type: metadataRow.type as WorkflowType, + nodes: JSON.parse(statesRow.nodes as string), + edges: JSON.parse(statesRow.edges as string), + timestamp: statesRow.timestamp as number, + }; + } + + /** + * Get state (public API) + */ + async getState(): Promise { + return await this.getStateInternal(); + } + + async updateState(nodes: unknown[], edges: unknown[]): Promise { + const timestamp = Date.now(); + this.sql.exec( + `INSERT INTO states (id, nodes, edges, timestamp) + VALUES (?, ?, ?, ?) + ON CONFLICT(id) DO UPDATE SET + nodes = excluded.nodes, + edges = excluded.edges, + timestamp = excluded.timestamp`, + "default", + JSON.stringify(nodes), + JSON.stringify(edges), + timestamp + ); + + this.dirty = true; + + // Schedule an alarm to persist to database in 60 seconds if not already scheduled + const currentAlarm = await this.ctx.storage.getAlarm(); + if (currentAlarm === null) { + await this.ctx.storage.setAlarm(Date.now() + 60000); + } + } + + /** + * Persist DO state back to database + */ + private async persistToDatabase(): Promise { + if (!this.dirty || !this.workflowId || !this.organizationId) { + return; + } + + try { + const state = await this.getState(); + const db = createDatabase(this.env.DB); + await updateWorkflow(db, this.workflowId, this.organizationId, { + name: state.name, + data: { + id: state.id, + name: state.name, + handle: state.handle, + type: state.type, + nodes: state.nodes, + edges: state.edges, + }, + }); + + this.dirty = false; + console.log(`Persisted workflow ${this.workflowId} to database`); + } catch (error) { + console.error("Error persisting workflow to database:", error); + } + } + + /** + * Alarm handler - called when alarm fires + */ + async alarm(): Promise { + console.log("Alarm fired for WorkflowDO"); + await this.persistToDatabase(); + + // If still dirty (updates happened during persist), schedule another alarm + if (this.dirty) { + await this.ctx.storage.setAlarm(Date.now() + 60000); + } + } + + private 
broadcastExecutionUpdate(execution: WorkflowExecution) { + const message: WorkflowDOExecutionUpdateMessage = { + type: "execution_update", + executionId: execution.id, + status: execution.status, + nodeExecutions: execution.nodeExecutions, + error: execution.error, + }; + + const messageStr = JSON.stringify(message); + for (const client of this.connectedClients) { + try { + client.send(messageStr); + } catch (error) { + console.error("Error broadcasting to client:", error); + } + } + } + + async fetch(request: Request): Promise { + const url = new URL(request.url); + + // Extract workflowId and organizationId from query params + const workflowId = url.searchParams.get("workflowId") || ""; + const organizationId = url.searchParams.get("organizationId") || ""; + + // Ensure workflow is loaded from database + if (workflowId && organizationId) { + await this.ensureLoaded(workflowId, organizationId); + } + + // Handle GET request for workflow state + if (url.pathname === "/state" && request.method === "GET") { + try { + const state = await this.getState(); + + return new Response(JSON.stringify(state), { + headers: { "Content-Type": "application/json" }, + }); + } catch (error) { + console.error("Error getting workflow state:", error); + return new Response( + JSON.stringify({ + error: "Failed to get workflow state", + details: error instanceof Error ? 
error.message : "Unknown error", + }), + { + status: 500, + headers: { "Content-Type": "application/json" }, + } + ); + } + } + + // Handle execution updates from the runtime + if (url.pathname === "/execution" && request.method === "POST") { + try { + const execution = (await request.json()) as WorkflowExecution; + await this.updateExecution(execution); + return new Response(JSON.stringify({ success: true }), { + headers: { "Content-Type": "application/json" }, + }); + } catch (error) { + console.error("Error updating execution:", error); + return new Response( + JSON.stringify({ + error: "Failed to update execution", + details: error instanceof Error ? error.message : "Unknown error", + }), + { + status: 500, + headers: { "Content-Type": "application/json" }, + } + ); + } + } + + // Handle WebSocket connections (ensureLoaded called earlier if params present) + const upgradeHeader = request.headers.get("Upgrade"); + if (!upgradeHeader || upgradeHeader !== "websocket") { + return new Response("Expected WebSocket or /state GET request", { + status: 426, + }); + } + + const webSocketPair = new WebSocketPair(); + const [client, server] = Object.values(webSocketPair); + + this.ctx.acceptWebSocket(server); + this.connectedClients.add(server); + + // Send initial state + let initState: WorkflowDOState; + try { + initState = await this.getState(); + } catch { + // Fallback minimal state + initState = { + id: workflowId, + name: "New Workflow", + handle: workflowId, + type: "manual", + nodes: [], + edges: [], + timestamp: Date.now(), + }; + } + const initMessage: WorkflowDOInitMessage = { + type: "init", + state: initState, + }; + server.send(JSON.stringify(initMessage)); + + // If there's an ongoing execution, send the current state + if (this.currentExecution) { + this.broadcastExecutionUpdate(this.currentExecution); + } + + return new Response(null, { + status: 101, + webSocket: client, + }); + } + + async webSocketMessage(ws: WebSocket, message: string | ArrayBuffer) { 
+ try { + if (typeof message !== "string") { + const errorMsg: WorkflowDOErrorMessage = { + error: "Expected string message", + }; + ws.send(JSON.stringify(errorMsg)); + return; + } + + const data = JSON.parse(message) as WorkflowDOMessage; + + if ("type" in data && data.type === "update") { + const updateMsg = data as WorkflowDOUpdateMessage; + await this.updateState(updateMsg.nodes, updateMsg.edges); + + // Optionally echo back confirmation + const ackMsg: WorkflowDOAckMessage = { + type: "ack", + timestamp: Date.now(), + }; + ws.send(JSON.stringify(ackMsg)); + } else if ("type" in data && data.type === "execute") { + const executeMsg = data as WorkflowDOExecuteMessage; + + // Store the execution ID so we can track updates from the runtime + this.currentExecution = { + id: executeMsg.executionId, + workflowId: this.workflowId, + status: "submitted", + nodeExecutions: [], + }; + + // Broadcast initial execution state to all clients + this.broadcastExecutionUpdate(this.currentExecution); + } + } catch (error) { + console.error("WebSocket message error:", error); + const errorMsg: WorkflowDOErrorMessage = { + error: "Failed to process message", + details: error instanceof Error ? 
error.message : "Unknown error", + }; + ws.send(JSON.stringify(errorMsg)); + } + } + + async webSocketClose( + ws: WebSocket, + code: number, + reason: string, + _wasClean: boolean + ) { + this.connectedClients.delete(ws); + ws.close(code, reason); + } + + /** + * Called by the runtime workflow to push execution updates to connected clients + */ + async updateExecution(execution: WorkflowExecution) { + this.currentExecution = execution; + this.broadcastExecutionUpdate(execution); + + // Clear current execution if it's in a terminal state + if ( + execution.status === "completed" || + execution.status === "error" || + execution.status === "cancelled" || + execution.status === "exhausted" + ) { + this.currentExecution = null; + } + } +} diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index c260d8f5..bc19c600 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -1,5 +1,6 @@ import { Hono } from "hono"; export { Runtime } from "./runtime/runtime"; +export { WorkflowDO } from "./durable-objects/workflow-do"; import auth from "./auth"; import { ApiContext } from "./context"; import { handleCronTriggers } from "./cron"; @@ -21,6 +22,7 @@ import secretRoutes from "./routes/secrets"; import typeRoutes from "./routes/types"; import usageRoutes from "./routes/usage"; import workflowRoutes from "./routes/workflows"; +import wsRoutes from "./routes/ws"; // Initialize Hono app with types const app = new Hono(); @@ -65,6 +67,7 @@ app.route("/:organizationIdOrHandle/secrets", secretRoutes); app.route("/:organizationIdOrHandle/workflows", workflowRoutes); app.route("/:organizationIdOrHandle/objects", objectRoutes); app.route("/:organizationIdOrHandle/usage", usageRoutes); +app.route("/:organizationIdOrHandle/ws", wsRoutes); export default { scheduled: handleCronTriggers, diff --git a/apps/api/src/routes/workflows.ts b/apps/api/src/routes/workflows.ts index e5683536..7d6c1a9c 100644 --- a/apps/api/src/routes/workflows.ts +++ b/apps/api/src/routes/workflows.ts 
@@ -157,28 +157,63 @@ workflowRoutes.post(
  */
 workflowRoutes.get("/:id", jwtMiddleware, async (c) => {
   const id = c.req.param("id");
-  const db = createDatabase(c.env.DB);
-  const organizationId = c.get("organizationId")!;
-  const workflow = await getWorkflow(db, id, organizationId);
-  if (!workflow) {
-    return c.json({ error: "Workflow not found" }, 404);
+  const userId = c.var.jwtPayload?.sub;
+
+  if (!userId) {
+    return c.json({ error: "Unauthorized" }, 401);
   }
 
-  const workflowData = workflow.data;
-
-  const response: GetWorkflowResponse = {
-    id: workflow.id,
-    name: workflow.name,
-    handle: workflow.handle,
-    type: workflowData.type,
-    createdAt: workflow.createdAt,
-    updatedAt: workflow.updatedAt,
-    nodes: workflowData.nodes || [],
-    edges: workflowData.edges || [],
-  };
+  try {
+    // Get workflow from Durable Object
+    const doId = c.env.WORKFLOW_DO.idFromName(`${userId}-${id}`);
+    const workflowData = await c.env.WORKFLOW_DO.get(doId).getState();
+
+    if (!workflowData) {
+      // If DO doesn't have it, fall back to database
+      const db = createDatabase(c.env.DB);
+      const workflow = await getWorkflow(db, id, c.get("organizationId")!);
+      if (!workflow) {
+        return c.json({ error: "Workflow not found" }, 404);
+      }
 
-  return c.json(response);
+      const workflowData = workflow.data;
+
+      const response: GetWorkflowResponse = {
+        id: workflow.id,
+        name: workflow.name,
+        handle: workflow.handle,
+        type: workflowData.type,
+        createdAt: workflow.createdAt,
+        updatedAt: workflow.updatedAt,
+        nodes: workflowData.nodes || [],
+        edges: workflowData.edges || [],
+      };
+
+      return c.json(response);
+    }
+
+    // Get metadata from database for createdAt/updatedAt
+    const db = createDatabase(c.env.DB);
+    const workflow = await getWorkflow(db, id, c.get("organizationId")!);
+
+    const response: GetWorkflowResponse = {
+      id: workflowData.id,
+      name: workflowData.name,
+      handle: workflowData.handle,
+      type: workflowData.type,
+      createdAt: workflow?.createdAt || new Date(),
+      updatedAt: workflow?.updatedAt || new 
Date(), + // @ts-ignore + nodes: workflowData.nodes || [], + edges: workflowData.edges || [], + }; + + return c.json(response); + } catch (error) { + console.error("Error fetching workflow:", error); + return c.json({ error: "Failed to fetch workflow" }, 500); + } }); /** @@ -482,9 +517,39 @@ workflowRoutes.post( let deploymentId: string | undefined; if (version === "dev") { - // Get workflow data directly - workflow = await getWorkflow(db, workflowIdOrHandle, organizationId); - workflowData = workflow.data; + // Get workflow data from Durable Object first + let userId: string; + const jwtPayload = c.get("jwtPayload") as JWTTokenPayload | undefined; + if (jwtPayload) { + userId = jwtPayload.sub || "anonymous"; + } else { + userId = "api"; // Use a placeholder for API-triggered executions + } + + const doId = c.env.WORKFLOW_DO.idFromName( + `${userId}-${workflowIdOrHandle}` + ); + const state = await c.env.WORKFLOW_DO.get(doId).getState(); + + if (state) { + workflowData = { + type: state.type, + nodes: state.nodes || [], + edges: state.edges || [], + }; + workflow = { + id: workflowIdOrHandle, + name: state.name, + handle: state.handle, + }; + } else { + // Fallback to database + workflow = await getWorkflow(db, workflowIdOrHandle, organizationId); + if (!workflow) { + return c.json({ error: "Workflow not found" }, 404); + } + workflowData = workflow.data; + } } else { // Get deployment based on version let deployment; diff --git a/apps/api/src/routes/ws.ts b/apps/api/src/routes/ws.ts new file mode 100644 index 00000000..2db982a0 --- /dev/null +++ b/apps/api/src/routes/ws.ts @@ -0,0 +1,31 @@ +import { Hono } from "hono"; +import { jwtMiddleware } from "../auth"; +import { ApiContext } from "../context"; + +const wsRoutes = new Hono(); + +// WebSocket endpoint for real-time workflow state synchronization +wsRoutes.get("/", jwtMiddleware, async (c) => { + const upgradeHeader = c.req.header("Upgrade"); + + if (!upgradeHeader || upgradeHeader !== "websocket") { + 
return c.json({ error: "Expected WebSocket connection" }, 426);
+  }
+
+  const userId = c.var.jwtPayload?.sub;
+  const workflowId = c.req.query("workflowId");
+
+  if (!userId || !workflowId) {
+    console.error("Missing userId or workflowId:", { userId, workflowId });
+    return c.json({ error: "Missing userId or workflowId" }, 400);
+  }
+
+  // Create a unique DO ID for this user + workflow combination
+  const doId = c.env.WORKFLOW_DO.idFromName(`${userId}-${workflowId}`);
+  const stub = c.env.WORKFLOW_DO.get(doId);
+  // Forward organizationId: the DO reads it from the query string to hydrate from D1
+  const url = new URL(c.req.raw.url);
+  return stub.fetch(new Request((url.searchParams.set("organizationId", c.get("organizationId")!), url), c.req.raw));
+});
+
+export default wsRoutes;
diff --git a/apps/api/src/runtime/runtime.ts b/apps/api/src/runtime/runtime.ts
index bb43f6c7..d8d8b42a 100644
--- a/apps/api/src/runtime/runtime.ts
+++ b/apps/api/src/runtime/runtime.ts
@@ -1187,6 +1187,29 @@
     return ordered.length === workflow.nodes.length ? ordered : [];
   }
 
+  /**
+   * Pushes execution update to the Durable Object for real-time updates
+   */
+  private async pushExecutionUpdateToDO(
+    userId: string,
+    workflowId: string,
+    execution: WorkflowExecution
+  ): Promise<void> {
+    try {
+      // Create the Durable Object ID from userId + workflowId
+      const doId = this.env.WORKFLOW_DO.idFromName(`${userId}-${workflowId}`);
+      const stub = this.env.WORKFLOW_DO.get(doId);
+
+      await stub.updateExecution(execution);
+    } catch (error) {
+      console.error(
+        "Failed to push execution update to Durable Object:",
+        error
+      );
+      // Don't throw - this is a non-critical operation
+    }
+  }
+
   /**
    * Persists the workflow execution state to the database.
    */
@@ -1233,9 +1256,19 @@
       ? 
Array.from(runtimeState.nodeErrors.values()).join(", ") : undefined; + const execution: WorkflowExecution = { + id: instanceId, + workflowId, + status: executionStatus, + nodeExecutions: nodeExecutionList, + error: errorMsg, + startedAt, + endedAt, + }; + try { const db = createDatabase(this.env.DB); - return await saveExecution(db, { + await saveExecution(db, { id: instanceId, workflowId, userId, @@ -1252,15 +1285,10 @@ export class Runtime extends WorkflowEntrypoint { // Continue without interrupting the workflow. } - return { - id: instanceId, - workflowId, - status: executionStatus, - nodeExecutions: nodeExecutionList, - error: errorMsg, - startedAt, - endedAt, - }; + // Push update to Durable Object for real-time updates + await this.pushExecutionUpdateToDO(userId, workflowId, execution); + + return execution; } /** diff --git a/apps/api/src/utils/encryption.test.ts b/apps/api/src/utils/encryption.test.ts index 2061c30f..4eef7c78 100644 --- a/apps/api/src/utils/encryption.test.ts +++ b/apps/api/src/utils/encryption.test.ts @@ -20,6 +20,7 @@ const createMockEnv = (masterKey?: string): Bindings => ({ RATE_LIMIT_AUTH: {} as RateLimit, RATE_LIMIT_EXECUTE: {} as RateLimit, EXECUTE: {} as Workflow, + WORKFLOW_DO: {} as DurableObjectNamespace, RESSOURCES: {} as R2Bucket, DATASETS: {} as R2Bucket, DATASETS_AUTORAG: "", diff --git a/apps/api/wrangler.jsonc b/apps/api/wrangler.jsonc index 842cab78..a9a1a8bd 100644 --- a/apps/api/wrangler.jsonc +++ b/apps/api/wrangler.jsonc @@ -67,6 +67,21 @@ "analytics_engine_datasets": [ { "binding": "COMPUTE", "dataset": "dafthunk-compute-development" } ], + "durable_objects": { + "bindings": [ + { + "name": "WORKFLOW_DO", + "class_name": "WorkflowDO", + "script_name": "dafthunk-api" + } + ] + }, + "migrations": [ + { + "tag": "v1", + "new_sqlite_classes": ["WorkflowDO"] + } + ], "unsafe": { "bindings": [ { @@ -155,6 +170,15 @@ "analytics_engine_datasets": [ { "binding": "COMPUTE", "dataset": "dafthunk-compute-production" } ], + 
"durable_objects": { + "bindings": [ + { + "name": "WORKFLOW_DO", + "class_name": "WorkflowDO", + "script_name": "dafthunk-api" + } + ] + }, "unsafe": { "bindings": [ { diff --git a/apps/web/src/hooks/use-editable-workflow.ts b/apps/web/src/hooks/use-editable-workflow.ts index 0285e6e3..f3f42d22 100644 --- a/apps/web/src/hooks/use-editable-workflow.ts +++ b/apps/web/src/hooks/use-editable-workflow.ts @@ -1,211 +1,234 @@ -import type { Parameter, ParameterType, Workflow } from "@dafthunk/types"; +import type { Parameter, ParameterType, WorkflowExecution } from "@dafthunk/types"; import type { Edge, Node } from "@xyflow/react"; -import { useCallback, useEffect, useMemo, useState } from "react"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { useAuth } from "@/components/auth-context"; import type { NodeTemplate, WorkflowEdgeType, WorkflowNodeType, -} from "@/components/workflow/workflow-types"; // Corrected import path -import { updateWorkflow } from "@/services/workflow-service"; +} from "@/components/workflow/workflow-types"; +import { + connectWorkflowWS, + WorkflowDOState, + WorkflowWebSocket, +} from "@/services/workflow-do-service"; import { adaptDeploymentNodesToReactFlowNodes } from "@/utils/utils"; import { debounce } from "@/utils/utils"; interface UseEditableWorkflowProps { workflowId: string | undefined; - currentWorkflow: Workflow | null | undefined; - isWorkflowDetailsLoading: boolean; - workflowDetailsError: Error | null; nodeTemplates?: NodeTemplate[]; + enableWebSocket?: boolean; + onExecutionUpdate?: (execution: WorkflowExecution) => void; } export function useEditableWorkflow({ workflowId, - currentWorkflow, - isWorkflowDetailsLoading, - workflowDetailsError, nodeTemplates = [], + enableWebSocket = true, // Enable by default now + onExecutionUpdate, }: UseEditableWorkflowProps) { const [nodes, setNodes] = useState[]>([]); const [edges, setEdges] = useState[]>([]); const [isInitializing, setIsInitializing] = 
useState(true); const [processingError, setProcessingError] = useState(null); const [savingError, setSavingError] = useState(null); + const wsRef = useRef(null); + const [isWSConnected, setIsWSConnected] = useState(false); + const [workflowMetadata, setWorkflowMetadata] = useState<{ + id: string; + name: string; + handle: string; + type: string; + } | null>(null); // Get the organization from the auth context at the hook level const { organization } = useAuth(); - // Effect to initialize nodes and edges from currentWorkflow + // WebSocket is now the primary source of workflow data + // No effect needed here - state is set via WebSocket onInit callback + + // WebSocket connection effect useEffect(() => { - if (isWorkflowDetailsLoading) { - setIsInitializing(true); + // Don't connect if WebSocket is not enabled or missing required data + if (!enableWebSocket || !workflowId || !organization?.handle) { + setIsInitializing(false); return; } - if (workflowDetailsError || !currentWorkflow) { - setIsInitializing(false); - if (workflowDetailsError) { - setProcessingError( - workflowDetailsError.message || "Failed to load workflow data." - ); - } - setNodes([]); - setEdges([]); + // Prevent duplicate connections if already connected + if (wsRef.current?.isConnected()) { return; } - try { - const reactFlowNodes = adaptDeploymentNodesToReactFlowNodes( - currentWorkflow.nodes, - nodeTemplates - ); - const reactFlowEdges = currentWorkflow.edges.map((edge, index) => ({ - id: `e${index}`, - source: edge.source, - target: edge.target, - sourceHandle: edge.sourceOutput, - targetHandle: edge.targetInput, - type: "workflowEdge", - data: { - isValid: true, - sourceType: edge.sourceOutput, - targetType: edge.targetInput, - }, - })); - - setNodes(reactFlowNodes); - setEdges(reactFlowEdges); - setProcessingError(null); - } catch (error) { - console.error("Error processing workflow data into React Flow:", error); - setProcessingError( - error instanceof Error - ? 
error.message - : "Error adapting workflow data for editor." - ); - setNodes([]); - setEdges([]); - } finally { - setIsInitializing(false); - } - }, [ - currentWorkflow, - isWorkflowDetailsLoading, - workflowDetailsError, - nodeTemplates, - ]); + // Start initializing + setIsInitializing(true); + + // Add a small delay to avoid race conditions during React strict mode double-mount + const timeoutId = setTimeout(() => { + // Double-check we're not already connected after the delay + if (wsRef.current?.isConnected()) { + return; + } + + const ws = connectWorkflowWS(organization.handle, workflowId, { + onInit: (state: WorkflowDOState) => { + console.log("WebSocket received initial state:", state); + try { + // Store workflow metadata - id and type are required, name and handle can be empty + if (state.id && state.type) { + setWorkflowMetadata({ + id: state.id, + name: state.name || "", + handle: state.handle || "", + type: state.type, + }); + } + + const reactFlowNodes = adaptDeploymentNodesToReactFlowNodes( + state.nodes, + nodeTemplates + ); + const reactFlowEdges = state.edges.map((edge: any, index: number) => ({ + id: `e${index}`, + source: edge.source, + target: edge.target, + sourceHandle: edge.sourceOutput, + targetHandle: edge.targetInput, + type: "workflowEdge", + data: { + isValid: true, + sourceType: edge.sourceOutput, + targetType: edge.targetInput, + }, + })); + + setNodes(reactFlowNodes); + setEdges(reactFlowEdges); + setProcessingError(null); + setIsInitializing(false); + } catch (error) { + console.error("Error processing WebSocket state:", error); + setProcessingError("Failed to load state from WebSocket"); + setIsInitializing(false); + } + }, + onOpen: () => { + console.log("WebSocket connected"); + setIsWSConnected(true); + }, + onClose: () => { + console.log("WebSocket disconnected"); + setIsWSConnected(false); + }, + onError: (error) => { + console.error("WebSocket error:", error); + setSavingError(`WebSocket error: ${error}`); + 
setProcessingError(`WebSocket error: ${error}`); + setIsInitializing(false); + }, + onExecutionUpdate: (execution: WorkflowExecution) => { + console.log("WebSocket received execution update:", execution); + if (onExecutionUpdate) { + // Add workflowId to the execution object + onExecutionUpdate({ + ...execution, + workflowId: workflowId, + }); + } + }, + }); + + wsRef.current = ws; + }, 100); // Small delay to avoid double-mount issues + + return () => { + clearTimeout(timeoutId); + if (wsRef.current) { + wsRef.current.disconnect(); + wsRef.current = null; + } + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [enableWebSocket, workflowId, organization?.handle]); const saveWorkflowInternal = useCallback( async ( nodesToSave: Node[], edgesToSave: Edge[] ) => { - if (!workflowId || !currentWorkflow) { - setSavingError( - "Workflow ID or current workflow data is missing, cannot save." - ); + if (!workflowId) { + setSavingError("Workflow ID is missing, cannot save."); return; } setSavingError(null); - try { - // Check if any node is currently executing, purely for logging/awareness. - // The actual node.data.executionState should be handled by the UI layer (use-workflow-state) - // and those updated nodes/edges are what we receive in nodesToSave/edgesToSave. - if ( - nodesToSave.some((node) => node.data.executionState === "executing") - ) { - console.log( - "Workflow elements are in an executing state during save." 
- ); - } - - const workflowNodes = nodesToSave.map((node) => { - const incomingEdges = edgesToSave.filter( - (edge) => edge.target === node.id - ); - return { - id: node.id, - name: node.data.name, - type: node.data.nodeType || "default", // Ensure nodeType is present - position: node.position, - icon: node.data.icon, - functionCalling: node.data.functionCalling, - inputs: node.data.inputs.map((input) => { - const isConnected = incomingEdges.some( - (edge) => edge.targetHandle === input.id - ); - const parameterBase: Omit & { value?: any } = - { - name: input.id, - type: input.type as ParameterType["type"], - description: input.name, - hidden: input.hidden, - required: input.required, - repeated: input.repeated, + // If WebSocket is enabled and connected, use it instead of REST API + if (enableWebSocket && wsRef.current?.isConnected()) { + try { + const workflowNodes = nodesToSave.map((node) => { + const incomingEdges = edgesToSave.filter( + (edge) => edge.target === node.id + ); + return { + id: node.id, + name: node.data.name, + type: node.data.nodeType || "default", + position: node.position, + icon: node.data.icon, + functionCalling: node.data.functionCalling, + inputs: node.data.inputs.map((input) => { + const isConnected = incomingEdges.some( + (edge) => edge.targetHandle === input.id + ); + const parameterBase: Omit & { value?: any } = + { + name: input.id, + type: input.type as ParameterType["type"], + description: input.name, + hidden: input.hidden, + required: input.required, + repeated: input.repeated, + }; + if (!isConnected && typeof input.value !== "undefined") { + parameterBase.value = input.value; + } + return parameterBase as Parameter; + }), + outputs: node.data.outputs.map((output) => { + const parameter: Parameter = { + name: output.id, + type: output.type as ParameterType["type"], + description: output.name, + hidden: output.hidden, }; - if (!isConnected && typeof input.value !== "undefined") { - parameterBase.value = input.value; - } - return 
parameterBase as Parameter; - }), - outputs: node.data.outputs.map((output) => { - const parameter: Parameter = { - name: output.id, - type: output.type as ParameterType["type"], - description: output.name, - hidden: output.hidden, - // value is not part of output parameters definition in the backend model here - }; - return parameter; - }), - }; - }); - - const workflowEdges = edgesToSave.map((edge) => ({ - source: edge.source, - target: edge.target, - sourceOutput: edge.sourceHandle || "", - targetInput: edge.targetHandle || "", - })); - - const workflowToSave: Workflow = { - ...currentWorkflow, // Base workflow details like name, description etc. - id: workflowId, // Ensure the ID is correctly set - nodes: workflowNodes, - edges: workflowEdges, - }; - - console.log( - "Saving workflow via useEditableWorkflow:", - workflowId, - workflowToSave - ); - - const orgHandle = organization?.handle; - - if (!orgHandle) { - throw new Error("Organization handle is required to save workflow"); - } + return parameter; + }), + }; + }); - await updateWorkflow(workflowId, workflowToSave, orgHandle); - } catch (error) { - console.error("Error saving workflow via useEditableWorkflow:", error); + const workflowEdges = edgesToSave.map((edge) => ({ + source: edge.source, + target: edge.target, + sourceOutput: edge.sourceHandle || "", + targetInput: edge.targetHandle || "", + })); - // If it's an authentication error, the user might need to refresh/login again - if (error instanceof Error && error.message.includes("Unauthorized")) { - setSavingError( - "Authentication expired. Please refresh the page or login again." - ); - } else { - setSavingError( - error instanceof Error ? error.message : "Failed to save workflow." 
- ); + wsRef.current.send(workflowNodes, workflowEdges); + return; + } catch (error) { + console.error("Error saving via WebSocket:", error); + setSavingError("Failed to save via WebSocket, falling back to REST API"); + // Fall through to REST API } } + + // WebSocket not available - cannot save + console.warn("WebSocket not available, workflow changes may not be saved"); + setSavingError("WebSocket not connected. Please refresh the page."); }, - [workflowId, organization, currentWorkflow] + [workflowId, organization, enableWebSocket] ); const saveWorkflow = useMemo( @@ -218,6 +241,14 @@ export function useEditableWorkflow({ [saveWorkflowInternal] ); + const startExecution = useCallback((executionId: string) => { + if (wsRef.current?.isConnected()) { + wsRef.current.executeWorkflow(executionId); + } else { + console.warn("WebSocket not connected, cannot start execution via WebSocket"); + } + }, []); + return { nodes, edges, @@ -225,5 +256,8 @@ export function useEditableWorkflow({ processingError, savingError, saveWorkflow, + isWSConnected, + startExecution, + workflowMetadata, }; } diff --git a/apps/web/src/pages/editor-page.tsx b/apps/web/src/pages/editor-page.tsx index 5dca9634..438638aa 100644 --- a/apps/web/src/pages/editor-page.tsx +++ b/apps/web/src/pages/editor-page.tsx @@ -23,6 +23,7 @@ import type { WorkflowExecution, WorkflowNodeType, } from "@/components/workflow/workflow-types"; +import type { WorkflowType } from "@dafthunk/types"; import { useEditableWorkflow } from "@/hooks/use-editable-workflow"; import { useOrgUrl } from "@/hooks/use-org-url"; import { usePageBreadcrumbs } from "@/hooks/use-page"; @@ -35,7 +36,6 @@ import { useNodeTypes } from "@/services/type-service"; import { upsertCronTrigger, useCronTrigger, - useWorkflow, useWorkflowExecution, } from "@/services/workflow-service"; @@ -54,25 +54,17 @@ export function EditorPage() { const [isEmailTriggerDialogOpen, setIsEmailTriggerDialogOpen] = useState(false); - const { - workflow: 
currentWorkflow, - workflowError: workflowDetailsError, - isWorkflowLoading: isWorkflowDetailsLoading, - } = useWorkflow(id || null, { revalidateOnFocus: false }); - - const { cronTrigger, isCronTriggerLoading, mutateCronTrigger } = - useCronTrigger(currentWorkflow?.type === "cron" && id ? id : null, { - revalidateOnFocus: false, - }); - - const { - deployments: deploymentHistory, - isDeploymentHistoryLoading, - mutateHistory: mutateDeploymentHistory, - } = useDeploymentHistory(id!, { revalidateOnFocus: false }); + // No longer fetching workflow via REST - using WebSocket in use-editable-workflow + // const { + // workflow: currentWorkflow, + // workflowError: workflowDetailsError, + // isWorkflowLoading: isWorkflowDetailsLoading, + // } = useWorkflow(id || null, { revalidateOnFocus: false }); + // We need workflowMetadata early, but useEditableWorkflow needs nodeTemplates + // Fetch all node types initially (no filter) const { nodeTypes, nodeTypesError, isNodeTypesLoading } = useNodeTypes( - currentWorkflow?.type, + undefined, // Fetch all node types initially { revalidateOnFocus: false } ); @@ -110,6 +102,34 @@ export function EditorPage() { return templates; }, [nodeTypes]); + // Get workflow metadata from WebSocket connection + const { + nodes: initialNodesForUI, + edges: initialEdgesForUI, + isInitializing: isWorkflowInitializing, + processingError: workflowProcessingError, + savingError: workflowSavingError, + saveWorkflow, + isWSConnected: _isWSConnected, + workflowMetadata, + } = useEditableWorkflow({ + workflowId: id, + nodeTemplates, + enableWebSocket: true, + }); + + // Now we can use workflowMetadata for cron trigger + const { cronTrigger, isCronTriggerLoading, mutateCronTrigger } = + useCronTrigger(workflowMetadata?.type === "cron" && id ? 
id : null, { + revalidateOnFocus: false, + }); + + const { + deployments: deploymentHistory, + isDeploymentHistoryLoading, + mutateHistory: mutateDeploymentHistory, + } = useDeploymentHistory(id!, { revalidateOnFocus: false }); + const deploymentVersions = useMemo( () => deploymentHistory.map((d) => d.version).sort((a, b) => b - a), [deploymentHistory] @@ -153,21 +173,6 @@ export function EditorPage() { [id, orgHandle, mutateCronTrigger] ); - const { - nodes: initialNodesForUI, - edges: initialEdgesForUI, - isInitializing: isWorkflowInitializing, - processingError: workflowProcessingError, - savingError: workflowSavingError, - saveWorkflow, - } = useEditableWorkflow({ - workflowId: id, - currentWorkflow, - isWorkflowDetailsLoading, - workflowDetailsError, - nodeTemplates, - }); - useEffect(() => { if (initialNodesForUI) { setLatestUiNodes(initialNodesForUI); @@ -183,21 +188,21 @@ export function EditorPage() { const handleUiNodesChanged = useCallback( (updatedNodesFromUI: Node[]) => { setLatestUiNodes(updatedNodesFromUI); - if (currentWorkflow) { + if (workflowMetadata) { saveWorkflow(updatedNodesFromUI, latestUiEdges); } }, - [latestUiEdges, saveWorkflow, currentWorkflow] + [latestUiEdges, saveWorkflow, workflowMetadata] ); const handleUiEdgesChanged = useCallback( (updatedEdgesFromUI: Edge[]) => { setLatestUiEdges(updatedEdgesFromUI); - if (currentWorkflow) { + if (workflowMetadata) { saveWorkflow(latestUiNodes, updatedEdgesFromUI); } }, - [latestUiNodes, saveWorkflow, currentWorkflow] + [latestUiNodes, saveWorkflow, workflowMetadata] ); const { @@ -216,9 +221,9 @@ export function EditorPage() { usePageBreadcrumbs( [ { label: "Workflows", to: getOrgUrl("workflows") }, - { label: currentWorkflow?.name || "Workflow" }, + { label: workflowMetadata?.name || "Workflow" }, ], - [currentWorkflow?.name] + [workflowMetadata?.name] ); const validateConnection = useCallback( @@ -259,10 +264,10 @@ export function EditorPage() { onExecutionFromBuilder, latestUiNodes, 
nodeTemplates as any, - currentWorkflow?.type + workflowMetadata?.type ); }, - [executeWorkflow, latestUiNodes, nodeTemplates, currentWorkflow?.type] + [executeWorkflow, latestUiNodes, nodeTemplates, workflowMetadata?.type] ); const handleRetryLoading = () => { @@ -293,16 +298,17 @@ export function EditorPage() { setWorkflowBuilderKey(Date.now()); }; - if (workflowDetailsError) { - return ( - - ); - } + // No longer using REST fetch for workflow details - handled by WebSocket + // if (workflowDetailsError) { + // return ( + // + // ); + // } if (nodeTypesError) { return ( @@ -327,7 +333,6 @@ export function EditorPage() { } if ( - isWorkflowDetailsLoading || isNodeTypesLoading || isWorkflowInitializing || isCronTriggerLoading || @@ -337,16 +342,14 @@ export function EditorPage() { } if ( - !currentWorkflow && - !isWorkflowDetailsLoading && - !workflowDetailsError && + !workflowMetadata && !isNodeTypesLoading && !nodeTypesError && !isWorkflowInitializing ) { return ( navigate(getOrgUrl("workflows"))} /> ); @@ -363,19 +366,19 @@ export function EditorPage() { setIsHttpIntegrationDialogOpen(true) : undefined } onShowEmailTrigger={ - currentWorkflow?.type === "email_message" + workflowMetadata?.type === "email_message" ? 
() => setIsEmailTriggerDialogOpen(true) : undefined } @@ -390,7 +393,7 @@ export function EditorPage() { createObjectUrl={createObjectUrl} /> - {currentWorkflow?.type === "http_request" && ( + {workflowMetadata?.type === "http_request" && ( setIsHttpIntegrationDialogOpen(false)} @@ -401,7 +404,7 @@ export function EditorPage() { nodeTemplates={nodeTemplates} /> )} - {currentWorkflow?.type === "http_request" && + {workflowMetadata?.type === "http_request" && executionFormParameters.length > 0 && ( )} - {currentWorkflow?.type === "http_request" && + {workflowMetadata?.type === "http_request" && executionJsonBodyParameters.length > 0 && ( )} - {currentWorkflow?.type === "email_message" && ( + {workflowMetadata?.type === "email_message" && ( setIsEmailTriggerDialogOpen(false)} orgHandle={orgHandle} - workflowHandle={currentWorkflow.handle} + workflowHandle={workflowMetadata.handle} deploymentVersion="dev" /> )} - {currentWorkflow?.type === "email_message" && ( + {workflowMetadata?.type === "email_message" && ( )} - {currentWorkflow?.type === "cron" && ( + {workflowMetadata?.type === "cron" && ( )} diff --git a/apps/web/src/services/workflow-do-service.ts b/apps/web/src/services/workflow-do-service.ts new file mode 100644 index 00000000..1ef0f962 --- /dev/null +++ b/apps/web/src/services/workflow-do-service.ts @@ -0,0 +1,184 @@ +import type { + Edge, + Node, + WorkflowDOAckMessage, + WorkflowDOErrorMessage, + WorkflowDOExecutionUpdateMessage, + WorkflowDOInitMessage, + WorkflowDOState, + WorkflowDOUpdateMessage, + WorkflowExecution, +} from "@dafthunk/types"; + +import { getApiBaseUrl } from "@/config/api"; + +// Re-export for convenience +export type { WorkflowDOState }; + +type WebSocketMessage = + | WorkflowDOInitMessage + | WorkflowDOAckMessage + | WorkflowDOErrorMessage + | WorkflowDOExecutionUpdateMessage; + +export interface WorkflowWSOptions { + onInit?: (state: WorkflowDOState) => void; + onAck?: (timestamp: number) => void; + onError?: (error: string) => void; 
+ onClose?: () => void; + onOpen?: () => void; + onExecutionUpdate?: (execution: WorkflowExecution) => void; +} + +export class WorkflowWebSocket { + private ws: WebSocket | null = null; + private reconnectAttempts = 0; + private maxReconnectAttempts = 5; + private reconnectDelay = 1000; // Start with 1 second + private shouldReconnect = true; + + constructor( + private orgHandle: string, + private workflowId: string, + private options: WorkflowWSOptions = {} + ) {} + + connect(): void { + if ( + this.ws?.readyState === WebSocket.OPEN || + this.ws?.readyState === WebSocket.CONNECTING + ) { + return; + } + + const apiBaseUrl = getApiBaseUrl(); + const wsBaseUrl = apiBaseUrl.replace(/^http/, "ws"); + const url = `${wsBaseUrl}/${this.orgHandle}/ws?workflowId=${this.workflowId}`; + + try { + this.ws = new WebSocket(url); + + this.ws.onopen = () => { + console.log("WebSocket connected"); + this.reconnectAttempts = 0; + this.reconnectDelay = 1000; + this.options.onOpen?.(); + }; + + this.ws.onmessage = (event) => { + try { + const message = JSON.parse(event.data) as WebSocketMessage; + + if ("error" in message) { + console.error("WebSocket error message:", message.error); + this.options.onError?.(message.error || ""); + } else if (message.type === "init") { + this.options.onInit?.(message.state); + } else if (message.type === "ack") { + this.options.onAck?.(message.timestamp); + } else if (message.type === "execution_update") { + this.options.onExecutionUpdate?.({ + id: message.executionId, + workflowId: "", // Will be filled from context + status: message.status, + nodeExecutions: message.nodeExecutions, + error: message.error, + }); + } + } catch (error) { + console.error("Failed to parse WebSocket message:", error); + this.options.onError?.("Failed to parse message"); + } + }; + + this.ws.onerror = (error) => { + console.error("WebSocket error:", error); + this.options.onError?.("WebSocket connection error"); + }; + + this.ws.onclose = () => { + console.log("WebSocket 
closed"); + this.options.onClose?.(); + + if ( + this.shouldReconnect && + this.reconnectAttempts < this.maxReconnectAttempts + ) { + this.reconnectAttempts++; + console.log( + `Reconnecting... Attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts}` + ); + + setTimeout(() => { + this.connect(); + }, this.reconnectDelay); + + // Exponential backoff + this.reconnectDelay = Math.min(this.reconnectDelay * 2, 30000); + } + }; + } catch (error) { + console.error("Failed to create WebSocket:", error); + this.options.onError?.("Failed to create WebSocket connection"); + } + } + + send(nodes: Node[], edges: Edge[]): void { + if (this.ws?.readyState !== WebSocket.OPEN) { + console.warn("WebSocket is not open, cannot send message"); + return; + } + + try { + const updateMsg: WorkflowDOUpdateMessage = { + type: "update", + nodes, + edges, + }; + this.ws.send(JSON.stringify(updateMsg)); + } catch (error) { + console.error("Failed to send WebSocket message:", error); + this.options.onError?.("Failed to send message"); + } + } + + executeWorkflow(executionId: string): void { + if (this.ws?.readyState !== WebSocket.OPEN) { + console.warn("WebSocket is not open, cannot send execute message"); + return; + } + + try { + const executeMsg = { + type: "execute", + executionId, + }; + this.ws.send(JSON.stringify(executeMsg)); + } catch (error) { + console.error("Failed to send execute message:", error); + this.options.onError?.("Failed to send execute message"); + } + } + + disconnect(): void { + this.shouldReconnect = false; + if (this.ws) { + this.ws.close(); + this.ws = null; + } + } + + isConnected(): boolean { + return this.ws?.readyState === WebSocket.OPEN; + } +} + +export const connectWorkflowWS = ( + orgHandle: string, + workflowId: string, + options: WorkflowWSOptions = {} +): WorkflowWebSocket => { + const ws = new WorkflowWebSocket(orgHandle, workflowId, options); + ws.connect(); + return ws; +}; diff --git a/apps/web/src/utils/utils.ts b/apps/web/src/utils/utils.ts 
index 97bf3983..71ad94e3 100644 --- a/apps/web/src/utils/utils.ts +++ b/apps/web/src/utils/utils.ts @@ -118,7 +118,7 @@ export function adaptDeploymentNodesToReactFlowNodes( // Find the icon from nodeTemplates by matching the node type const template = nodeTemplates.find((t) => t.type === depNode.type); - const icon = template?.icon || "circle"; // fallback icon + const icon = depNode.icon || template?.icon || "circle"; // prefer persisted icon return { id: depNode.id, diff --git a/packages/types/src/workflow.ts b/packages/types/src/workflow.ts index 3f8a0296..aefcf527 100644 --- a/packages/types/src/workflow.ts +++ b/packages/types/src/workflow.ts @@ -465,3 +465,77 @@ export interface GetCronTriggerResponse { * Returns the full trigger information. */ export type UpsertCronTriggerResponse = GetCronTriggerResponse; + +/** + * WebSocket message types for Durable Object real-time sync + */ + +/** + * Workflow state stored in Durable Object + */ +export interface WorkflowDOState extends Workflow { + timestamp: number; +} + +/** + * Message sent from DO to client with initial state + */ +export interface WorkflowDOInitMessage { + type: "init"; + state: WorkflowDOState; +} + +/** + * Message sent from client to DO to update state + */ +export interface WorkflowDOUpdateMessage { + type: "update"; + nodes: Node[]; + edges: Edge[]; +} + +/** + * Acknowledgment message sent from DO to client + */ +export interface WorkflowDOAckMessage { + type: "ack"; + timestamp: number; +} + +/** + * Error message sent from DO to client + */ +export interface WorkflowDOErrorMessage { + error: string; + details?: string; +} + +/** + * Message sent from client to DO to start workflow execution + */ +export interface WorkflowDOExecuteMessage { + type: "execute"; + executionId: string; +} + +/** + * Message sent from DO to client with execution progress updates + */ +export interface WorkflowDOExecutionUpdateMessage { + type: "execution_update"; + executionId: string; + status: 
WorkflowExecutionStatus; + nodeExecutions: NodeExecution[]; + error?: string; +} + +/** + * All possible WebSocket messages + */ +export type WorkflowDOMessage = + | WorkflowDOInitMessage + | WorkflowDOUpdateMessage + | WorkflowDOAckMessage + | WorkflowDOErrorMessage + | WorkflowDOExecuteMessage + | WorkflowDOExecutionUpdateMessage; From e9c051ff22748b170cb095b0ff07caf89d493a0e Mon Sep 17 00:00:00 2001 From: leonardcser <73912641+leonardcser@users.noreply.github.com> Date: Tue, 30 Sep 2025 17:54:54 +0200 Subject: [PATCH 02/29] style: fix lint errors --- apps/api/src/context.ts | 2 +- apps/api/src/durable-objects/workflow-do.ts | 196 ++++++-------------- apps/api/src/index.ts | 2 +- apps/api/src/routes/workflows.ts | 7 +- apps/api/src/routes/ws.ts | 25 ++- apps/api/src/runtime/runtime.ts | 48 +---- apps/api/src/utils/encryption.test.ts | 3 +- apps/web/src/hooks/use-editable-workflow.ts | 176 ++++++++---------- apps/web/src/pages/editor-page.tsx | 23 +-- apps/web/src/utils/utils.ts | 2 +- 10 files changed, 174 insertions(+), 310 deletions(-) diff --git a/apps/api/src/context.ts b/apps/api/src/context.ts index f189ecfe..fed53f5a 100644 --- a/apps/api/src/context.ts +++ b/apps/api/src/context.ts @@ -1,7 +1,7 @@ import { JWTTokenPayload } from "@dafthunk/types"; -import { RuntimeParams } from "./runtime/runtime"; import { WorkflowDO } from "./durable-objects/workflow-do"; +import { RuntimeParams } from "./runtime/runtime"; export interface Bindings { DB: D1Database; diff --git a/apps/api/src/durable-objects/workflow-do.ts b/apps/api/src/durable-objects/workflow-do.ts index f567e5a0..be5f3151 100644 --- a/apps/api/src/durable-objects/workflow-do.ts +++ b/apps/api/src/durable-objects/workflow-do.ts @@ -1,13 +1,10 @@ import { WorkflowDOAckMessage, WorkflowDOErrorMessage, - WorkflowDOExecuteMessage, - WorkflowDOExecutionUpdateMessage, WorkflowDOInitMessage, WorkflowDOMessage, WorkflowDOState, WorkflowDOUpdateMessage, - WorkflowExecution, WorkflowType, } from 
"@dafthunk/types"; import { DurableObject } from "cloudflare:workers"; @@ -18,8 +15,7 @@ import { getWorkflow, updateWorkflow } from "../db/queries"; export class WorkflowDO extends DurableObject { private sql: SqlStorage; - private connectedClients: Set = new Set(); - private currentExecution: WorkflowExecution | null = null; + private workflowId: string = ""; private organizationId: string = ""; private loaded: boolean = false; @@ -66,65 +62,71 @@ export class WorkflowDO extends DurableObject { this.workflowId = workflowId; this.organizationId = organizationId; - // Ensure metadata exists - let metadataRow = this.sql - .exec("SELECT * FROM metadata WHERE id = ?", "default") - .toArray()[0]; - - if (!metadataRow) { - try { - const db = createDatabase(this.env.DB); - const workflow = await getWorkflow(db, workflowId, organizationId); - if (workflow) { - const workflowData = workflow.data as any; - this.sql.exec( - `INSERT INTO metadata (id, workflow_id, organization_id, workflow_name, workflow_handle, workflow_type) - VALUES (?, ?, ?, ?, ?, ?) - ON CONFLICT(id) DO UPDATE SET - workflow_id = excluded.workflow_id, - organization_id = excluded.organization_id, - workflow_name = excluded.workflow_name, - workflow_handle = excluded.workflow_handle, - workflow_type = excluded.workflow_type`, - "default", - workflowId, - organizationId, - workflow.name, - workflow.handle, - (workflowData.type || "manual") as WorkflowType - ); - } else { - // Minimal metadata for new workflow - this.sql.exec( - `INSERT INTO metadata (id, workflow_id, organization_id, workflow_name, workflow_handle, workflow_type) - VALUES (?, ?, ?, ?, ?, ?)`, - "default", - workflowId, - organizationId, - "New Workflow", - workflowId, - "manual" as WorkflowType - ); - } - } catch (error) { - console.error("Error loading workflow metadata:", error); + try { + const db = createDatabase(this.env.DB); + const workflow = await getWorkflow(db, workflowId, organizationId); + + const nodes = workflow + ? 
JSON.stringify((workflow.data as any).nodes || []) + : JSON.stringify([]); + const edges = workflow + ? JSON.stringify((workflow.data as any).edges || []) + : JSON.stringify([]); + const timestamp = workflow + ? workflow.updatedAt + ? workflow.updatedAt.getTime() + : Date.now() + : Date.now(); + + // Upsert metadata + if (workflow) { + this.sql.exec( + `INSERT INTO metadata (id, workflow_id, organization_id, workflow_name, workflow_handle, workflow_type) + VALUES (?, ?, ?, ?, ?, ?) + ON CONFLICT(id) DO UPDATE SET + workflow_id = excluded.workflow_id, + organization_id = excluded.organization_id, + workflow_name = excluded.workflow_name, + workflow_handle = excluded.workflow_handle, + workflow_type = excluded.workflow_type`, + "default", + workflowId, + organizationId, + workflow.name, + workflow.handle, + ((workflow.data as any).type || "manual") as WorkflowType + ); + } else { + // Minimal metadata for new workflow + this.sql.exec( + `INSERT INTO metadata (id, workflow_id, organization_id, workflow_name, workflow_handle, workflow_type) + VALUES (?, ?, ?, ?, ?, ?)`, + "default", + workflowId, + organizationId, + "New Workflow", + workflowId, + "manual" as WorkflowType + ); } - } - // Ensure states entry exists - const statesRow = this.sql - .exec("SELECT * FROM states WHERE id = ?", "default") - .toArray()[0]; - if (!statesRow) { - const timestamp = Date.now(); + // Upsert states this.sql.exec( `INSERT INTO states (id, nodes, edges, timestamp) - VALUES (?, ?, ?, ?)`, + VALUES (?, ?, ?, ?) 
+ ON CONFLICT(id) DO UPDATE SET + nodes = excluded.nodes, + edges = excluded.edges, + timestamp = excluded.timestamp`, "default", - JSON.stringify([]), - JSON.stringify([]), + nodes, + edges, timestamp ); + + this.dirty = false; + } catch (error) { + console.error("Error loading workflow:", error); } this.loaded = true; @@ -235,25 +237,6 @@ export class WorkflowDO extends DurableObject { } } - private broadcastExecutionUpdate(execution: WorkflowExecution) { - const message: WorkflowDOExecutionUpdateMessage = { - type: "execution_update", - executionId: execution.id, - status: execution.status, - nodeExecutions: execution.nodeExecutions, - error: execution.error, - }; - - const messageStr = JSON.stringify(message); - for (const client of this.connectedClients) { - try { - client.send(messageStr); - } catch (error) { - console.error("Error broadcasting to client:", error); - } - } - } - async fetch(request: Request): Promise { const url = new URL(request.url); @@ -289,29 +272,6 @@ export class WorkflowDO extends DurableObject { } } - // Handle execution updates from the runtime - if (url.pathname === "/execution" && request.method === "POST") { - try { - const execution = (await request.json()) as WorkflowExecution; - await this.updateExecution(execution); - return new Response(JSON.stringify({ success: true }), { - headers: { "Content-Type": "application/json" }, - }); - } catch (error) { - console.error("Error updating execution:", error); - return new Response( - JSON.stringify({ - error: "Failed to update execution", - details: error instanceof Error ? 
error.message : "Unknown error", - }), - { - status: 500, - headers: { "Content-Type": "application/json" }, - } - ); - } - } - // Handle WebSocket connections (ensureLoaded called earlier if params present) const upgradeHeader = request.headers.get("Upgrade"); if (!upgradeHeader || upgradeHeader !== "websocket") { @@ -324,7 +284,6 @@ export class WorkflowDO extends DurableObject { const [client, server] = Object.values(webSocketPair); this.ctx.acceptWebSocket(server); - this.connectedClients.add(server); // Send initial state let initState: WorkflowDOState; @@ -348,11 +307,6 @@ export class WorkflowDO extends DurableObject { }; server.send(JSON.stringify(initMessage)); - // If there's an ongoing execution, send the current state - if (this.currentExecution) { - this.broadcastExecutionUpdate(this.currentExecution); - } - return new Response(null, { status: 101, webSocket: client, @@ -381,19 +335,6 @@ export class WorkflowDO extends DurableObject { timestamp: Date.now(), }; ws.send(JSON.stringify(ackMsg)); - } else if ("type" in data && data.type === "execute") { - const executeMsg = data as WorkflowDOExecuteMessage; - - // Store the execution ID so we can track updates from the runtime - this.currentExecution = { - id: executeMsg.executionId, - workflowId: this.workflowId, - status: "submitted", - nodeExecutions: [], - }; - - // Broadcast initial execution state to all clients - this.broadcastExecutionUpdate(this.currentExecution); } } catch (error) { console.error("WebSocket message error:", error); @@ -411,25 +352,6 @@ export class WorkflowDO extends DurableObject { reason: string, _wasClean: boolean ) { - this.connectedClients.delete(ws); ws.close(code, reason); } - - /** - * Called by the runtime workflow to push execution updates to connected clients - */ - async updateExecution(execution: WorkflowExecution) { - this.currentExecution = execution; - this.broadcastExecutionUpdate(execution); - - // Clear current execution if it's in a terminal state - if ( - 
execution.status === "completed" || - execution.status === "error" || - execution.status === "cancelled" || - execution.status === "exhausted" - ) { - this.currentExecution = null; - } - } } diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index bc19c600..21ad19a7 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -1,6 +1,6 @@ import { Hono } from "hono"; -export { Runtime } from "./runtime/runtime"; export { WorkflowDO } from "./durable-objects/workflow-do"; +export { Runtime } from "./runtime/runtime"; import auth from "./auth"; import { ApiContext } from "./context"; import { handleCronTriggers } from "./cron"; diff --git a/apps/api/src/routes/workflows.ts b/apps/api/src/routes/workflows.ts index 7d6c1a9c..c0073914 100644 --- a/apps/api/src/routes/workflows.ts +++ b/apps/api/src/routes/workflows.ts @@ -167,7 +167,9 @@ workflowRoutes.get("/:id", jwtMiddleware, async (c) => { try { // Get workflow from Durable Object const doId = c.env.WORKFLOW_DO.idFromName(`${userId}-${id}`); - const workflowData = await c.env.WORKFLOW_DO.get(doId).getState(); + const stub = c.env.WORKFLOW_DO.get(doId); + // @ts-ignore + const workflowData = await stub.getState(); if (!workflowData) { // If DO doesn't have it, fall back to database @@ -529,7 +531,8 @@ workflowRoutes.post( const doId = c.env.WORKFLOW_DO.idFromName( `${userId}-${workflowIdOrHandle}` ); - const state = await c.env.WORKFLOW_DO.get(doId).getState(); + const stub = c.env.WORKFLOW_DO.get(doId); + const state = await stub.getState(); if (state) { workflowData = { diff --git a/apps/api/src/routes/ws.ts b/apps/api/src/routes/ws.ts index 2db982a0..263c7770 100644 --- a/apps/api/src/routes/ws.ts +++ b/apps/api/src/routes/ws.ts @@ -1,4 +1,5 @@ import { Hono } from "hono"; + import { jwtMiddleware } from "../auth"; import { ApiContext } from "../context"; @@ -14,18 +15,30 @@ wsRoutes.get("/", jwtMiddleware, async (c) => { const userId = c.var.jwtPayload?.sub; const workflowId = 
c.req.query("workflowId"); - - if (!userId || !workflowId) { - console.error("Missing userId or workflowId:", { userId, workflowId }); - return c.json({ error: "Missing userId or workflowId" }, 400); + const organizationId = c.get("organizationId")!; + + if (!userId || !workflowId || !organizationId) { + console.error("Missing userId, workflowId or organizationId:", { + userId, + workflowId, + organizationId, + }); + return c.json( + { error: "Missing userId, workflowId or organizationId" }, + 400 + ); } // Create a unique DO ID for this user + workflow combination const doId = c.env.WORKFLOW_DO.idFromName(`${userId}-${workflowId}`); const stub = c.env.WORKFLOW_DO.get(doId); - // Proxy the WebSocket connection to the Durable Object - return stub.fetch(c.req.raw); + // Reconstruct request with required query params for DO + const url = new URL(c.req.url); + url.searchParams.set("organizationId", organizationId); + url.searchParams.set("workflowId", workflowId); + const newReq = new Request(url.toString(), c.req.raw); + return stub.fetch(newReq); }); export default wsRoutes; diff --git a/apps/api/src/runtime/runtime.ts b/apps/api/src/runtime/runtime.ts index d8d8b42a..bb43f6c7 100644 --- a/apps/api/src/runtime/runtime.ts +++ b/apps/api/src/runtime/runtime.ts @@ -1187,29 +1187,6 @@ export class Runtime extends WorkflowEntrypoint { return ordered.length === workflow.nodes.length ? 
ordered : []; } - /** - * Pushes execution update to the Durable Object for real-time updates - */ - private async pushExecutionUpdateToDO( - userId: string, - workflowId: string, - execution: WorkflowExecution - ): Promise { - try { - // Create the Durable Object ID from userId + workflowId - const doId = this.env.WORKFLOW_DO.idFromName(`${userId}-${workflowId}`); - const stub = this.env.WORKFLOW_DO.get(doId); - - await stub.updateExecution(execution); - } catch (error) { - console.error( - "Failed to push execution update to Durable Object:", - error - ); - // Don't throw - this is a non-critical operation - } - } - /** * Persists the workflow execution state to the database. */ @@ -1256,19 +1233,9 @@ export class Runtime extends WorkflowEntrypoint { ? Array.from(runtimeState.nodeErrors.values()).join(", ") : undefined; - const execution: WorkflowExecution = { - id: instanceId, - workflowId, - status: executionStatus, - nodeExecutions: nodeExecutionList, - error: errorMsg, - startedAt, - endedAt, - }; - try { const db = createDatabase(this.env.DB); - await saveExecution(db, { + return await saveExecution(db, { id: instanceId, workflowId, userId, @@ -1285,10 +1252,15 @@ export class Runtime extends WorkflowEntrypoint { // Continue without interrupting the workflow. 
} - // Push update to Durable Object for real-time updates - await this.pushExecutionUpdateToDO(userId, workflowId, execution); - - return execution; + return { + id: instanceId, + workflowId, + status: executionStatus, + nodeExecutions: nodeExecutionList, + error: errorMsg, + startedAt, + endedAt, + }; } /** diff --git a/apps/api/src/utils/encryption.test.ts b/apps/api/src/utils/encryption.test.ts index 4eef7c78..f322a3a2 100644 --- a/apps/api/src/utils/encryption.test.ts +++ b/apps/api/src/utils/encryption.test.ts @@ -5,6 +5,7 @@ import { beforeEach, describe, expect, it } from "vitest"; import { Bindings } from "../context"; +import { WorkflowDO } from "../durable-objects/workflow-do"; import { decryptSecret, encryptSecret } from "./encryption"; // Mock Bindings for testing @@ -20,7 +21,7 @@ const createMockEnv = (masterKey?: string): Bindings => ({ RATE_LIMIT_AUTH: {} as RateLimit, RATE_LIMIT_EXECUTE: {} as RateLimit, EXECUTE: {} as Workflow, - WORKFLOW_DO: {} as DurableObjectNamespace, + WORKFLOW_DO: {} as DurableObjectNamespace, RESSOURCES: {} as R2Bucket, DATASETS: {} as R2Bucket, DATASETS_AUTORAG: "", diff --git a/apps/web/src/hooks/use-editable-workflow.ts b/apps/web/src/hooks/use-editable-workflow.ts index f3f42d22..e1ac5450 100644 --- a/apps/web/src/hooks/use-editable-workflow.ts +++ b/apps/web/src/hooks/use-editable-workflow.ts @@ -1,4 +1,4 @@ -import type { Parameter, ParameterType, WorkflowExecution } from "@dafthunk/types"; +import type { Parameter, ParameterType } from "@dafthunk/types"; import type { Edge, Node } from "@xyflow/react"; import { useCallback, useEffect, useMemo, useRef, useState } from "react"; @@ -19,15 +19,11 @@ import { debounce } from "@/utils/utils"; interface UseEditableWorkflowProps { workflowId: string | undefined; nodeTemplates?: NodeTemplate[]; - enableWebSocket?: boolean; - onExecutionUpdate?: (execution: WorkflowExecution) => void; } export function useEditableWorkflow({ workflowId, nodeTemplates = [], - enableWebSocket = 
true, // Enable by default now - onExecutionUpdate, }: UseEditableWorkflowProps) { const [nodes, setNodes] = useState[]>([]); const [edges, setEdges] = useState[]>([]); @@ -43,16 +39,11 @@ export function useEditableWorkflow({ type: string; } | null>(null); - // Get the organization from the auth context at the hook level const { organization } = useAuth(); - // WebSocket is now the primary source of workflow data - // No effect needed here - state is set via WebSocket onInit callback - // WebSocket connection effect useEffect(() => { - // Don't connect if WebSocket is not enabled or missing required data - if (!enableWebSocket || !workflowId || !organization?.handle) { + if (!workflowId || !organization?.handle) { setIsInitializing(false); return; } @@ -62,7 +53,6 @@ export function useEditableWorkflow({ return; } - // Start initializing setIsInitializing(true); // Add a small delay to avoid race conditions during React strict mode double-mount @@ -73,71 +63,63 @@ export function useEditableWorkflow({ } const ws = connectWorkflowWS(organization.handle, workflowId, { - onInit: (state: WorkflowDOState) => { - console.log("WebSocket received initial state:", state); - try { - // Store workflow metadata - id and type are required, name and handle can be empty - if (state.id && state.type) { - setWorkflowMetadata({ - id: state.id, - name: state.name || "", - handle: state.handle || "", - type: state.type, - }); - } - - const reactFlowNodes = adaptDeploymentNodesToReactFlowNodes( - state.nodes, - nodeTemplates - ); - const reactFlowEdges = state.edges.map((edge: any, index: number) => ({ - id: `e${index}`, - source: edge.source, - target: edge.target, - sourceHandle: edge.sourceOutput, - targetHandle: edge.targetInput, - type: "workflowEdge", - data: { - isValid: true, - sourceType: edge.sourceOutput, - targetType: edge.targetInput, - }, - })); + onInit: (state: WorkflowDOState) => { + console.log("WebSocket received initial state:", state); + try { + // Store workflow 
metadata - id and type are required, name and handle can be empty + if (state.id && state.type) { + setWorkflowMetadata({ + id: state.id, + name: state.name || "", + handle: state.handle || "", + type: state.type, + }); + } + + const reactFlowNodes = adaptDeploymentNodesToReactFlowNodes( + state.nodes, + nodeTemplates + ); + const reactFlowEdges = state.edges.map( + (edge: any, index: number) => ({ + id: `e${index}`, + source: edge.source, + target: edge.target, + sourceHandle: edge.sourceOutput, + targetHandle: edge.targetInput, + type: "workflowEdge", + data: { + isValid: true, + sourceType: edge.sourceOutput, + targetType: edge.targetInput, + }, + }) + ); - setNodes(reactFlowNodes); - setEdges(reactFlowEdges); - setProcessingError(null); - setIsInitializing(false); - } catch (error) { - console.error("Error processing WebSocket state:", error); - setProcessingError("Failed to load state from WebSocket"); + setNodes(reactFlowNodes); + setEdges(reactFlowEdges); + setProcessingError(null); + setIsInitializing(false); + } catch (error) { + console.error("Error processing WebSocket state:", error); + setProcessingError("Failed to load state from WebSocket"); + setIsInitializing(false); + } + }, + onOpen: () => { + console.log("WebSocket connected"); + setIsWSConnected(true); + }, + onClose: () => { + console.log("WebSocket disconnected"); + setIsWSConnected(false); + }, + onError: (error) => { + console.error("WebSocket error:", error); + setSavingError(`WebSocket error: ${error}`); + setProcessingError(`WebSocket error: ${error}`); setIsInitializing(false); - } - }, - onOpen: () => { - console.log("WebSocket connected"); - setIsWSConnected(true); - }, - onClose: () => { - console.log("WebSocket disconnected"); - setIsWSConnected(false); - }, - onError: (error) => { - console.error("WebSocket error:", error); - setSavingError(`WebSocket error: ${error}`); - setProcessingError(`WebSocket error: ${error}`); - setIsInitializing(false); - }, - onExecutionUpdate: 
(execution: WorkflowExecution) => { - console.log("WebSocket received execution update:", execution); - if (onExecutionUpdate) { - // Add workflowId to the execution object - onExecutionUpdate({ - ...execution, - workflowId: workflowId, - }); - } - }, + }, }); wsRef.current = ws; @@ -150,8 +132,9 @@ export function useEditableWorkflow({ wsRef.current = null; } }; + // eslint-disable-next-line react-compiler/react-compiler // eslint-disable-next-line react-hooks/exhaustive-deps - }, [enableWebSocket, workflowId, organization?.handle]); + }, [workflowId, organization?.handle]); const saveWorkflowInternal = useCallback( async ( @@ -164,8 +147,7 @@ export function useEditableWorkflow({ } setSavingError(null); - // If WebSocket is enabled and connected, use it instead of REST API - if (enableWebSocket && wsRef.current?.isConnected()) { + if (wsRef.current?.isConnected()) { try { const workflowNodes = nodesToSave.map((node) => { const incomingEdges = edgesToSave.filter( @@ -182,15 +164,16 @@ export function useEditableWorkflow({ const isConnected = incomingEdges.some( (edge) => edge.targetHandle === input.id ); - const parameterBase: Omit & { value?: any } = - { - name: input.id, - type: input.type as ParameterType["type"], - description: input.name, - hidden: input.hidden, - required: input.required, - repeated: input.repeated, - }; + const parameterBase: Omit & { + value?: any; + } = { + name: input.id, + type: input.type as ParameterType["type"], + description: input.name, + hidden: input.hidden, + required: input.required, + repeated: input.repeated, + }; if (!isConnected && typeof input.value !== "undefined") { parameterBase.value = input.value; } @@ -219,16 +202,16 @@ export function useEditableWorkflow({ return; } catch (error) { console.error("Error saving via WebSocket:", error); - setSavingError("Failed to save via WebSocket, falling back to REST API"); - // Fall through to REST API + setSavingError("Failed to save via WebSocket"); } } - // WebSocket not 
available - cannot save - console.warn("WebSocket not available, workflow changes may not be saved"); + console.warn( + "WebSocket not available, workflow changes may not be saved" + ); setSavingError("WebSocket not connected. Please refresh the page."); }, - [workflowId, organization, enableWebSocket] + [workflowId] ); const saveWorkflow = useMemo( @@ -241,14 +224,6 @@ export function useEditableWorkflow({ [saveWorkflowInternal] ); - const startExecution = useCallback((executionId: string) => { - if (wsRef.current?.isConnected()) { - wsRef.current.executeWorkflow(executionId); - } else { - console.warn("WebSocket not connected, cannot start execution via WebSocket"); - } - }, []); - return { nodes, edges, @@ -257,7 +232,6 @@ export function useEditableWorkflow({ savingError, saveWorkflow, isWSConnected, - startExecution, workflowMetadata, }; } diff --git a/apps/web/src/pages/editor-page.tsx b/apps/web/src/pages/editor-page.tsx index 438638aa..cfee25fc 100644 --- a/apps/web/src/pages/editor-page.tsx +++ b/apps/web/src/pages/editor-page.tsx @@ -1,3 +1,4 @@ +import type { WorkflowType } from "@dafthunk/types"; import type { Connection, Edge, Node } from "@xyflow/react"; import { ReactFlowProvider } from "@xyflow/react"; import { useCallback, useEffect, useMemo, useState } from "react"; @@ -23,7 +24,6 @@ import type { WorkflowExecution, WorkflowNodeType, } from "@/components/workflow/workflow-types"; -import type { WorkflowType } from "@dafthunk/types"; import { useEditableWorkflow } from "@/hooks/use-editable-workflow"; import { useOrgUrl } from "@/hooks/use-org-url"; import { usePageBreadcrumbs } from "@/hooks/use-page"; @@ -54,14 +54,6 @@ export function EditorPage() { const [isEmailTriggerDialogOpen, setIsEmailTriggerDialogOpen] = useState(false); - // No longer fetching workflow via REST - using WebSocket in use-editable-workflow - // const { - // workflow: currentWorkflow, - // workflowError: workflowDetailsError, - // isWorkflowLoading: 
isWorkflowDetailsLoading, - // } = useWorkflow(id || null, { revalidateOnFocus: false }); - - // We need workflowMetadata early, but useEditableWorkflow needs nodeTemplates // Fetch all node types initially (no filter) const { nodeTypes, nodeTypesError, isNodeTypesLoading } = useNodeTypes( undefined, // Fetch all node types initially @@ -115,7 +107,6 @@ export function EditorPage() { } = useEditableWorkflow({ workflowId: id, nodeTemplates, - enableWebSocket: true, }); // Now we can use workflowMetadata for cron trigger @@ -298,18 +289,6 @@ export function EditorPage() { setWorkflowBuilderKey(Date.now()); }; - // No longer using REST fetch for workflow details - handled by WebSocket - // if (workflowDetailsError) { - // return ( - // - // ); - // } - if (nodeTypesError) { return ( t.type === depNode.type); - const icon = depNode.icon || template?.icon || "circle"; // prefer persisted icon + const icon = depNode.icon || template?.icon || "circle"; // fallback icon return { id: depNode.id, From c0a3571e7c739f31509db754338ea174724c85b5 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Fri, 3 Oct 2025 23:19:06 +0200 Subject: [PATCH 03/29] refactor: rename workflow-related types and update WebSocket service for improved clarity and consistency --- apps/api/src/durable-objects/workflow-do.ts | 30 ++++++------ apps/web/src/hooks/use-editable-workflow.ts | 6 +-- ...service.ts => durable-workflow-service.ts} | 26 +++++------ packages/types/src/workflow.ts | 46 +++++++++---------- 4 files changed, 54 insertions(+), 54 deletions(-) rename apps/web/src/services/{workflow-do-service.ts => durable-workflow-service.ts} (92%) diff --git a/apps/api/src/durable-objects/workflow-do.ts b/apps/api/src/durable-objects/workflow-do.ts index be5f3151..dde7effc 100644 --- a/apps/api/src/durable-objects/workflow-do.ts +++ b/apps/api/src/durable-objects/workflow-do.ts @@ -1,10 +1,10 @@ import { - WorkflowDOAckMessage, - WorkflowDOErrorMessage, - WorkflowDOInitMessage, - WorkflowDOMessage, - 
WorkflowDOState, - WorkflowDOUpdateMessage, + WorkflowAckMessage, + WorkflowErrorMessage, + WorkflowInitMessage, + WorkflowMessage, + WorkflowState, + WorkflowUpdateMessage, WorkflowType, } from "@dafthunk/types"; import { DurableObject } from "cloudflare:workers"; @@ -135,7 +135,7 @@ export class WorkflowDO extends DurableObject { /** * Get state from DO storage (internal use) */ - private async getStateInternal(): Promise { + private async getStateInternal(): Promise { const statesCursor = this.sql.exec( "SELECT nodes, edges, timestamp FROM states WHERE id = ?", "default" @@ -166,7 +166,7 @@ export class WorkflowDO extends DurableObject { /** * Get state (public API) */ - async getState(): Promise { + async getState(): Promise { return await this.getStateInternal(); } @@ -286,7 +286,7 @@ export class WorkflowDO extends DurableObject { this.ctx.acceptWebSocket(server); // Send initial state - let initState: WorkflowDOState; + let initState: WorkflowState; try { initState = await this.getState(); } catch { @@ -301,7 +301,7 @@ export class WorkflowDO extends DurableObject { timestamp: Date.now(), }; } - const initMessage: WorkflowDOInitMessage = { + const initMessage: WorkflowInitMessage = { type: "init", state: initState, }; @@ -316,21 +316,21 @@ export class WorkflowDO extends DurableObject { async webSocketMessage(ws: WebSocket, message: string | ArrayBuffer) { try { if (typeof message !== "string") { - const errorMsg: WorkflowDOErrorMessage = { + const errorMsg: WorkflowErrorMessage = { error: "Expected string message", }; ws.send(JSON.stringify(errorMsg)); return; } - const data = JSON.parse(message) as WorkflowDOMessage; + const data = JSON.parse(message) as WorkflowMessage; if ("type" in data && data.type === "update") { - const updateMsg = data as WorkflowDOUpdateMessage; + const updateMsg = data as WorkflowUpdateMessage; await this.updateState(updateMsg.nodes, updateMsg.edges); // Optionally echo back confirmation - const ackMsg: WorkflowDOAckMessage = { + 
const ackMsg: WorkflowAckMessage = { type: "ack", timestamp: Date.now(), }; @@ -338,7 +338,7 @@ export class WorkflowDO extends DurableObject { } } catch (error) { console.error("WebSocket message error:", error); - const errorMsg: WorkflowDOErrorMessage = { + const errorMsg: WorkflowErrorMessage = { error: "Failed to process message", details: error instanceof Error ? error.message : "Unknown error", }; diff --git a/apps/web/src/hooks/use-editable-workflow.ts b/apps/web/src/hooks/use-editable-workflow.ts index e1ac5450..79b4b1bd 100644 --- a/apps/web/src/hooks/use-editable-workflow.ts +++ b/apps/web/src/hooks/use-editable-workflow.ts @@ -10,9 +10,9 @@ import type { } from "@/components/workflow/workflow-types"; import { connectWorkflowWS, - WorkflowDOState, + WorkflowState, WorkflowWebSocket, -} from "@/services/workflow-do-service"; +} from "@/services/durable-workflow-service.ts"; import { adaptDeploymentNodesToReactFlowNodes } from "@/utils/utils"; import { debounce } from "@/utils/utils"; @@ -63,7 +63,7 @@ export function useEditableWorkflow({ } const ws = connectWorkflowWS(organization.handle, workflowId, { - onInit: (state: WorkflowDOState) => { + onInit: (state: WorkflowState) => { console.log("WebSocket received initial state:", state); try { // Store workflow metadata - id and type are required, name and handle can be empty diff --git a/apps/web/src/services/workflow-do-service.ts b/apps/web/src/services/durable-workflow-service.ts similarity index 92% rename from apps/web/src/services/workflow-do-service.ts rename to apps/web/src/services/durable-workflow-service.ts index 1ef0f962..77d4257a 100644 --- a/apps/web/src/services/workflow-do-service.ts +++ b/apps/web/src/services/durable-workflow-service.ts @@ -1,28 +1,28 @@ import type { Edge, Node, - WorkflowDOAckMessage, - WorkflowDOErrorMessage, - WorkflowDOExecutionUpdateMessage, - WorkflowDOInitMessage, - WorkflowDOState, - WorkflowDOUpdateMessage, + WorkflowAckMessage, + WorkflowErrorMessage, + 
WorkflowExecutionUpdateMessage, + WorkflowInitMessage, + WorkflowState, + WorkflowUpdateMessage, WorkflowExecution, } from "@dafthunk/types"; import { getApiBaseUrl } from "@/config/api"; // Re-export for convenience -export type { WorkflowDOState }; +export type { WorkflowState }; type WebSocketMessage = - | WorkflowDOInitMessage - | WorkflowDOAckMessage - | WorkflowDOErrorMessage - | WorkflowDOExecutionUpdateMessage; + | WorkflowInitMessage + | WorkflowAckMessage + | WorkflowErrorMessage + | WorkflowExecutionUpdateMessage; export interface WorkflowWSOptions { - onInit?: (state: WorkflowDOState) => void; + onInit?: (state: WorkflowState) => void; onAck?: (timestamp: number) => void; onError?: (error: string) => void; onClose?: () => void; @@ -130,7 +130,7 @@ export class WorkflowWebSocket { } try { - const updateMsg: WorkflowDOUpdateMessage = { + const updateMsg: WorkflowUpdateMessage = { type: "update", nodes, edges, diff --git a/packages/types/src/workflow.ts b/packages/types/src/workflow.ts index aefcf527..4bdd2cb7 100644 --- a/packages/types/src/workflow.ts +++ b/packages/types/src/workflow.ts @@ -467,61 +467,61 @@ export interface GetCronTriggerResponse { export type UpsertCronTriggerResponse = GetCronTriggerResponse; /** - * WebSocket message types for Durable Object real-time sync + * WebSocket message types for websocket synchronization */ /** - * Workflow state stored in Durable Object + * Workflow state */ -export interface WorkflowDOState extends Workflow { +export interface WorkflowState extends Workflow { timestamp: number; } /** - * Message sent from DO to client with initial state + * Message sent from server to client with initial state */ -export interface WorkflowDOInitMessage { +export interface WorkflowInitMessage { type: "init"; - state: WorkflowDOState; + state: WorkflowState; } /** - * Message sent from client to DO to update state + * Message sent from client to server to update state */ -export interface WorkflowDOUpdateMessage { +export 
interface WorkflowUpdateMessage { type: "update"; nodes: Node[]; edges: Edge[]; } /** - * Acknowledgment message sent from DO to client + * Acknowledgment message sent from server to client */ -export interface WorkflowDOAckMessage { +export interface WorkflowAckMessage { type: "ack"; timestamp: number; } /** - * Error message sent from DO to client + * Error message sent from server to client */ -export interface WorkflowDOErrorMessage { +export interface WorkflowErrorMessage { error: string; details?: string; } /** - * Message sent from client to DO to start workflow execution + * Message sent from client to server to start workflow execution */ -export interface WorkflowDOExecuteMessage { +export interface WorkflowExecuteMessage { type: "execute"; executionId: string; } /** - * Message sent from DO to client with execution progress updates + * Message sent from server to client with execution progress updates */ -export interface WorkflowDOExecutionUpdateMessage { +export interface WorkflowExecutionUpdateMessage { type: "execution_update"; executionId: string; status: WorkflowExecutionStatus; @@ -532,10 +532,10 @@ export interface WorkflowDOExecutionUpdateMessage { /** * All possible WebSocket messages */ -export type WorkflowDOMessage = - | WorkflowDOInitMessage - | WorkflowDOUpdateMessage - | WorkflowDOAckMessage - | WorkflowDOErrorMessage - | WorkflowDOExecuteMessage - | WorkflowDOExecutionUpdateMessage; +export type WorkflowMessage = + | WorkflowInitMessage + | WorkflowUpdateMessage + | WorkflowAckMessage + | WorkflowErrorMessage + | WorkflowExecuteMessage + | WorkflowExecutionUpdateMessage; From b6dbccc15e9caf10a472df472420bdf134a8c90e Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Fri, 3 Oct 2025 23:20:33 +0200 Subject: [PATCH 04/29] refactor: reorganize workflow-related type imports for better structure and consistency --- apps/api/src/durable-objects/workflow-do.ts | 2 +- apps/web/src/services/durable-workflow-service.ts | 2 +- 2 files changed, 2 
insertions(+), 2 deletions(-) diff --git a/apps/api/src/durable-objects/workflow-do.ts b/apps/api/src/durable-objects/workflow-do.ts index dde7effc..ac278086 100644 --- a/apps/api/src/durable-objects/workflow-do.ts +++ b/apps/api/src/durable-objects/workflow-do.ts @@ -4,8 +4,8 @@ import { WorkflowInitMessage, WorkflowMessage, WorkflowState, - WorkflowUpdateMessage, WorkflowType, + WorkflowUpdateMessage, } from "@dafthunk/types"; import { DurableObject } from "cloudflare:workers"; diff --git a/apps/web/src/services/durable-workflow-service.ts b/apps/web/src/services/durable-workflow-service.ts index 77d4257a..77541c65 100644 --- a/apps/web/src/services/durable-workflow-service.ts +++ b/apps/web/src/services/durable-workflow-service.ts @@ -3,11 +3,11 @@ import type { Node, WorkflowAckMessage, WorkflowErrorMessage, + WorkflowExecution, WorkflowExecutionUpdateMessage, WorkflowInitMessage, WorkflowState, WorkflowUpdateMessage, - WorkflowExecution, } from "@dafthunk/types"; import { getApiBaseUrl } from "@/config/api"; From eec2b936aca1dd30164b03773c74a729851b4da0 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Fri, 3 Oct 2025 23:45:08 +0200 Subject: [PATCH 05/29] refactor: rename WorkflowDO to DurableWorkflow and update related bindings for improved clarity --- apps/api/src/context.ts | 4 ++-- .../{workflow-do.ts => durable-workflow.ts} | 18 +++++------------- apps/api/src/index.ts | 2 +- apps/api/src/routes/workflows.ts | 8 ++++---- apps/api/src/routes/ws.ts | 4 ++-- apps/api/src/utils/encryption.test.ts | 4 ++-- apps/api/wrangler.jsonc | 10 +++++----- .../src/services/durable-workflow-service.ts | 5 ----- packages/types/src/workflow.ts | 9 --------- 9 files changed, 21 insertions(+), 43 deletions(-) rename apps/api/src/durable-objects/{workflow-do.ts => durable-workflow.ts} (95%) diff --git a/apps/api/src/context.ts b/apps/api/src/context.ts index fed53f5a..2da1be9d 100644 --- a/apps/api/src/context.ts +++ b/apps/api/src/context.ts @@ -1,6 +1,6 @@ import { 
JWTTokenPayload } from "@dafthunk/types"; -import { WorkflowDO } from "./durable-objects/workflow-do"; +import { DurableWorkflow } from "./durable-objects/durable-workflow"; import { RuntimeParams } from "./runtime/runtime"; export interface Bindings { @@ -10,7 +10,7 @@ export interface Bindings { RATE_LIMIT_AUTH: RateLimit; RATE_LIMIT_EXECUTE: RateLimit; EXECUTE: Workflow; - WORKFLOW_DO: DurableObjectNamespace; + DURABLE_WORKFLOW: DurableObjectNamespace; RESSOURCES: R2Bucket; DATASETS: R2Bucket; DATASETS_AUTORAG: string; diff --git a/apps/api/src/durable-objects/workflow-do.ts b/apps/api/src/durable-objects/durable-workflow.ts similarity index 95% rename from apps/api/src/durable-objects/workflow-do.ts rename to apps/api/src/durable-objects/durable-workflow.ts index ac278086..a8d89733 100644 --- a/apps/api/src/durable-objects/workflow-do.ts +++ b/apps/api/src/durable-objects/durable-workflow.ts @@ -1,5 +1,4 @@ import { - WorkflowAckMessage, WorkflowErrorMessage, WorkflowInitMessage, WorkflowMessage, @@ -13,7 +12,7 @@ import { Bindings } from "../context"; import { createDatabase } from "../db/index"; import { getWorkflow, updateWorkflow } from "../db/queries"; -export class WorkflowDO extends DurableObject { +export class DurableWorkflow extends DurableObject { private sql: SqlStorage; private workflowId: string = ""; @@ -49,7 +48,7 @@ export class WorkflowDO extends DurableObject { } /** - * Load workflow from database into DO storage if not already loaded + * Load workflow from database into durable storage if not already loaded */ private async ensureLoaded( workflowId: string, @@ -133,7 +132,7 @@ export class WorkflowDO extends DurableObject { } /** - * Get state from DO storage (internal use) + * Get state from durable storage (internal use) */ private async getStateInternal(): Promise { const statesCursor = this.sql.exec( @@ -195,7 +194,7 @@ export class WorkflowDO extends DurableObject { } /** - * Persist DO state back to database + * Persist durable state 
back to database */ private async persistToDatabase(): Promise { if (!this.dirty || !this.workflowId || !this.organizationId) { @@ -228,7 +227,7 @@ export class WorkflowDO extends DurableObject { * Alarm handler - called when alarm fires */ async alarm(): Promise { - console.log("Alarm fired for WorkflowDO"); + console.log("Alarm fired for DurableWorkflow"); await this.persistToDatabase(); // If still dirty (updates happened during persist), schedule another alarm @@ -328,13 +327,6 @@ export class WorkflowDO extends DurableObject { if ("type" in data && data.type === "update") { const updateMsg = data as WorkflowUpdateMessage; await this.updateState(updateMsg.nodes, updateMsg.edges); - - // Optionally echo back confirmation - const ackMsg: WorkflowAckMessage = { - type: "ack", - timestamp: Date.now(), - }; - ws.send(JSON.stringify(ackMsg)); } } catch (error) { console.error("WebSocket message error:", error); diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index 21ad19a7..19b35d04 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -1,5 +1,5 @@ import { Hono } from "hono"; -export { WorkflowDO } from "./durable-objects/workflow-do"; +export { DurableWorkflow } from "./durable-objects/durable-workflow"; export { Runtime } from "./runtime/runtime"; import auth from "./auth"; import { ApiContext } from "./context"; diff --git a/apps/api/src/routes/workflows.ts b/apps/api/src/routes/workflows.ts index c0073914..50d2d095 100644 --- a/apps/api/src/routes/workflows.ts +++ b/apps/api/src/routes/workflows.ts @@ -166,8 +166,8 @@ workflowRoutes.get("/:id", jwtMiddleware, async (c) => { try { // Get workflow from Durable Object - const doId = c.env.WORKFLOW_DO.idFromName(`${userId}-${id}`); - const stub = c.env.WORKFLOW_DO.get(doId); + const doId = c.env.DURABLE_WORKFLOW.idFromName(`${userId}-${id}`); + const stub = c.env.DURABLE_WORKFLOW.get(doId); // @ts-ignore const workflowData = await stub.getState(); @@ -528,10 +528,10 @@ workflowRoutes.post( 
userId = "api"; // Use a placeholder for API-triggered executions } - const doId = c.env.WORKFLOW_DO.idFromName( + const doId = c.env.DURABLE_WORKFLOW.idFromName( `${userId}-${workflowIdOrHandle}` ); - const stub = c.env.WORKFLOW_DO.get(doId); + const stub = c.env.DURABLE_WORKFLOW.get(doId); const state = await stub.getState(); if (state) { diff --git a/apps/api/src/routes/ws.ts b/apps/api/src/routes/ws.ts index 263c7770..6f578030 100644 --- a/apps/api/src/routes/ws.ts +++ b/apps/api/src/routes/ws.ts @@ -30,8 +30,8 @@ wsRoutes.get("/", jwtMiddleware, async (c) => { } // Create a unique DO ID for this user + workflow combination - const doId = c.env.WORKFLOW_DO.idFromName(`${userId}-${workflowId}`); - const stub = c.env.WORKFLOW_DO.get(doId); + const doId = c.env.DURABLE_WORKFLOW.idFromName(`${userId}-${workflowId}`); + const stub = c.env.DURABLE_WORKFLOW.get(doId); // Reconstruct request with required query params for DO const url = new URL(c.req.url); diff --git a/apps/api/src/utils/encryption.test.ts b/apps/api/src/utils/encryption.test.ts index f322a3a2..39d75c46 100644 --- a/apps/api/src/utils/encryption.test.ts +++ b/apps/api/src/utils/encryption.test.ts @@ -5,7 +5,7 @@ import { beforeEach, describe, expect, it } from "vitest"; import { Bindings } from "../context"; -import { WorkflowDO } from "../durable-objects/workflow-do"; +import { DurableWorkflow } from "../durable-objects/durable-workflow"; import { decryptSecret, encryptSecret } from "./encryption"; // Mock Bindings for testing @@ -21,7 +21,7 @@ const createMockEnv = (masterKey?: string): Bindings => ({ RATE_LIMIT_AUTH: {} as RateLimit, RATE_LIMIT_EXECUTE: {} as RateLimit, EXECUTE: {} as Workflow, - WORKFLOW_DO: {} as DurableObjectNamespace, + DURABLE_WORKFLOW: {} as DurableObjectNamespace, RESSOURCES: {} as R2Bucket, DATASETS: {} as R2Bucket, DATASETS_AUTORAG: "", diff --git a/apps/api/wrangler.jsonc b/apps/api/wrangler.jsonc index a9a1a8bd..7918aa7e 100644 --- a/apps/api/wrangler.jsonc +++ 
b/apps/api/wrangler.jsonc @@ -70,8 +70,8 @@ "durable_objects": { "bindings": [ { - "name": "WORKFLOW_DO", - "class_name": "WorkflowDO", + "name": "DURABLE_WORKFLOW", + "class_name": "DurableWorkflow", "script_name": "dafthunk-api" } ] @@ -79,7 +79,7 @@ "migrations": [ { "tag": "v1", - "new_sqlite_classes": ["WorkflowDO"] + "new_sqlite_classes": ["DurableWorkflow"] } ], "unsafe": { @@ -173,8 +173,8 @@ "durable_objects": { "bindings": [ { - "name": "WORKFLOW_DO", - "class_name": "WorkflowDO", + "name": "DURABLE_WORKFLOW", + "class_name": "DurableWorkflow", "script_name": "dafthunk-api" } ] diff --git a/apps/web/src/services/durable-workflow-service.ts b/apps/web/src/services/durable-workflow-service.ts index 77541c65..ab489fae 100644 --- a/apps/web/src/services/durable-workflow-service.ts +++ b/apps/web/src/services/durable-workflow-service.ts @@ -1,7 +1,6 @@ import type { Edge, Node, - WorkflowAckMessage, WorkflowErrorMessage, WorkflowExecution, WorkflowExecutionUpdateMessage, @@ -17,13 +16,11 @@ export type { WorkflowState }; type WebSocketMessage = | WorkflowInitMessage - | WorkflowAckMessage | WorkflowErrorMessage | WorkflowExecutionUpdateMessage; export interface WorkflowWSOptions { onInit?: (state: WorkflowState) => void; - onAck?: (timestamp: number) => void; onError?: (error: string) => void; onClose?: () => void; onOpen?: () => void; @@ -74,8 +71,6 @@ export class WorkflowWebSocket { this.options.onError?.(message.error || ""); } else if (message.type === "init") { this.options.onInit?.(message.state); - } else if (message.type === "ack") { - this.options.onAck?.(message.timestamp); } else if (message.type === "execution_update") { this.options.onExecutionUpdate?.({ id: message.executionId, diff --git a/packages/types/src/workflow.ts b/packages/types/src/workflow.ts index 4bdd2cb7..f89a65d0 100644 --- a/packages/types/src/workflow.ts +++ b/packages/types/src/workflow.ts @@ -494,14 +494,6 @@ export interface WorkflowUpdateMessage { edges: Edge[]; } -/** - * 
Acknowledgment message sent from server to client - */ -export interface WorkflowAckMessage { - type: "ack"; - timestamp: number; -} - /** * Error message sent from server to client */ @@ -535,7 +527,6 @@ export interface WorkflowExecutionUpdateMessage { export type WorkflowMessage = | WorkflowInitMessage | WorkflowUpdateMessage - | WorkflowAckMessage | WorkflowErrorMessage | WorkflowExecuteMessage | WorkflowExecutionUpdateMessage; From 0a27289d0c0eb3e1700eed608e05174feb976db0 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sat, 4 Oct 2025 00:08:31 +0200 Subject: [PATCH 06/29] feat: enhance workflow state management with node drag handling and SQLite data persistence --- .../src/durable-objects/durable-workflow.ts | 37 +++++++++------ .../components/workflow/use-workflow-state.ts | 47 +++++++++++++------ .../components/workflow/workflow-builder.tsx | 2 + .../components/workflow/workflow-canvas.tsx | 6 +++ apps/web/src/hooks/use-editable-workflow.ts | 28 +++++------ apps/web/src/pages/editor-page.tsx | 20 ++++---- 6 files changed, 88 insertions(+), 52 deletions(-) diff --git a/apps/api/src/durable-objects/durable-workflow.ts b/apps/api/src/durable-objects/durable-workflow.ts index a8d89733..57ea437a 100644 --- a/apps/api/src/durable-objects/durable-workflow.ts +++ b/apps/api/src/durable-objects/durable-workflow.ts @@ -62,6 +62,23 @@ export class DurableWorkflow extends DurableObject { this.organizationId = organizationId; try { + // First check if SQLite storage already has data (from previous session) + // This is important because SQLite storage persists across cold starts + const existingMetadata = this.sql + .exec("SELECT workflow_id FROM metadata WHERE id = ?", "default") + .toArray(); + + if (existingMetadata.length > 0) { + // SQLite storage has data, use it (it's more recent than D1) + console.log( + `Using existing SQLite storage for workflow ${workflowId}` + ); + this.loaded = true; + return; + } + + // SQLite storage is empty, load from D1 database + 
console.log(`Loading workflow ${workflowId} from D1 database`); const db = createDatabase(this.env.DB); const workflow = await getWorkflow(db, workflowId, organizationId); @@ -77,17 +94,11 @@ export class DurableWorkflow extends DurableObject { : Date.now() : Date.now(); - // Upsert metadata + // Insert metadata if (workflow) { this.sql.exec( `INSERT INTO metadata (id, workflow_id, organization_id, workflow_name, workflow_handle, workflow_type) - VALUES (?, ?, ?, ?, ?, ?) - ON CONFLICT(id) DO UPDATE SET - workflow_id = excluded.workflow_id, - organization_id = excluded.organization_id, - workflow_name = excluded.workflow_name, - workflow_handle = excluded.workflow_handle, - workflow_type = excluded.workflow_type`, + VALUES (?, ?, ?, ?, ?, ?)`, "default", workflowId, organizationId, @@ -109,14 +120,10 @@ export class DurableWorkflow extends DurableObject { ); } - // Upsert states + // Insert states this.sql.exec( `INSERT INTO states (id, nodes, edges, timestamp) - VALUES (?, ?, ?, ?) - ON CONFLICT(id) DO UPDATE SET - nodes = excluded.nodes, - edges = excluded.edges, - timestamp = excluded.timestamp`, + VALUES (?, ?, ?, ?)`, "default", nodes, edges, @@ -326,6 +333,8 @@ export class DurableWorkflow extends DurableObject { if ("type" in data && data.type === "update") { const updateMsg = data as WorkflowUpdateMessage; + + // Update with the new state await this.updateState(updateMsg.nodes, updateMsg.edges); } } catch (error) { diff --git a/apps/web/src/components/workflow/use-workflow-state.ts b/apps/web/src/components/workflow/use-workflow-state.ts index 5d6c0c0b..e5811c81 100644 --- a/apps/web/src/components/workflow/use-workflow-state.ts +++ b/apps/web/src/components/workflow/use-workflow-state.ts @@ -65,6 +65,10 @@ interface UseWorkflowStateReturn { onConnect: OnConnect; onConnectStart: OnConnectStart; onConnectEnd: OnConnectEnd; + onNodeDragStop: ( + event: React.MouseEvent, + node: ReactFlowNode + ) => void; connectionValidationState: ConnectionValidationState; 
isValidConnection: IsValidConnection>; handleAddNode: () => void; @@ -95,11 +99,12 @@ interface UseWorkflowStateReturn { // Helper functions to replace workflowNodeStateService const stripExecutionFields = ( data: WorkflowNodeType -): Omit & { +): Omit & { outputs: Omit[]; inputs: Omit[]; } => { - const { executionState, error, ...rest } = data; + // Exclude nodeTemplates from comparison - it's UI metadata that shouldn't trigger persistence + const { executionState, error, nodeTemplates, ...rest } = data; return { ...rest, @@ -339,33 +344,36 @@ export function useWorkflowState({ } }, [initialEdges, readonly, setEdges]); - // Effect to notify parent of changes for nodes + // Effect to notify parent of changes for nodes (excluding position changes during drag) useEffect(() => { if (readonly) return; const nodeCountChanged = nodes.length !== initialNodes.length; - const hasDataOrPositionChanges = nodes.some((node) => { - const initialNode = initialNodes.find((n) => n.id === node.id); - if (!initialNode) return true; - if ( - node.position.x !== initialNode.position.x || - node.position.y !== initialNode.position.y - ) { - return true; + // Check for data changes (excluding position) + let hasDataChanges = false; + nodes.forEach((node) => { + const initialNode = initialNodes.find((n) => n.id === node.id); + if (!initialNode) { + hasDataChanges = true; + return; } + // Check data changes (not position) const nodeData = stripExecutionFields(node.data); const initialNodeData = stripExecutionFields(initialNode.data); - return JSON.stringify(nodeData) !== JSON.stringify(initialNodeData); + if (JSON.stringify(nodeData) !== JSON.stringify(initialNodeData)) { + hasDataChanges = true; + } }); - // Check for deleted nodes by looking for initialNodes that don't exist in the current nodes + // Check for deleted nodes const hasDeletedNodes = initialNodes.some( (initialNode) => !nodes.some((node) => node.id === initialNode.id) ); - if (nodeCountChanged || hasDataOrPositionChanges || 
hasDeletedNodes) { + // Save for data changes or node add/delete (position changes handled by onNodeDragStop) + if (nodeCountChanged || hasDataChanges || hasDeletedNodes) { onNodesChangePersistCallback?.(nodes); } }, [nodes, onNodesChangePersistCallback, initialNodes, readonly]); @@ -432,6 +440,16 @@ export function useWorkflowState({ setConnectionValidationState("default"); }, [readonly]); + // Handle node drag stop - save positions after drag completes + const onNodeDragStop = useCallback( + (_event: React.MouseEvent, _node: ReactFlowNode) => { + if (readonly) return; + // Save with current node positions after drag completes + onNodesChangePersistCallback?.(nodes); + }, + [readonly, nodes, onNodesChangePersistCallback] + ); + // Function to validate connection based on type compatibility const isValidConnection = useCallback( (connection: any) => { @@ -1128,6 +1146,7 @@ export function useWorkflowState({ onConnect, onConnectStart, onConnectEnd, + onNodeDragStop: readonly ? () => {} : onNodeDragStop, connectionValidationState, isValidConnection, handleAddNode, diff --git a/apps/web/src/components/workflow/workflow-builder.tsx b/apps/web/src/components/workflow/workflow-builder.tsx index 469879b0..40dccb18 100644 --- a/apps/web/src/components/workflow/workflow-builder.tsx +++ b/apps/web/src/components/workflow/workflow-builder.tsx @@ -125,6 +125,7 @@ export function WorkflowBuilder({ cutSelected, pasteFromClipboard, hasClipboardData, + onNodeDragStop, } = useWorkflowState({ initialNodes, initialEdges, @@ -431,6 +432,7 @@ export function WorkflowBuilder({ onConnectStart={readonly ? () => {} : onConnectStart} onConnectEnd={readonly ? () => {} : onConnectEnd} onNodeDoubleClick={handleNodeDoubleClick} + onNodeDragStop={onNodeDragStop} onInit={setReactFlowInstance} onAddNode={readonly ? 
undefined : handleAddNode} onAction={ diff --git a/apps/web/src/components/workflow/workflow-canvas.tsx b/apps/web/src/components/workflow/workflow-canvas.tsx index f39fcf22..d7777c0c 100644 --- a/apps/web/src/components/workflow/workflow-canvas.tsx +++ b/apps/web/src/components/workflow/workflow-canvas.tsx @@ -152,6 +152,10 @@ export interface WorkflowCanvasProps { onConnect: OnConnect; onConnectStart: OnConnectStart; onConnectEnd: OnConnectEnd; + onNodeDragStop: ( + event: React.MouseEvent, + node: ReactFlowNode + ) => void; onNodeDoubleClick?: (event: React.MouseEvent) => void; onInit: ( instance: ReactFlowInstance< @@ -695,6 +699,7 @@ export function WorkflowCanvas({ onConnectStart, onConnectEnd, onNodeDoubleClick, + onNodeDragStop, onInit, onAddNode, onAction, @@ -765,6 +770,7 @@ export function WorkflowCanvas({ onConnectStart={readonly ? () => {} : onConnectStart} onConnectEnd={readonly ? () => {} : onConnectEnd} onNodeDoubleClick={onNodeDoubleClick} + onNodeDragStop={onNodeDragStop} nodeTypes={nodeTypes} edgeTypes={edgeTypes} connectionMode={ConnectionMode.Strict} diff --git a/apps/web/src/hooks/use-editable-workflow.ts b/apps/web/src/hooks/use-editable-workflow.ts index 79b4b1bd..a323ba90 100644 --- a/apps/web/src/hooks/use-editable-workflow.ts +++ b/apps/web/src/hooks/use-editable-workflow.ts @@ -1,6 +1,6 @@ import type { Parameter, ParameterType } from "@dafthunk/types"; import type { Edge, Node } from "@xyflow/react"; -import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { useCallback, useEffect, useRef, useState } from "react"; import { useAuth } from "@/components/auth-context"; import type { @@ -14,7 +14,6 @@ import { WorkflowWebSocket, } from "@/services/durable-workflow-service.ts"; import { adaptDeploymentNodesToReactFlowNodes } from "@/utils/utils"; -import { debounce } from "@/utils/utils"; interface UseEditableWorkflowProps { workflowId: string | undefined; @@ -64,9 +63,8 @@ export function useEditableWorkflow({ 
const ws = connectWorkflowWS(organization.handle, workflowId, { onInit: (state: WorkflowState) => { - console.log("WebSocket received initial state:", state); try { - // Store workflow metadata - id and type are required, name and handle can be empty + // Store workflow metadata if (state.id && state.type) { setWorkflowMetadata({ id: state.id, @@ -76,6 +74,7 @@ export function useEditableWorkflow({ }); } + // Convert to ReactFlow format const reactFlowNodes = adaptDeploymentNodesToReactFlowNodes( state.nodes, nodeTemplates @@ -107,11 +106,9 @@ export function useEditableWorkflow({ } }, onOpen: () => { - console.log("WebSocket connected"); setIsWSConnected(true); }, onClose: () => { - console.log("WebSocket disconnected"); setIsWSConnected(false); }, onError: (error) => { @@ -141,6 +138,12 @@ export function useEditableWorkflow({ nodesToSave: Node[], edgesToSave: Edge[] ) => { + // Block saves during initialization to prevent race condition where + // nodeTemplates load before edges, causing empty edges to be saved + if (isInitializing) { + return; + } + if (!workflowId) { setSavingError("Workflow ID is missing, cannot save."); return; @@ -211,18 +214,11 @@ export function useEditableWorkflow({ ); setSavingError("WebSocket not connected. 
Please refresh the page."); }, - [workflowId] + [workflowId, isInitializing] ); - const saveWorkflow = useMemo( - () => - debounce( - (nodes: Node[], edges: Edge[]) => - saveWorkflowInternal(nodes, edges), - 1000 - ), - [saveWorkflowInternal] - ); + // No debouncing needed - WebSocket handles message batching naturally + const saveWorkflow = saveWorkflowInternal; return { nodes, diff --git a/apps/web/src/pages/editor-page.tsx b/apps/web/src/pages/editor-page.tsx index cfee25fc..4fb7975e 100644 --- a/apps/web/src/pages/editor-page.tsx +++ b/apps/web/src/pages/editor-page.tsx @@ -1,7 +1,7 @@ import type { WorkflowType } from "@dafthunk/types"; import type { Connection, Edge, Node } from "@xyflow/react"; import { ReactFlowProvider } from "@xyflow/react"; -import { useCallback, useEffect, useMemo, useState } from "react"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { useNavigate, useParams } from "react-router"; import { toast } from "sonner"; @@ -126,12 +126,14 @@ export function EditorPage() { [deploymentHistory] ); + // Only track nodes since they're used by validateConnection and editorExecuteWorkflow + // Edges are only passed through to saveWorkflow and don't need to be stored const [latestUiNodes, setLatestUiNodes] = useState[]>( [] ); - const [latestUiEdges, setLatestUiEdges] = useState[]>( - [] - ); + + // Store edges in a ref to avoid stale closures without causing re-renders + const latestUiEdgesRef = useRef[]>([]); const handleOpenSetCronDialog = useCallback(() => { mutateDeploymentHistory(); @@ -172,7 +174,7 @@ export function EditorPage() { useEffect(() => { if (initialEdgesForUI) { - setLatestUiEdges(initialEdgesForUI); + latestUiEdgesRef.current = initialEdgesForUI; } }, [initialEdgesForUI]); @@ -180,16 +182,18 @@ export function EditorPage() { (updatedNodesFromUI: Node[]) => { setLatestUiNodes(updatedNodesFromUI); if (workflowMetadata) { - saveWorkflow(updatedNodesFromUI, latestUiEdges); + // Use ref for edges to 
get current value without stale closure + saveWorkflow(updatedNodesFromUI, latestUiEdgesRef.current); } }, - [latestUiEdges, saveWorkflow, workflowMetadata] + [saveWorkflow, workflowMetadata] ); const handleUiEdgesChanged = useCallback( (updatedEdgesFromUI: Edge[]) => { - setLatestUiEdges(updatedEdgesFromUI); + latestUiEdgesRef.current = updatedEdgesFromUI; if (workflowMetadata) { + // Use state for nodes since it's always current in this callback saveWorkflow(latestUiNodes, updatedEdgesFromUI); } }, From 81d651f0c13607668dac8979f82f5e462caee2ae Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sat, 4 Oct 2025 00:09:17 +0200 Subject: [PATCH 07/29] style: simplify console log statement in DurableWorkflow class --- apps/api/src/durable-objects/durable-workflow.ts | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/apps/api/src/durable-objects/durable-workflow.ts b/apps/api/src/durable-objects/durable-workflow.ts index 57ea437a..b47d1b28 100644 --- a/apps/api/src/durable-objects/durable-workflow.ts +++ b/apps/api/src/durable-objects/durable-workflow.ts @@ -70,9 +70,7 @@ export class DurableWorkflow extends DurableObject { if (existingMetadata.length > 0) { // SQLite storage has data, use it (it's more recent than D1) - console.log( - `Using existing SQLite storage for workflow ${workflowId}` - ); + console.log(`Using existing SQLite storage for workflow ${workflowId}`); this.loaded = true; return; } From f8113be7f0cceee1fa057fbfa888a5cce1ded355 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sat, 4 Oct 2025 00:26:41 +0200 Subject: [PATCH 08/29] refactor: streamline DurableWorkflow class by consolidating database interactions and enhancing state management --- .../src/durable-objects/durable-workflow.ts | 249 ++++++++---------- apps/api/src/routes/workflows.ts | 77 ++---- 2 files changed, 136 insertions(+), 190 deletions(-) diff --git a/apps/api/src/durable-objects/durable-workflow.ts b/apps/api/src/durable-objects/durable-workflow.ts index 
b47d1b28..b7312d5f 100644 --- a/apps/api/src/durable-objects/durable-workflow.ts +++ b/apps/api/src/durable-objects/durable-workflow.ts @@ -13,8 +13,10 @@ import { createDatabase } from "../db/index"; import { getWorkflow, updateWorkflow } from "../db/queries"; export class DurableWorkflow extends DurableObject { - private sql: SqlStorage; + private static readonly PERSIST_DELAY_MS = 60_000; + private static readonly STORAGE_ID = "current"; + private sql: SqlStorage; private workflowId: string = ""; private organizationId: string = ""; private loaded: boolean = false; @@ -28,23 +30,18 @@ export class DurableWorkflow extends DurableObject { private initDatabase() { this.sql.exec(` - CREATE TABLE IF NOT EXISTS states ( - id TEXT PRIMARY KEY, + CREATE TABLE IF NOT EXISTS workflow ( + id TEXT PRIMARY KEY DEFAULT 'current', + workflow_id TEXT NOT NULL, + organization_id TEXT NOT NULL, + name TEXT NOT NULL, + handle TEXT NOT NULL, + type TEXT NOT NULL, nodes TEXT NOT NULL, edges TEXT NOT NULL, timestamp INTEGER NOT NULL ) `); - this.sql.exec(` - CREATE TABLE IF NOT EXISTS metadata ( - id TEXT PRIMARY KEY, - workflow_id TEXT NOT NULL, - organization_id TEXT NOT NULL, - workflow_name TEXT NOT NULL, - workflow_handle TEXT NOT NULL, - workflow_type TEXT NOT NULL - ) - `); } /** @@ -64,12 +61,14 @@ export class DurableWorkflow extends DurableObject { try { // First check if SQLite storage already has data (from previous session) // This is important because SQLite storage persists across cold starts - const existingMetadata = this.sql - .exec("SELECT workflow_id FROM metadata WHERE id = ?", "default") + const existing = this.sql + .exec( + "SELECT workflow_id FROM workflow WHERE id = ?", + DurableWorkflow.STORAGE_ID + ) .toArray(); - if (existingMetadata.length > 0) { - // SQLite storage has data, use it (it's more recent than D1) + if (existing.length > 0) { console.log(`Using existing SQLite storage for workflow ${workflowId}`); this.loaded = true; return; @@ -80,49 +79,18 
@@ export class DurableWorkflow extends DurableObject { const db = createDatabase(this.env.DB); const workflow = await getWorkflow(db, workflowId, organizationId); - const nodes = workflow - ? JSON.stringify((workflow.data as any).nodes || []) - : JSON.stringify([]); - const edges = workflow - ? JSON.stringify((workflow.data as any).edges || []) - : JSON.stringify([]); - const timestamp = workflow - ? workflow.updatedAt - ? workflow.updatedAt.getTime() - : Date.now() - : Date.now(); - - // Insert metadata - if (workflow) { - this.sql.exec( - `INSERT INTO metadata (id, workflow_id, organization_id, workflow_name, workflow_handle, workflow_type) - VALUES (?, ?, ?, ?, ?, ?)`, - "default", - workflowId, - organizationId, - workflow.name, - workflow.handle, - ((workflow.data as any).type || "manual") as WorkflowType - ); - } else { - // Minimal metadata for new workflow - this.sql.exec( - `INSERT INTO metadata (id, workflow_id, organization_id, workflow_name, workflow_handle, workflow_type) - VALUES (?, ?, ?, ?, ?, ?)`, - "default", - workflowId, - organizationId, - "New Workflow", - workflowId, - "manual" as WorkflowType - ); - } + const { name, handle, type, nodes, edges, timestamp } = + this.extractWorkflowData(workflow); - // Insert states this.sql.exec( - `INSERT INTO states (id, nodes, edges, timestamp) - VALUES (?, ?, ?, ?)`, - "default", + `INSERT INTO workflow (id, workflow_id, organization_id, name, handle, type, nodes, edges, timestamp) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`, + DurableWorkflow.STORAGE_ID, + workflowId, + organizationId, + name, + handle, + type, nodes, edges, timestamp @@ -136,65 +104,62 @@ export class DurableWorkflow extends DurableObject { this.loaded = true; } - /** - * Get state from durable storage (internal use) - */ - private async getStateInternal(): Promise { - const statesCursor = this.sql.exec( - "SELECT nodes, edges, timestamp FROM states WHERE id = ?", - "default" - ); - const statesRow = statesCursor.toArray()[0]; - - const 
metadataCursor = this.sql.exec( - "SELECT workflow_id as id, workflow_name as name, workflow_handle as handle, workflow_type as type FROM metadata WHERE id = ?", - "default" - ); - const metadataRow = metadataCursor.toArray()[0]; - - if (!statesRow || !metadataRow) { - throw new Error("State or metadata missing; call ensureLoaded first"); - } - + private extractWorkflowData(workflow: any) { return { - id: metadataRow.id as string, - name: metadataRow.name as string, - handle: metadataRow.handle as string, - type: metadataRow.type as WorkflowType, - nodes: JSON.parse(statesRow.nodes as string), - edges: JSON.parse(statesRow.edges as string), - timestamp: statesRow.timestamp as number, + name: workflow?.name || "New Workflow", + handle: workflow?.handle || this.workflowId, + type: (workflow?.data?.type || "manual") as WorkflowType, + nodes: JSON.stringify(workflow?.data?.nodes || []), + edges: JSON.stringify(workflow?.data?.edges || []), + timestamp: workflow?.updatedAt?.getTime() || Date.now(), }; } /** - * Get state (public API) + * Get state from durable storage */ async getState(): Promise { - return await this.getStateInternal(); + const row = this.sql + .exec( + `SELECT workflow_id as id, name, handle, type, nodes, edges, timestamp + FROM workflow WHERE id = ?`, + DurableWorkflow.STORAGE_ID + ) + .toArray()[0]; + + if (!row) { + throw new Error("State missing; call ensureLoaded first"); + } + + return { + id: row.id as string, + name: row.name as string, + handle: row.handle as string, + type: row.type as WorkflowType, + nodes: JSON.parse(row.nodes as string), + edges: JSON.parse(row.edges as string), + timestamp: row.timestamp as number, + }; } async updateState(nodes: unknown[], edges: unknown[]): Promise { const timestamp = Date.now(); this.sql.exec( - `INSERT INTO states (id, nodes, edges, timestamp) - VALUES (?, ?, ?, ?) 
- ON CONFLICT(id) DO UPDATE SET - nodes = excluded.nodes, - edges = excluded.edges, - timestamp = excluded.timestamp`, - "default", + `UPDATE workflow SET nodes = ?, edges = ?, timestamp = ? WHERE id = ?`, JSON.stringify(nodes), JSON.stringify(edges), - timestamp + timestamp, + DurableWorkflow.STORAGE_ID ); this.dirty = true; - // Schedule an alarm to persist to database in 60 seconds if not already scheduled + // Schedule an alarm to persist to database if not already scheduled const currentAlarm = await this.ctx.storage.getAlarm(); if (currentAlarm === null) { - await this.ctx.storage.setAlarm(Date.now() + 60000); + await this.ctx.storage.setAlarm( + Date.now() + DurableWorkflow.PERSIST_DELAY_MS + ); } } @@ -237,74 +202,58 @@ export class DurableWorkflow extends DurableObject { // If still dirty (updates happened during persist), schedule another alarm if (this.dirty) { - await this.ctx.storage.setAlarm(Date.now() + 60000); + await this.ctx.storage.setAlarm( + Date.now() + DurableWorkflow.PERSIST_DELAY_MS + ); } } async fetch(request: Request): Promise { const url = new URL(request.url); - - // Extract workflowId and organizationId from query params const workflowId = url.searchParams.get("workflowId") || ""; const organizationId = url.searchParams.get("organizationId") || ""; - // Ensure workflow is loaded from database if (workflowId && organizationId) { await this.ensureLoaded(workflowId, organizationId); } - // Handle GET request for workflow state if (url.pathname === "/state" && request.method === "GET") { - try { - const state = await this.getState(); - - return new Response(JSON.stringify(state), { - headers: { "Content-Type": "application/json" }, - }); - } catch (error) { - console.error("Error getting workflow state:", error); - return new Response( - JSON.stringify({ - error: "Failed to get workflow state", - details: error instanceof Error ? 
error.message : "Unknown error", - }), - { - status: 500, - headers: { "Content-Type": "application/json" }, - } - ); - } + return this.handleStateRequest(); } - // Handle WebSocket connections (ensureLoaded called earlier if params present) const upgradeHeader = request.headers.get("Upgrade"); - if (!upgradeHeader || upgradeHeader !== "websocket") { - return new Response("Expected WebSocket or /state GET request", { - status: 426, - }); + if (upgradeHeader === "websocket") { + return this.handleWebSocketUpgrade(); + } + + return new Response("Expected /state GET or WebSocket upgrade", { + status: 400, + }); + } + + private async handleStateRequest(): Promise { + try { + const state = await this.getState(); + return Response.json(state); + } catch (error) { + console.error("Error getting workflow state:", error); + return Response.json( + { + error: "Failed to get workflow state", + details: error instanceof Error ? error.message : "Unknown error", + }, + { status: 500 } + ); } + } + private async handleWebSocketUpgrade(): Promise { const webSocketPair = new WebSocketPair(); const [client, server] = Object.values(webSocketPair); this.ctx.acceptWebSocket(server); - // Send initial state - let initState: WorkflowState; - try { - initState = await this.getState(); - } catch { - // Fallback minimal state - initState = { - id: workflowId, - name: "New Workflow", - handle: workflowId, - type: "manual", - nodes: [], - edges: [], - timestamp: Date.now(), - }; - } + const initState = await this.getInitialState(); const initMessage: WorkflowInitMessage = { type: "init", state: initState, @@ -317,6 +266,22 @@ export class DurableWorkflow extends DurableObject { }); } + private async getInitialState(): Promise { + try { + return await this.getState(); + } catch { + return { + id: this.workflowId, + name: "New Workflow", + handle: this.workflowId, + type: "manual", + nodes: [], + edges: [], + timestamp: Date.now(), + }; + } + } + async webSocketMessage(ws: WebSocket, message: 
string | ArrayBuffer) { try { if (typeof message !== "string") { diff --git a/apps/api/src/routes/workflows.ts b/apps/api/src/routes/workflows.ts index 50d2d095..12d11731 100644 --- a/apps/api/src/routes/workflows.ts +++ b/apps/api/src/routes/workflows.ts @@ -164,51 +164,40 @@ workflowRoutes.get("/:id", jwtMiddleware, async (c) => { return c.json({ error: "Unauthorized" }, 401); } + const db = createDatabase(c.env.DB); + try { - // Get workflow from Durable Object + // Try Durable Object first const doId = c.env.DURABLE_WORKFLOW.idFromName(`${userId}-${id}`); - const stub = c.env.DURABLE_WORKFLOW.get(doId); - // @ts-ignore - const workflowData = await stub.getState(); - - if (!workflowData) { - // If DO doesn't have it, fall back to database - const db = createDatabase(c.env.DB); - const workflow = await getWorkflow(db, id, organizationId); - if (!workflow) { - return c.json({ error: "Workflow not found" }, 404); - } - - const workflowData = workflow.data; + const stub: any = c.env.DURABLE_WORKFLOW.get(doId); + let workflowData: any = null; + try { + workflowData = await stub.getState(); + } catch { + // DO doesn't have data yet, will fall back to database + workflowData = null; + } - const response: GetWorkflowResponse = { - id: workflow.id, - name: workflow.name, - handle: workflow.handle, - type: workflowData.type, - createdAt: workflow.createdAt, - updatedAt: workflow.updatedAt, - nodes: workflowData.nodes || [], - edges: workflowData.edges || [], - }; + // Get metadata from database for timestamps + const workflow = await getWorkflow(db, id, organizationId); - return c.json(response); + if (!workflowData && !workflow) { + return c.json({ error: "Workflow not found" }, 404); } - // Get metadata from database for createdAt/updatedAt - const db = createDatabase(c.env.DB); - const workflow = await getWorkflow(db, id, organizationId); + // Extract values to avoid deep type instantiation + const nodes: any = workflowData?.nodes || workflow!.data.nodes || []; + const 
edges: any = workflowData?.edges || workflow!.data.edges || []; const response: GetWorkflowResponse = { - id: workflowData.id, - name: workflowData.name, - handle: workflowData.handle, - type: workflowData.type, + id: workflowData?.id || workflow!.id, + name: workflowData?.name || workflow!.name, + handle: workflowData?.handle || workflow!.handle, + type: workflowData?.type || workflow!.data.type, createdAt: workflow?.createdAt || new Date(), updatedAt: workflow?.updatedAt || new Date(), - // @ts-ignore - nodes: workflowData.nodes || [], - edges: workflowData.edges || [], + nodes, + edges, }; return c.json(response); @@ -519,22 +508,14 @@ workflowRoutes.post( let deploymentId: string | undefined; if (version === "dev") { - // Get workflow data from Durable Object first - let userId: string; - const jwtPayload = c.get("jwtPayload") as JWTTokenPayload | undefined; - if (jwtPayload) { - userId = jwtPayload.sub || "anonymous"; - } else { - userId = "api"; // Use a placeholder for API-triggered executions - } - + // Get workflow data from Durable Object first (userId already defined above) const doId = c.env.DURABLE_WORKFLOW.idFromName( `${userId}-${workflowIdOrHandle}` ); - const stub = c.env.DURABLE_WORKFLOW.get(doId); - const state = await stub.getState(); + const stub: any = c.env.DURABLE_WORKFLOW.get(doId); - if (state) { + try { + const state = await stub.getState(); workflowData = { type: state.type, nodes: state.nodes || [], @@ -545,7 +526,7 @@ workflowRoutes.post( name: state.name, handle: state.handle, }; - } else { + } catch { // Fallback to database workflow = await getWorkflow(db, workflowIdOrHandle, organizationId); if (!workflow) { From 8c2e1238ae46334f9dc7ef71caa983ffbafe8205 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sat, 4 Oct 2025 01:27:01 +0200 Subject: [PATCH 09/29] fix: update WebSocket endpoint to require workflowId as a URL parameter and adjust error handling for unauthorized access --- apps/api/src/routes/ws.ts | 16 ++++------------ 
.../web/src/services/durable-workflow-service.ts | 2 +- 2 files changed, 5 insertions(+), 13 deletions(-) diff --git a/apps/api/src/routes/ws.ts b/apps/api/src/routes/ws.ts index 6f578030..8903a432 100644 --- a/apps/api/src/routes/ws.ts +++ b/apps/api/src/routes/ws.ts @@ -6,7 +6,7 @@ import { ApiContext } from "../context"; const wsRoutes = new Hono(); // WebSocket endpoint for real-time workflow state synchronization -wsRoutes.get("/", jwtMiddleware, async (c) => { +wsRoutes.get("/:workflowId", jwtMiddleware, async (c) => { const upgradeHeader = c.req.header("Upgrade"); if (!upgradeHeader || upgradeHeader !== "websocket") { @@ -14,19 +14,11 @@ wsRoutes.get("/", jwtMiddleware, async (c) => { } const userId = c.var.jwtPayload?.sub; - const workflowId = c.req.query("workflowId"); + const workflowId = c.req.param("workflowId"); const organizationId = c.get("organizationId")!; - if (!userId || !workflowId || !organizationId) { - console.error("Missing userId, workflowId or organizationId:", { - userId, - workflowId, - organizationId, - }); - return c.json( - { error: "Missing userId, workflowId or organizationId" }, - 400 - ); + if (!userId || !organizationId) { + return c.json({ error: "Unauthorized" }, 401); } // Create a unique DO ID for this user + workflow combination diff --git a/apps/web/src/services/durable-workflow-service.ts b/apps/web/src/services/durable-workflow-service.ts index ab489fae..ec3cca0b 100644 --- a/apps/web/src/services/durable-workflow-service.ts +++ b/apps/web/src/services/durable-workflow-service.ts @@ -50,7 +50,7 @@ export class WorkflowWebSocket { const apiBaseUrl = getApiBaseUrl(); const wsBaseUrl = apiBaseUrl.replace(/^http/, "ws"); - const url = `${wsBaseUrl}/${this.orgHandle}/ws?workflowId=${this.workflowId}`; + const url = `${wsBaseUrl}/${this.orgHandle}/ws/${this.workflowId}`; try { this.ws = new WebSocket(url); From 580b1ba78e2f215a9b610e35809f01fe6b65f1aa Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sat, 4 Oct 2025 11:08:56 
+0200 Subject: [PATCH 10/29] refactor: replace state with refs for nodes and edges in EditorPage to prevent stale closures and unnecessary re-renders --- apps/web/src/pages/editor-page.tsx | 34 +++++++++++++----------------- 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/apps/web/src/pages/editor-page.tsx b/apps/web/src/pages/editor-page.tsx index 4fb7975e..21cc497e 100644 --- a/apps/web/src/pages/editor-page.tsx +++ b/apps/web/src/pages/editor-page.tsx @@ -126,13 +126,9 @@ export function EditorPage() { [deploymentHistory] ); - // Only track nodes since they're used by validateConnection and editorExecuteWorkflow - // Edges are only passed through to saveWorkflow and don't need to be stored - const [latestUiNodes, setLatestUiNodes] = useState[]>( - [] - ); - - // Store edges in a ref to avoid stale closures without causing re-renders + // Use refs for both nodes and edges to avoid stale closures and unnecessary re-renders + // Both are only used for validation, execution, and saving - no UI rendering depends on them + const latestUiNodesRef = useRef[]>([]); const latestUiEdgesRef = useRef[]>([]); const handleOpenSetCronDialog = useCallback(() => { @@ -168,7 +164,7 @@ export function EditorPage() { useEffect(() => { if (initialNodesForUI) { - setLatestUiNodes(initialNodesForUI); + latestUiNodesRef.current = initialNodesForUI; } }, [initialNodesForUI]); @@ -180,9 +176,9 @@ export function EditorPage() { const handleUiNodesChanged = useCallback( (updatedNodesFromUI: Node[]) => { - setLatestUiNodes(updatedNodesFromUI); + latestUiNodesRef.current = updatedNodesFromUI; if (workflowMetadata) { - // Use ref for edges to get current value without stale closure + // Use refs for both to get current values without stale closures saveWorkflow(updatedNodesFromUI, latestUiEdgesRef.current); } }, @@ -193,11 +189,11 @@ export function EditorPage() { (updatedEdgesFromUI: Edge[]) => { latestUiEdgesRef.current = updatedEdgesFromUI; if (workflowMetadata) { - // Use 
state for nodes since it's always current in this callback - saveWorkflow(latestUiNodes, updatedEdgesFromUI); + // Use refs for both to get current values without stale closures + saveWorkflow(latestUiNodesRef.current, updatedEdgesFromUI); } }, - [latestUiNodes, saveWorkflow, workflowMetadata] + [saveWorkflow, workflowMetadata] ); const { @@ -223,10 +219,10 @@ export function EditorPage() { const validateConnection = useCallback( (connection: Connection) => { - const sourceNode = latestUiNodes.find( + const sourceNode = latestUiNodesRef.current.find( (node) => node.id === connection.source ); - const targetNode = latestUiNodes.find( + const targetNode = latestUiNodesRef.current.find( (node) => node.id === connection.target ); if (!sourceNode || !targetNode) return false; @@ -246,7 +242,7 @@ export function EditorPage() { return typesMatch; }, - [latestUiNodes] + [] // No dependencies since we're using refs ); const editorExecuteWorkflow = useCallback( @@ -257,12 +253,12 @@ export function EditorPage() { return executeWorkflow( workflowIdFromBuilder, onExecutionFromBuilder, - latestUiNodes, + latestUiNodesRef.current, nodeTemplates as any, workflowMetadata?.type ); }, - [executeWorkflow, latestUiNodes, nodeTemplates, workflowMetadata?.type] + [executeWorkflow, nodeTemplates, workflowMetadata?.type] // No latestUiNodes dependency since we're using refs ); const handleRetryLoading = () => { @@ -383,7 +379,7 @@ export function EditorPage() { orgHandle={orgHandle} workflowId={id!} deploymentVersion="dev" - nodes={latestUiNodes} + nodes={latestUiNodesRef.current} nodeTemplates={nodeTemplates} /> )} From d03ddd7460087d38384623ec4369f1b275449d69 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sat, 4 Oct 2025 13:59:27 +0200 Subject: [PATCH 11/29] Create CLAUDE.md --- CLAUDE.md | 162 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 162 insertions(+) create mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 
00000000..a77431d2 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,162 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +Dafthunk is a visual workflow automation platform built on Cloudflare infrastructure (Workers, D1, R2, AI). Users create workflows by connecting 50+ node types in a visual editor (React Flow). + +**Monorepo structure** (pnpm workspaces): +- `apps/api` - Backend (Hono on Cloudflare Workers) +- `apps/web` - Frontend (React 19 + React Router v7 + Vite) +- `packages/types` - Shared TypeScript types +- `packages/utils` - Shared utilities + +## Development Commands + +### Common commands +```bash +pnpm dev # Start all services +pnpm build # Build all packages and apps +pnpm typecheck # Type check all workspaces +pnpm lint # Lint and type check +pnpm fix # Auto-fix linting + format +pnpm test # Run tests + +# Workspace-specific (use --filter) +pnpm --filter '@dafthunk/api' dev # API dev server (port 3001) +pnpm --filter '@dafthunk/web' dev # Web dev server (port 3000) +pnpm --filter '@dafthunk/api' test:integration # Integration tests + +# Database migrations +pnpm --filter '@dafthunk/api' db:migrate # Apply migrations locally +pnpm --filter '@dafthunk/api' db:generate # Generate new migrations +pnpm --filter '@dafthunk/api' db:prod:migrate # Apply to production +``` + +## Architecture + +### Backend: API (`apps/api/`) + +**Routes** (`src/routes/`) +- Organized by feature (workflows, executions, objects, etc.) 
+- Stateless: each request is self-contained +- Auth in `src/auth.ts` (JWT + API Keys) +- Multi-tenant: always scope by `organizationId` from context (`c.get("organizationId")`) +- Validate with Zod + `@hono/zod-validator` + +**Database** (`src/db/`) +- D1 (SQLite) + Drizzle ORM +- Schema: `schema/index.ts` +- Queries: `queries.ts` +- Migrations: `migrations/` (generate with `drizzle-kit`) +- Convention: `snake_case` in SQL, `camelCase` in TypeScript + +**Workflow Runtime** (`src/runtime/`) +- `runtime.ts` - Cloudflare Workflows for durable execution +- Durable Objects manage state +- `object-store.ts` - Node outputs (R2 + transient storage) +- Executes nodes by graph topology + +**Node System** (`src/nodes/`) +- node types in category folders: `text/`, `image/`, `audio/`, `browser/`, `logic/`, `math/`, `javascript/`, `anthropic/`, `openai/`, `gemini/`, `3d/`, `date/`, `document/`, `email/`, `geo/`, `json/`, `net/`, `parameter/`, `rag/` +- Registry: `base-node-registry.ts` and `cloudflare-node-registry.ts` +- All implement common interface from `packages/types` + +### Frontend: Web (`apps/web/`) + +**Structure** +- Pages: `src/pages/` (one file per route) +- Components: `src/components/` (`ui/` = shadcn/ui, `workflow/` = React Flow editor) +- Routes: `src/routes.tsx` (React Router v7) +- Services: `src/services/` (API clients) + +**Patterns** +- Data fetching: SWR (consolidate related calls) +- Styling: Tailwind CSS only (use `cn()` utility) +- State: Avoid `useEffect`, prefer derived state + +### Shared: Types (`packages/types/`) +- Single source of truth for data structures +- Backend serializes, frontend deserializes/validates +- Ensures type safety across stack + +## Design Principles + +When writing or refactoring code: + +### Simplify Interfaces +- Export only what's necessary—hide everything else +- Keep public APIs small (fewer exports = less complexity) +- Use barrel exports (`index.ts`) to define module boundaries +- If a function/class can't be described 
in one sentence, split it + +### Manage Complexity +- Push complexity into lower-level modules with simple APIs +- Eliminate unnecessary state, conditionals, and abstractions +- Keep related logic together; separate unrelated concerns +- Depend on interfaces/types, not concrete implementations + +### Prioritize Maintainability +- Write the calling code you want first, then implement to match +- After code works, refactor to simplify the interface +- Use comments for *why* (design decisions, trade-offs), not *what* (code explains itself) +- Front-load architectural decisions (module boundaries, data flow); defer details (naming, parameters) + +## Code Guidelines + +### TypeScript Style +- Strict mode: never use `any` or `unknown` +- Prefer `interface` over `type` for object shapes +- Always use `import type` for type-only imports +- Use early returns to avoid deep nesting + +### Naming Conventions +``` +Files: kebab-case.tsx +Functions: camelCase() +Hooks: useCamelCase() +Event handlers: handleClick() +Components: PascalCase +``` + +### React (apps/web) +```tsx +// ✓ Correct +import { Link } from 'react-router' // not react-router-dom +import type { User } from '@dafthunk/types' +export function MyComponent() { ... } // functional component + +// Data fetching +const { data } = useSWR(['/users', '/posts'], fetchAll) // consolidate + +// Styling +
+ +// Avoid useEffect - prefer derived state or move logic outside React +``` + +### Hono API (apps/api) +```ts +// Routes by feature +const workflows = new Hono() +workflows.get('/', zValidator('query', schema), (c) => { + const orgId = c.get('organizationId') // always scope by org + // ... +}) +app.route('/workflows', workflows) + +// Database +const users = sqliteTable('users', { + createdAt: text('created_at'), // snake_case in DB +}) +export type User = InferModel +``` + +### Testing +```ts +// Unit tests: *.test.ts +import { describe, it, expect } from 'vitest' + +// Integration tests: *.integration.ts +``` From f98983ef772fc5a023bef4f74b30fa15ecb352bf Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sat, 4 Oct 2025 21:32:48 +0200 Subject: [PATCH 12/29] Simplify state management in durable object --- .../src/durable-objects/durable-workflow.ts | 230 ++++++------------ 1 file changed, 72 insertions(+), 158 deletions(-) diff --git a/apps/api/src/durable-objects/durable-workflow.ts b/apps/api/src/durable-objects/durable-workflow.ts index b7312d5f..647c2e98 100644 --- a/apps/api/src/durable-objects/durable-workflow.ts +++ b/apps/api/src/durable-objects/durable-workflow.ts @@ -1,4 +1,6 @@ import { + Edge, + Node, WorkflowErrorMessage, WorkflowInitMessage, WorkflowMessage, @@ -13,95 +15,40 @@ import { createDatabase } from "../db/index"; import { getWorkflow, updateWorkflow } from "../db/queries"; export class DurableWorkflow extends DurableObject { - private static readonly PERSIST_DELAY_MS = 60_000; - private static readonly STORAGE_ID = "current"; - - private sql: SqlStorage; - private workflowId: string = ""; - private organizationId: string = ""; - private loaded: boolean = false; - private dirty: boolean = false; + private state: WorkflowState | null = null; + private workflowId: string | null = null; + private organizationId: string | null = null; constructor(ctx: DurableObjectState, env: Bindings) { super(ctx, env); - this.sql = this.ctx.storage.sql; - 
this.initDatabase(); - } - - private initDatabase() { - this.sql.exec(` - CREATE TABLE IF NOT EXISTS workflow ( - id TEXT PRIMARY KEY DEFAULT 'current', - workflow_id TEXT NOT NULL, - organization_id TEXT NOT NULL, - name TEXT NOT NULL, - handle TEXT NOT NULL, - type TEXT NOT NULL, - nodes TEXT NOT NULL, - edges TEXT NOT NULL, - timestamp INTEGER NOT NULL - ) - `); } /** - * Load workflow from database into durable storage if not already loaded + * Load workflow from D1 database into memory */ - private async ensureLoaded( + private async loadState( workflowId: string, organizationId: string ): Promise { - if (this.loaded) { - return; - } - this.workflowId = workflowId; this.organizationId = organizationId; - try { - // First check if SQLite storage already has data (from previous session) - // This is important because SQLite storage persists across cold starts - const existing = this.sql - .exec( - "SELECT workflow_id FROM workflow WHERE id = ?", - DurableWorkflow.STORAGE_ID - ) - .toArray(); - - if (existing.length > 0) { - console.log(`Using existing SQLite storage for workflow ${workflowId}`); - this.loaded = true; - return; - } + console.log(`Loading workflow ${workflowId} from D1 database`); + const db = createDatabase(this.env.DB); + const workflow = await getWorkflow(db, workflowId, organizationId); - // SQLite storage is empty, load from D1 database - console.log(`Loading workflow ${workflowId} from D1 database`); - const db = createDatabase(this.env.DB); - const workflow = await getWorkflow(db, workflowId, organizationId); - - const { name, handle, type, nodes, edges, timestamp } = - this.extractWorkflowData(workflow); - - this.sql.exec( - `INSERT INTO workflow (id, workflow_id, organization_id, name, handle, type, nodes, edges, timestamp) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`, - DurableWorkflow.STORAGE_ID, - workflowId, - organizationId, - name, - handle, - type, - nodes, - edges, - timestamp - ); + const { name, handle, type, nodes, edges, timestamp } 
= + this.extractWorkflowData(workflow); - this.dirty = false; - } catch (error) { - console.error("Error loading workflow:", error); - } - - this.loaded = true; + this.state = { + id: workflowId, + name, + handle, + type, + nodes, + edges, + timestamp, + }; } private extractWorkflowData(workflow: any) { @@ -109,112 +56,90 @@ export class DurableWorkflow extends DurableObject { name: workflow?.name || "New Workflow", handle: workflow?.handle || this.workflowId, type: (workflow?.data?.type || "manual") as WorkflowType, - nodes: JSON.stringify(workflow?.data?.nodes || []), - edges: JSON.stringify(workflow?.data?.edges || []), + nodes: workflow?.data?.nodes || [], + edges: workflow?.data?.edges || [], timestamp: workflow?.updatedAt?.getTime() || Date.now(), }; } /** - * Get state from durable storage + * Get state from memory */ async getState(): Promise { - const row = this.sql - .exec( - `SELECT workflow_id as id, name, handle, type, nodes, edges, timestamp - FROM workflow WHERE id = ?`, - DurableWorkflow.STORAGE_ID - ) - .toArray()[0]; - - if (!row) { - throw new Error("State missing; call ensureLoaded first"); + if (!this.state) { + throw new Error("State not loaded"); } - return { - id: row.id as string, - name: row.name as string, - handle: row.handle as string, - type: row.type as WorkflowType, - nodes: JSON.parse(row.nodes as string), - edges: JSON.parse(row.edges as string), - timestamp: row.timestamp as number, - }; + return this.state; } - async updateState(nodes: unknown[], edges: unknown[]): Promise { + async updateState(nodes: Node[], edges: Edge[]): Promise { + if (!this.state) { + throw new Error("State not loaded"); + } + const timestamp = Date.now(); - this.sql.exec( - `UPDATE workflow SET nodes = ?, edges = ?, timestamp = ? 
WHERE id = ?`, - JSON.stringify(nodes), - JSON.stringify(edges), + this.state = { + ...this.state, + nodes, + edges, timestamp, - DurableWorkflow.STORAGE_ID - ); - - this.dirty = true; + }; - // Schedule an alarm to persist to database if not already scheduled - const currentAlarm = await this.ctx.storage.getAlarm(); - if (currentAlarm === null) { - await this.ctx.storage.setAlarm( - Date.now() + DurableWorkflow.PERSIST_DELAY_MS - ); - } + // Persist immediately to D1 + await this.persistToDatabase(); } /** - * Persist durable state back to database + * Persist state back to D1 database */ private async persistToDatabase(): Promise { - if (!this.dirty || !this.workflowId || !this.organizationId) { + if (!this.state || !this.workflowId || !this.organizationId) { return; } try { - const state = await this.getState(); const db = createDatabase(this.env.DB); await updateWorkflow(db, this.workflowId, this.organizationId, { - name: state.name, + name: this.state.name, data: { - id: state.id, - name: state.name, - handle: state.handle, - type: state.type, - nodes: state.nodes, - edges: state.edges, + id: this.state.id, + name: this.state.name, + handle: this.state.handle, + type: this.state.type, + nodes: this.state.nodes, + edges: this.state.edges, }, }); - this.dirty = false; - console.log(`Persisted workflow ${this.workflowId} to database`); + console.log(`Persisted workflow ${this.workflowId} to D1 database`); } catch (error) { console.error("Error persisting workflow to database:", error); } } - /** - * Alarm handler - called when alarm fires - */ - async alarm(): Promise { - console.log("Alarm fired for DurableWorkflow"); - await this.persistToDatabase(); - - // If still dirty (updates happened during persist), schedule another alarm - if (this.dirty) { - await this.ctx.storage.setAlarm( - Date.now() + DurableWorkflow.PERSIST_DELAY_MS - ); - } - } - async fetch(request: Request): Promise { const url = new URL(request.url); const workflowId = 
url.searchParams.get("workflowId") || ""; const organizationId = url.searchParams.get("organizationId") || ""; - if (workflowId && organizationId) { - await this.ensureLoaded(workflowId, organizationId); + if (!workflowId || !organizationId) { + return new Response("Missing workflowId or organizationId", { + status: 400, + }); + } + + try { + await this.loadState(workflowId, organizationId); + } catch (error) { + console.error("Error loading workflow:", error); + return Response.json( + { + error: "Failed to load workflow", + details: error instanceof Error ? error.message : "Unknown error", + }, + { status: 404 } + ); } if (url.pathname === "/state" && request.method === "GET") { @@ -267,19 +192,7 @@ export class DurableWorkflow extends DurableObject { } private async getInitialState(): Promise { - try { - return await this.getState(); - } catch { - return { - id: this.workflowId, - name: "New Workflow", - handle: this.workflowId, - type: "manual", - nodes: [], - edges: [], - timestamp: Date.now(), - }; - } + return await this.getState(); } async webSocketMessage(ws: WebSocket, message: string | ArrayBuffer) { @@ -311,11 +224,12 @@ export class DurableWorkflow extends DurableObject { } async webSocketClose( - ws: WebSocket, - code: number, - reason: string, + _ws: WebSocket, + _code: number, + _reason: string, _wasClean: boolean ) { - ws.close(code, reason); + // Persist any pending changes to D1 before closing + await this.persistToDatabase(); } } From 6d5cb3ac395e9182bbebfe9ddc13b96bb09de546 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sat, 4 Oct 2025 22:27:22 +0200 Subject: [PATCH 13/29] Refactor durable-workflow into user-session --- apps/api/src/context.ts | 4 +- apps/api/src/db/queries.ts | 31 ++ .../src/durable-objects/durable-workflow.ts | 235 --------------- apps/api/src/durable-objects/user-session.ts | 268 ++++++++++++++++++ apps/api/src/index.ts | 2 +- apps/api/src/routes/workflows.ts | 8 +- apps/api/src/routes/ws.ts | 24 +- 
apps/api/src/utils/encryption.test.ts | 4 +- apps/api/wrangler.jsonc | 10 +- 9 files changed, 326 insertions(+), 260 deletions(-) delete mode 100644 apps/api/src/durable-objects/durable-workflow.ts create mode 100644 apps/api/src/durable-objects/user-session.ts diff --git a/apps/api/src/context.ts b/apps/api/src/context.ts index 2da1be9d..10febf1b 100644 --- a/apps/api/src/context.ts +++ b/apps/api/src/context.ts @@ -1,6 +1,6 @@ import { JWTTokenPayload } from "@dafthunk/types"; -import { DurableWorkflow } from "./durable-objects/durable-workflow"; +import { UserSession } from "./durable-objects/user-session"; import { RuntimeParams } from "./runtime/runtime"; export interface Bindings { @@ -10,7 +10,7 @@ export interface Bindings { RATE_LIMIT_AUTH: RateLimit; RATE_LIMIT_EXECUTE: RateLimit; EXECUTE: Workflow; - DURABLE_WORKFLOW: DurableObjectNamespace; + USER_SESSION: DurableObjectNamespace; RESSOURCES: R2Bucket; DATASETS: R2Bucket; DATASETS_AUTORAG: string; diff --git a/apps/api/src/db/queries.ts b/apps/api/src/db/queries.ts index a5d098bc..880d1f23 100644 --- a/apps/api/src/db/queries.ts +++ b/apps/api/src/db/queries.ts @@ -302,6 +302,37 @@ export async function getWorkflow( return workflow?.workflows; } +/** + * Get a workflow that the user has access to through their organization memberships + * + * @param db Database instance + * @param workflowIdOrHandle Workflow ID or handle + * @param userId User ID to check access for + * @returns The workflow and organization ID if user has access, undefined otherwise + */ +export async function getWorkflowWithUserAccess( + db: ReturnType, + workflowIdOrHandle: string, + userId: string +): Promise<{ workflow: WorkflowRow; organizationId: string } | undefined> { + const [result] = await db + .select({ + workflow: workflows, + organizationId: workflows.organizationId, + }) + .from(workflows) + .innerJoin(memberships, eq(workflows.organizationId, memberships.organizationId)) + .where( + and( + eq(memberships.userId, userId), 
+ getWorkflowCondition(workflowIdOrHandle) + ) + ) + .limit(1); + + return result ? { workflow: result.workflow, organizationId: result.organizationId } : undefined; +} + /** * Get the latest deployment for a workflow * diff --git a/apps/api/src/durable-objects/durable-workflow.ts b/apps/api/src/durable-objects/durable-workflow.ts deleted file mode 100644 index 647c2e98..00000000 --- a/apps/api/src/durable-objects/durable-workflow.ts +++ /dev/null @@ -1,235 +0,0 @@ -import { - Edge, - Node, - WorkflowErrorMessage, - WorkflowInitMessage, - WorkflowMessage, - WorkflowState, - WorkflowType, - WorkflowUpdateMessage, -} from "@dafthunk/types"; -import { DurableObject } from "cloudflare:workers"; - -import { Bindings } from "../context"; -import { createDatabase } from "../db/index"; -import { getWorkflow, updateWorkflow } from "../db/queries"; - -export class DurableWorkflow extends DurableObject { - private state: WorkflowState | null = null; - private workflowId: string | null = null; - private organizationId: string | null = null; - - constructor(ctx: DurableObjectState, env: Bindings) { - super(ctx, env); - } - - /** - * Load workflow from D1 database into memory - */ - private async loadState( - workflowId: string, - organizationId: string - ): Promise { - this.workflowId = workflowId; - this.organizationId = organizationId; - - console.log(`Loading workflow ${workflowId} from D1 database`); - const db = createDatabase(this.env.DB); - const workflow = await getWorkflow(db, workflowId, organizationId); - - const { name, handle, type, nodes, edges, timestamp } = - this.extractWorkflowData(workflow); - - this.state = { - id: workflowId, - name, - handle, - type, - nodes, - edges, - timestamp, - }; - } - - private extractWorkflowData(workflow: any) { - return { - name: workflow?.name || "New Workflow", - handle: workflow?.handle || this.workflowId, - type: (workflow?.data?.type || "manual") as WorkflowType, - nodes: workflow?.data?.nodes || [], - edges: 
workflow?.data?.edges || [], - timestamp: workflow?.updatedAt?.getTime() || Date.now(), - }; - } - - /** - * Get state from memory - */ - async getState(): Promise { - if (!this.state) { - throw new Error("State not loaded"); - } - - return this.state; - } - - async updateState(nodes: Node[], edges: Edge[]): Promise { - if (!this.state) { - throw new Error("State not loaded"); - } - - const timestamp = Date.now(); - this.state = { - ...this.state, - nodes, - edges, - timestamp, - }; - - // Persist immediately to D1 - await this.persistToDatabase(); - } - - /** - * Persist state back to D1 database - */ - private async persistToDatabase(): Promise { - if (!this.state || !this.workflowId || !this.organizationId) { - return; - } - - try { - const db = createDatabase(this.env.DB); - await updateWorkflow(db, this.workflowId, this.organizationId, { - name: this.state.name, - data: { - id: this.state.id, - name: this.state.name, - handle: this.state.handle, - type: this.state.type, - nodes: this.state.nodes, - edges: this.state.edges, - }, - }); - - console.log(`Persisted workflow ${this.workflowId} to D1 database`); - } catch (error) { - console.error("Error persisting workflow to database:", error); - } - } - - async fetch(request: Request): Promise { - const url = new URL(request.url); - const workflowId = url.searchParams.get("workflowId") || ""; - const organizationId = url.searchParams.get("organizationId") || ""; - - if (!workflowId || !organizationId) { - return new Response("Missing workflowId or organizationId", { - status: 400, - }); - } - - try { - await this.loadState(workflowId, organizationId); - } catch (error) { - console.error("Error loading workflow:", error); - return Response.json( - { - error: "Failed to load workflow", - details: error instanceof Error ? 
error.message : "Unknown error", - }, - { status: 404 } - ); - } - - if (url.pathname === "/state" && request.method === "GET") { - return this.handleStateRequest(); - } - - const upgradeHeader = request.headers.get("Upgrade"); - if (upgradeHeader === "websocket") { - return this.handleWebSocketUpgrade(); - } - - return new Response("Expected /state GET or WebSocket upgrade", { - status: 400, - }); - } - - private async handleStateRequest(): Promise { - try { - const state = await this.getState(); - return Response.json(state); - } catch (error) { - console.error("Error getting workflow state:", error); - return Response.json( - { - error: "Failed to get workflow state", - details: error instanceof Error ? error.message : "Unknown error", - }, - { status: 500 } - ); - } - } - - private async handleWebSocketUpgrade(): Promise { - const webSocketPair = new WebSocketPair(); - const [client, server] = Object.values(webSocketPair); - - this.ctx.acceptWebSocket(server); - - const initState = await this.getInitialState(); - const initMessage: WorkflowInitMessage = { - type: "init", - state: initState, - }; - server.send(JSON.stringify(initMessage)); - - return new Response(null, { - status: 101, - webSocket: client, - }); - } - - private async getInitialState(): Promise { - return await this.getState(); - } - - async webSocketMessage(ws: WebSocket, message: string | ArrayBuffer) { - try { - if (typeof message !== "string") { - const errorMsg: WorkflowErrorMessage = { - error: "Expected string message", - }; - ws.send(JSON.stringify(errorMsg)); - return; - } - - const data = JSON.parse(message) as WorkflowMessage; - - if ("type" in data && data.type === "update") { - const updateMsg = data as WorkflowUpdateMessage; - - // Update with the new state - await this.updateState(updateMsg.nodes, updateMsg.edges); - } - } catch (error) { - console.error("WebSocket message error:", error); - const errorMsg: WorkflowErrorMessage = { - error: "Failed to process message", - details: 
error instanceof Error ? error.message : "Unknown error", - }; - ws.send(JSON.stringify(errorMsg)); - } - } - - async webSocketClose( - _ws: WebSocket, - _code: number, - _reason: string, - _wasClean: boolean - ) { - // Persist any pending changes to D1 before closing - await this.persistToDatabase(); - } -} diff --git a/apps/api/src/durable-objects/user-session.ts b/apps/api/src/durable-objects/user-session.ts new file mode 100644 index 00000000..bb63b47d --- /dev/null +++ b/apps/api/src/durable-objects/user-session.ts @@ -0,0 +1,268 @@ +import { + Edge, + Node, + WorkflowErrorMessage, + WorkflowInitMessage, + WorkflowMessage, + WorkflowState, + WorkflowType, + WorkflowUpdateMessage, +} from "@dafthunk/types"; +import { DurableObject } from "cloudflare:workers"; + +import { Bindings } from "../context"; +import { createDatabase } from "../db/index"; +import { getWorkflowWithUserAccess, updateWorkflow } from "../db/queries"; + +interface WorkflowSession { + state: WorkflowState; + organizationId: string; +} + +export class UserSession extends DurableObject { + private workflows: Map = new Map(); + private webSocketWorkflows: Map = new Map(); + + constructor(ctx: DurableObjectState, env: Bindings) { + super(ctx, env); + } + + /** + * Load workflow from D1 database with user access verification + */ + private async loadState( + workflowId: string, + userId: string + ): Promise { + console.log(`Loading workflow ${workflowId} for user ${userId}`); + const db = createDatabase(this.env.DB); + const result = await getWorkflowWithUserAccess(db, workflowId, userId); + + if (!result) { + throw new Error(`User ${userId} does not have access to workflow ${workflowId}`); + } + + const { workflow, organizationId } = result; + + const { name, handle, type, nodes, edges, timestamp } = + this.extractWorkflowData(workflow, workflowId); + + const state: WorkflowState = { + id: workflowId, + name, + handle, + type, + nodes, + edges, + timestamp, + }; + + this.workflows.set(workflowId, 
{ state, organizationId }); + } + + private extractWorkflowData(workflow: any, workflowId: string) { + return { + name: workflow?.name || "New Workflow", + handle: workflow?.handle || workflowId, + type: (workflow?.data?.type || "manual") as WorkflowType, + nodes: workflow?.data?.nodes || [], + edges: workflow?.data?.edges || [], + timestamp: workflow?.updatedAt?.getTime() || Date.now(), + }; + } + + /** + * Get state from memory for a specific workflow + */ + async getState(workflowId: string): Promise { + const session = this.workflows.get(workflowId); + if (!session) { + throw new Error(`Workflow ${workflowId} not loaded`); + } + + return session.state; + } + + async updateState(workflowId: string, nodes: Node[], edges: Edge[]): Promise { + const session = this.workflows.get(workflowId); + if (!session) { + throw new Error(`Workflow ${workflowId} not loaded`); + } + + const timestamp = Date.now(); + const updatedState: WorkflowState = { + ...session.state, + nodes, + edges, + timestamp, + }; + + this.workflows.set(workflowId, { ...session, state: updatedState }); + + // Persist immediately to D1 + await this.persistToDatabase(workflowId); + } + + /** + * Persist state back to D1 database + */ + private async persistToDatabase(workflowId: string): Promise { + const session = this.workflows.get(workflowId); + + if (!session) { + return; + } + + try { + const db = createDatabase(this.env.DB); + await updateWorkflow(db, workflowId, session.organizationId, { + name: session.state.name, + data: { + id: session.state.id, + name: session.state.name, + handle: session.state.handle, + type: session.state.type, + nodes: session.state.nodes, + edges: session.state.edges, + }, + }); + + console.log(`Persisted workflow ${workflowId} to D1 database`); + } catch (error) { + console.error("Error persisting workflow to database:", error); + } + } + + async fetch(request: Request): Promise { + const url = new URL(request.url); + + // Extract workflowId from URL path (e.g., 
/ws/:workflowId) + const pathParts = url.pathname.split("/").filter(Boolean); + const workflowId = pathParts[pathParts.length - 1] || ""; + + // Extract userId from custom header + const userId = request.headers.get("X-User-Id") || ""; + + if (!workflowId) { + return new Response("Missing workflowId in path", { + status: 400, + }); + } + + if (!userId) { + return new Response("Missing userId header", { + status: 401, + }); + } + + try { + await this.loadState(workflowId, userId); + } catch (error) { + console.error("Error loading workflow:", error); + return Response.json( + { + error: "Failed to load workflow", + details: error instanceof Error ? error.message : "Unknown error", + }, + { status: 403 } + ); + } + + if (url.pathname.endsWith("/state") && request.method === "GET") { + return this.handleStateRequest(workflowId); + } + + const upgradeHeader = request.headers.get("Upgrade"); + if (upgradeHeader === "websocket") { + return this.handleWebSocketUpgrade(workflowId); + } + + return new Response("Expected /state GET or WebSocket upgrade", { + status: 400, + }); + } + + private async handleStateRequest(workflowId: string): Promise { + try { + const state = await this.getState(workflowId); + return Response.json(state); + } catch (error) { + console.error("Error getting workflow state:", error); + return Response.json( + { + error: "Failed to get workflow state", + details: error instanceof Error ? 
error.message : "Unknown error", + }, + { status: 500 } + ); + } + } + + private async handleWebSocketUpgrade(workflowId: string): Promise { + const webSocketPair = new WebSocketPair(); + const [client, server] = Object.values(webSocketPair); + + this.ctx.acceptWebSocket(server); + this.webSocketWorkflows.set(server, workflowId); + + const initState = await this.getState(workflowId); + const initMessage: WorkflowInitMessage = { + type: "init", + state: initState, + }; + server.send(JSON.stringify(initMessage)); + + return new Response(null, { + status: 101, + webSocket: client, + }); + } + + async webSocketMessage(ws: WebSocket, message: string | ArrayBuffer) { + const workflowId = this.webSocketWorkflows.get(ws); + if (!workflowId) { + console.error("WebSocket not associated with any workflow"); + return; + } + + try { + if (typeof message !== "string") { + const errorMsg: WorkflowErrorMessage = { + error: "Expected string message", + }; + ws.send(JSON.stringify(errorMsg)); + return; + } + + const data = JSON.parse(message) as WorkflowMessage; + + if ("type" in data && data.type === "update") { + const updateMsg = data as WorkflowUpdateMessage; + + // Update with the new state + await this.updateState(workflowId, updateMsg.nodes, updateMsg.edges); + } + } catch (error) { + console.error("WebSocket message error:", error); + const errorMsg: WorkflowErrorMessage = { + error: "Failed to process message", + details: error instanceof Error ? 
error.message : "Unknown error", + }; + ws.send(JSON.stringify(errorMsg)); + } + } + + async webSocketClose( + ws: WebSocket, + _code: number, + _reason: string, + _wasClean: boolean + ) { + const workflowId = this.webSocketWorkflows.get(ws); + if (workflowId) { + // Persist any pending changes to D1 before closing + await this.persistToDatabase(workflowId); + this.webSocketWorkflows.delete(ws); + } + } +} diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index 19b35d04..b14f5bc6 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -1,5 +1,5 @@ import { Hono } from "hono"; -export { DurableWorkflow } from "./durable-objects/durable-workflow"; +export { UserSession } from "./durable-objects/user-session"; export { Runtime } from "./runtime/runtime"; import auth from "./auth"; import { ApiContext } from "./context"; diff --git a/apps/api/src/routes/workflows.ts b/apps/api/src/routes/workflows.ts index 12d11731..83a60b28 100644 --- a/apps/api/src/routes/workflows.ts +++ b/apps/api/src/routes/workflows.ts @@ -168,8 +168,8 @@ workflowRoutes.get("/:id", jwtMiddleware, async (c) => { try { // Try Durable Object first - const doId = c.env.DURABLE_WORKFLOW.idFromName(`${userId}-${id}`); - const stub: any = c.env.DURABLE_WORKFLOW.get(doId); + const doId = c.env.USER_SESSION.idFromName(`${userId}-${id}`); + const stub: any = c.env.USER_SESSION.get(doId); let workflowData: any = null; try { workflowData = await stub.getState(); @@ -509,10 +509,10 @@ workflowRoutes.post( if (version === "dev") { // Get workflow data from Durable Object first (userId already defined above) - const doId = c.env.DURABLE_WORKFLOW.idFromName( + const doId = c.env.USER_SESSION.idFromName( `${userId}-${workflowIdOrHandle}` ); - const stub: any = c.env.DURABLE_WORKFLOW.get(doId); + const stub: any = c.env.USER_SESSION.get(doId); try { const state = await stub.getState(); diff --git a/apps/api/src/routes/ws.ts b/apps/api/src/routes/ws.ts index 8903a432..de63aaa8 100644 --- 
a/apps/api/src/routes/ws.ts +++ b/apps/api/src/routes/ws.ts @@ -14,22 +14,24 @@ wsRoutes.get("/:workflowId", jwtMiddleware, async (c) => { } const userId = c.var.jwtPayload?.sub; - const workflowId = c.req.param("workflowId"); - const organizationId = c.get("organizationId")!; - if (!userId || !organizationId) { + if (!userId) { return c.json({ error: "Unauthorized" }, 401); } - // Create a unique DO ID for this user + workflow combination - const doId = c.env.DURABLE_WORKFLOW.idFromName(`${userId}-${workflowId}`); - const stub = c.env.DURABLE_WORKFLOW.get(doId); + // Create a unique DO ID for this user + const doId = c.env.USER_SESSION.idFromName(userId); + const stub = c.env.USER_SESSION.get(doId); + + // Pass the original request with userId in a custom header + const headers = new Headers(c.req.raw.headers); + headers.set("X-User-Id", userId); + const newReq = new Request(c.req.url, { + method: c.req.method, + headers, + body: c.req.raw.body, + }); - // Reconstruct request with required query params for DO - const url = new URL(c.req.url); - url.searchParams.set("organizationId", organizationId); - url.searchParams.set("workflowId", workflowId); - const newReq = new Request(url.toString(), c.req.raw); return stub.fetch(newReq); }); diff --git a/apps/api/src/utils/encryption.test.ts b/apps/api/src/utils/encryption.test.ts index 39d75c46..a5ecc96c 100644 --- a/apps/api/src/utils/encryption.test.ts +++ b/apps/api/src/utils/encryption.test.ts @@ -5,7 +5,7 @@ import { beforeEach, describe, expect, it } from "vitest"; import { Bindings } from "../context"; -import { DurableWorkflow } from "../durable-objects/durable-workflow"; +import { UserSession } from "../durable-objects/user-session"; import { decryptSecret, encryptSecret } from "./encryption"; // Mock Bindings for testing @@ -21,7 +21,7 @@ const createMockEnv = (masterKey?: string): Bindings => ({ RATE_LIMIT_AUTH: {} as RateLimit, RATE_LIMIT_EXECUTE: {} as RateLimit, EXECUTE: {} as Workflow, - DURABLE_WORKFLOW: 
{} as DurableObjectNamespace, + USER_SESSION: {} as DurableObjectNamespace, RESSOURCES: {} as R2Bucket, DATASETS: {} as R2Bucket, DATASETS_AUTORAG: "", diff --git a/apps/api/wrangler.jsonc b/apps/api/wrangler.jsonc index 7918aa7e..7225af4e 100644 --- a/apps/api/wrangler.jsonc +++ b/apps/api/wrangler.jsonc @@ -70,8 +70,8 @@ "durable_objects": { "bindings": [ { - "name": "DURABLE_WORKFLOW", - "class_name": "DurableWorkflow", + "name": "USER_SESSION", + "class_name": "UserSession", "script_name": "dafthunk-api" } ] @@ -79,7 +79,7 @@ "migrations": [ { "tag": "v1", - "new_sqlite_classes": ["DurableWorkflow"] + "new_sqlite_classes": ["UserSession"] } ], "unsafe": { @@ -173,8 +173,8 @@ "durable_objects": { "bindings": [ { - "name": "DURABLE_WORKFLOW", - "class_name": "DurableWorkflow", + "name": "USER_SESSION", + "class_name": "UserSession", "script_name": "dafthunk-api" } ] From 88ca6643d6f43d63fa2321213413c1857e0e555d Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sun, 5 Oct 2025 00:14:08 +0200 Subject: [PATCH 14/29] Make the implementation more robust --- apps/api/src/durable-objects/user-session.ts | 106 +++++++++++------- apps/api/src/routes/datasets.ts | 3 +- .../src/services/durable-workflow-service.ts | 19 +++- packages/types/src/workflow.ts | 3 +- 4 files changed, 86 insertions(+), 45 deletions(-) diff --git a/apps/api/src/durable-objects/user-session.ts b/apps/api/src/durable-objects/user-session.ts index bb63b47d..96943165 100644 --- a/apps/api/src/durable-objects/user-session.ts +++ b/apps/api/src/durable-objects/user-session.ts @@ -20,8 +20,10 @@ interface WorkflowSession { } export class UserSession extends DurableObject { + private static readonly PERSIST_DEBOUNCE_MS = 500; + private workflows: Map = new Map(); - private webSocketWorkflows: Map = new Map(); + private pendingPersist: Map = new Map(); constructor(ctx: DurableObjectState, env: Bindings) { super(ctx, env); @@ -83,24 +85,50 @@ export class UserSession extends DurableObject { return 
session.state; } - async updateState(workflowId: string, nodes: Node[], edges: Edge[]): Promise { - const session = this.workflows.get(workflowId); + async updateState(state: WorkflowState): Promise { + const session = this.workflows.get(state.id); if (!session) { - throw new Error(`Workflow ${workflowId} not loaded`); + throw new Error(`Workflow ${state.id} not loaded`); } - const timestamp = Date.now(); - const updatedState: WorkflowState = { - ...session.state, - nodes, - edges, - timestamp, - }; + // Validate incoming state matches session + if (state.id !== session.state.id) { + throw new Error(`Workflow ID mismatch: expected ${session.state.id}, got ${state.id}`); + } - this.workflows.set(workflowId, { ...session, state: updatedState }); + // Validate required fields + if (!state.name || !state.handle || !state.type) { + throw new Error("Invalid state: missing required fields (name, handle, or type)"); + } - // Persist immediately to D1 - await this.persistToDatabase(workflowId); + // Validate arrays are present + if (!Array.isArray(state.nodes) || !Array.isArray(state.edges)) { + throw new Error("Invalid state: nodes and edges must be arrays"); + } + + this.workflows.set(state.id, { ...session, state }); + + // Debounce persistence to reduce D1 writes on rapid updates + this.schedulePersist(state.id); + } + + /** + * Schedule a debounced persist for a workflow + */ + private schedulePersist(workflowId: string): void { + // Clear any existing timeout + const existingTimeout = this.pendingPersist.get(workflowId); + if (existingTimeout !== undefined) { + clearTimeout(existingTimeout); + } + + // Schedule new persist + const timeoutId = setTimeout(() => { + this.persistToDatabase(workflowId); + this.pendingPersist.delete(workflowId); + }, UserSession.PERSIST_DEBOUNCE_MS) as unknown as number; + + this.pendingPersist.set(workflowId, timeoutId); } /** @@ -155,17 +183,20 @@ export class UserSession extends DurableObject { }); } - try { - await 
this.loadState(workflowId, userId); - } catch (error) { - console.error("Error loading workflow:", error); - return Response.json( - { - error: "Failed to load workflow", - details: error instanceof Error ? error.message : "Unknown error", - }, - { status: 403 } - ); + // Only load if not already in memory + if (!this.workflows.has(workflowId)) { + try { + await this.loadState(workflowId, userId); + } catch (error) { + console.error("Error loading workflow:", error); + return Response.json( + { + error: "Failed to load workflow", + details: error instanceof Error ? error.message : "Unknown error", + }, + { status: 403 } + ); + } } if (url.pathname.endsWith("/state") && request.method === "GET") { @@ -203,7 +234,6 @@ export class UserSession extends DurableObject { const [client, server] = Object.values(webSocketPair); this.ctx.acceptWebSocket(server); - this.webSocketWorkflows.set(server, workflowId); const initState = await this.getState(workflowId); const initMessage: WorkflowInitMessage = { @@ -219,12 +249,6 @@ export class UserSession extends DurableObject { } async webSocketMessage(ws: WebSocket, message: string | ArrayBuffer) { - const workflowId = this.webSocketWorkflows.get(ws); - if (!workflowId) { - console.error("WebSocket not associated with any workflow"); - return; - } - try { if (typeof message !== "string") { const errorMsg: WorkflowErrorMessage = { @@ -240,7 +264,7 @@ export class UserSession extends DurableObject { const updateMsg = data as WorkflowUpdateMessage; // Update with the new state - await this.updateState(workflowId, updateMsg.nodes, updateMsg.edges); + await this.updateState(updateMsg.state); } } catch (error) { console.error("WebSocket message error:", error); @@ -253,16 +277,20 @@ export class UserSession extends DurableObject { } async webSocketClose( - ws: WebSocket, + _ws: WebSocket, _code: number, _reason: string, _wasClean: boolean ) { - const workflowId = this.webSocketWorkflows.get(ws); - if (workflowId) { - // Persist any 
pending changes to D1 before closing - await this.persistToDatabase(workflowId); - this.webSocketWorkflows.delete(ws); + // Flush all pending persists when connection closes + const persistPromises: Promise[] = []; + + for (const [workflowId, timeoutId] of this.pendingPersist.entries()) { + clearTimeout(timeoutId); + persistPromises.push(this.persistToDatabase(workflowId)); + this.pendingPersist.delete(workflowId); } + + await Promise.all(persistPromises); } } diff --git a/apps/api/src/routes/datasets.ts b/apps/api/src/routes/datasets.ts index efa64c16..66ebaab9 100644 --- a/apps/api/src/routes/datasets.ts +++ b/apps/api/src/routes/datasets.ts @@ -26,7 +26,6 @@ import { getDatasets, updateDataset, } from "../db"; -import { developerModeMiddleware } from "../middleware/developer-mode"; // Extend the ApiContext with our custom variable type ExtendedApiContext = ApiContext & { @@ -38,7 +37,7 @@ type ExtendedApiContext = ApiContext & { const datasetRoutes = new Hono(); // Apply early access middleware to all dataset routes -datasetRoutes.use("*", jwtMiddleware, developerModeMiddleware); +datasetRoutes.use("*", jwtMiddleware); /** * List all datasets for the current organization diff --git a/apps/web/src/services/durable-workflow-service.ts b/apps/web/src/services/durable-workflow-service.ts index ec3cca0b..29eec443 100644 --- a/apps/web/src/services/durable-workflow-service.ts +++ b/apps/web/src/services/durable-workflow-service.ts @@ -33,6 +33,7 @@ export class WorkflowWebSocket { private maxReconnectAttempts = 5; private reconnectDelay = 1000; // Start with 1 second private shouldReconnect = true; + private currentState: WorkflowState | null = null; constructor( private orgHandle: string, @@ -70,6 +71,7 @@ export class WorkflowWebSocket { console.error("WebSocket error message:", message.error); this.options.onError?.(message.error || ""); } else if (message.type === "init") { + this.currentState = message.state; this.options.onInit?.(message.state); } else if 
(message.type === "execution_update") { this.options.onExecutionUpdate?.({ @@ -124,12 +126,25 @@ export class WorkflowWebSocket { return; } + if (!this.currentState) { + console.warn("No current state available, cannot send update"); + return; + } + try { - const updateMsg: WorkflowUpdateMessage = { - type: "update", + const updatedState: WorkflowState = { + ...this.currentState, nodes, edges, + timestamp: Date.now(), }; + + const updateMsg: WorkflowUpdateMessage = { + type: "update", + state: updatedState, + }; + + this.currentState = updatedState; this.ws.send(JSON.stringify(updateMsg)); } catch (error) { console.error("Failed to send WebSocket message:", error); diff --git a/packages/types/src/workflow.ts b/packages/types/src/workflow.ts index f89a65d0..dd1e07a7 100644 --- a/packages/types/src/workflow.ts +++ b/packages/types/src/workflow.ts @@ -490,8 +490,7 @@ export interface WorkflowInitMessage { */ export interface WorkflowUpdateMessage { type: "update"; - nodes: Node[]; - edges: Edge[]; + state: WorkflowState; } /** From 5346421d2bc3598955d7a34105234cf992ec35d7 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sun, 5 Oct 2025 00:20:46 +0200 Subject: [PATCH 15/29] Rename the user session to workflow session --- apps/api/src/context.ts | 4 +-- .../{user-session.ts => workflow-session.ts} | 34 +++++++++---------- apps/api/src/index.ts | 2 +- apps/api/src/routes/workflows.ts | 8 ++--- apps/api/src/routes/ws.ts | 4 +-- apps/api/src/utils/encryption.test.ts | 4 +-- apps/api/wrangler.jsonc | 10 +++--- 7 files changed, 33 insertions(+), 33 deletions(-) rename apps/api/src/durable-objects/{user-session.ts => workflow-session.ts} (90%) diff --git a/apps/api/src/context.ts b/apps/api/src/context.ts index 10febf1b..f427e979 100644 --- a/apps/api/src/context.ts +++ b/apps/api/src/context.ts @@ -1,6 +1,6 @@ import { JWTTokenPayload } from "@dafthunk/types"; -import { UserSession } from "./durable-objects/user-session"; +import { WorkflowSession } from 
"./durable-objects/workflow-session"; import { RuntimeParams } from "./runtime/runtime"; export interface Bindings { @@ -10,7 +10,7 @@ export interface Bindings { RATE_LIMIT_AUTH: RateLimit; RATE_LIMIT_EXECUTE: RateLimit; EXECUTE: Workflow; - USER_SESSION: DurableObjectNamespace; + WORKFLOW_SESSION: DurableObjectNamespace; RESSOURCES: R2Bucket; DATASETS: R2Bucket; DATASETS_AUTORAG: string; diff --git a/apps/api/src/durable-objects/user-session.ts b/apps/api/src/durable-objects/workflow-session.ts similarity index 90% rename from apps/api/src/durable-objects/user-session.ts rename to apps/api/src/durable-objects/workflow-session.ts index 96943165..e021d9b8 100644 --- a/apps/api/src/durable-objects/user-session.ts +++ b/apps/api/src/durable-objects/workflow-session.ts @@ -14,15 +14,15 @@ import { Bindings } from "../context"; import { createDatabase } from "../db/index"; import { getWorkflowWithUserAccess, updateWorkflow } from "../db/queries"; -interface WorkflowSession { - state: WorkflowState; +interface WorkflowSessionPair { + workflowState: WorkflowState; organizationId: string; } -export class UserSession extends DurableObject { +export class WorkflowSession extends DurableObject { private static readonly PERSIST_DEBOUNCE_MS = 500; - private workflows: Map = new Map(); + private workflows: Map = new Map(); private pendingPersist: Map = new Map(); constructor(ctx: DurableObjectState, env: Bindings) { @@ -59,7 +59,7 @@ export class UserSession extends DurableObject { timestamp, }; - this.workflows.set(workflowId, { state, organizationId }); + this.workflows.set(workflowId, { workflowState: state, organizationId }); } private extractWorkflowData(workflow: any, workflowId: string) { @@ -82,7 +82,7 @@ export class UserSession extends DurableObject { throw new Error(`Workflow ${workflowId} not loaded`); } - return session.state; + return session.workflowState; } async updateState(state: WorkflowState): Promise { @@ -92,8 +92,8 @@ export class UserSession extends 
DurableObject { } // Validate incoming state matches session - if (state.id !== session.state.id) { - throw new Error(`Workflow ID mismatch: expected ${session.state.id}, got ${state.id}`); + if (state.id !== session.workflowState.id) { + throw new Error(`Workflow ID mismatch: expected ${session.workflowState.id}, got ${state.id}`); } // Validate required fields @@ -106,7 +106,7 @@ export class UserSession extends DurableObject { throw new Error("Invalid state: nodes and edges must be arrays"); } - this.workflows.set(state.id, { ...session, state }); + this.workflows.set(state.id, { ...session, workflowState: state }); // Debounce persistence to reduce D1 writes on rapid updates this.schedulePersist(state.id); @@ -126,7 +126,7 @@ export class UserSession extends DurableObject { const timeoutId = setTimeout(() => { this.persistToDatabase(workflowId); this.pendingPersist.delete(workflowId); - }, UserSession.PERSIST_DEBOUNCE_MS) as unknown as number; + }, WorkflowSession.PERSIST_DEBOUNCE_MS) as unknown as number; this.pendingPersist.set(workflowId, timeoutId); } @@ -144,14 +144,14 @@ export class UserSession extends DurableObject { try { const db = createDatabase(this.env.DB); await updateWorkflow(db, workflowId, session.organizationId, { - name: session.state.name, + name: session.workflowState.name, data: { - id: session.state.id, - name: session.state.name, - handle: session.state.handle, - type: session.state.type, - nodes: session.state.nodes, - edges: session.state.edges, + id: session.workflowState.id, + name: session.workflowState.name, + handle: session.workflowState.handle, + type: session.workflowState.type, + nodes: session.workflowState.nodes, + edges: session.workflowState.edges, }, }); diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index b14f5bc6..49ea52ab 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -1,5 +1,5 @@ import { Hono } from "hono"; -export { UserSession } from "./durable-objects/user-session"; +export { 
WorkflowSession } from "./durable-objects/workflow-session"; export { Runtime } from "./runtime/runtime"; import auth from "./auth"; import { ApiContext } from "./context"; diff --git a/apps/api/src/routes/workflows.ts b/apps/api/src/routes/workflows.ts index 83a60b28..048b379b 100644 --- a/apps/api/src/routes/workflows.ts +++ b/apps/api/src/routes/workflows.ts @@ -168,8 +168,8 @@ workflowRoutes.get("/:id", jwtMiddleware, async (c) => { try { // Try Durable Object first - const doId = c.env.USER_SESSION.idFromName(`${userId}-${id}`); - const stub: any = c.env.USER_SESSION.get(doId); + const doId = c.env.WORKFLOW_SESSION.idFromName(`${userId}-${id}`); + const stub: any = c.env.WORKFLOW_SESSION.get(doId); let workflowData: any = null; try { workflowData = await stub.getState(); @@ -509,10 +509,10 @@ workflowRoutes.post( if (version === "dev") { // Get workflow data from Durable Object first (userId already defined above) - const doId = c.env.USER_SESSION.idFromName( + const doId = c.env.WORKFLOW_SESSION.idFromName( `${userId}-${workflowIdOrHandle}` ); - const stub: any = c.env.USER_SESSION.get(doId); + const stub: any = c.env.WORKFLOW_SESSION.get(doId); try { const state = await stub.getState(); diff --git a/apps/api/src/routes/ws.ts b/apps/api/src/routes/ws.ts index de63aaa8..4b7a2a85 100644 --- a/apps/api/src/routes/ws.ts +++ b/apps/api/src/routes/ws.ts @@ -20,8 +20,8 @@ wsRoutes.get("/:workflowId", jwtMiddleware, async (c) => { } // Create a unique DO ID for this user - const doId = c.env.USER_SESSION.idFromName(userId); - const stub = c.env.USER_SESSION.get(doId); + const doId = c.env.WORKFLOW_SESSION.idFromName(userId); + const stub = c.env.WORKFLOW_SESSION.get(doId); // Pass the original request with userId in a custom header const headers = new Headers(c.req.raw.headers); diff --git a/apps/api/src/utils/encryption.test.ts b/apps/api/src/utils/encryption.test.ts index a5ecc96c..430c8997 100644 --- a/apps/api/src/utils/encryption.test.ts +++ 
b/apps/api/src/utils/encryption.test.ts @@ -5,7 +5,7 @@ import { beforeEach, describe, expect, it } from "vitest"; import { Bindings } from "../context"; -import { UserSession } from "../durable-objects/user-session"; +import { WorkflowSession } from "../durable-objects/workflow-session"; import { decryptSecret, encryptSecret } from "./encryption"; // Mock Bindings for testing @@ -21,7 +21,7 @@ const createMockEnv = (masterKey?: string): Bindings => ({ RATE_LIMIT_AUTH: {} as RateLimit, RATE_LIMIT_EXECUTE: {} as RateLimit, EXECUTE: {} as Workflow, - USER_SESSION: {} as DurableObjectNamespace, + WORKFLOW_SESSION: {} as DurableObjectNamespace, RESSOURCES: {} as R2Bucket, DATASETS: {} as R2Bucket, DATASETS_AUTORAG: "", diff --git a/apps/api/wrangler.jsonc b/apps/api/wrangler.jsonc index 7225af4e..9f67e168 100644 --- a/apps/api/wrangler.jsonc +++ b/apps/api/wrangler.jsonc @@ -70,8 +70,8 @@ "durable_objects": { "bindings": [ { - "name": "USER_SESSION", - "class_name": "UserSession", + "name": "WORKFLOW_SESSION", + "class_name": "WorkflowSession", "script_name": "dafthunk-api" } ] @@ -79,7 +79,7 @@ "migrations": [ { "tag": "v1", - "new_sqlite_classes": ["UserSession"] + "new_sqlite_classes": ["WorkflowSession"] } ], "unsafe": { @@ -173,8 +173,8 @@ "durable_objects": { "bindings": [ { - "name": "USER_SESSION", - "class_name": "UserSession", + "name": "WORKFLOW_SESSION", + "class_name": "WorkflowSession", "script_name": "dafthunk-api" } ] From 25f2d1b605dd4a8a6a211ae109a4cfee9754360a Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sun, 5 Oct 2025 01:16:53 +0200 Subject: [PATCH 16/29] Handle update message in the frontend --- .../src/durable-objects/workflow-session.ts | 138 ++++++++++-------- apps/api/src/routes/ws.ts | 6 +- apps/web/src/hooks/use-editable-workflow.ts | 91 ++++++------ ...service.ts => workflow-session-service.ts} | 5 + 4 files changed, 133 insertions(+), 107 deletions(-) rename apps/web/src/services/{durable-workflow-service.ts => 
workflow-session-service.ts} (96%) diff --git a/apps/api/src/durable-objects/workflow-session.ts b/apps/api/src/durable-objects/workflow-session.ts index e021d9b8..0300279c 100644 --- a/apps/api/src/durable-objects/workflow-session.ts +++ b/apps/api/src/durable-objects/workflow-session.ts @@ -14,16 +14,13 @@ import { Bindings } from "../context"; import { createDatabase } from "../db/index"; import { getWorkflowWithUserAccess, updateWorkflow } from "../db/queries"; -interface WorkflowSessionPair { - workflowState: WorkflowState; - organizationId: string; -} - export class WorkflowSession extends DurableObject { private static readonly PERSIST_DEBOUNCE_MS = 500; - private workflows: Map = new Map(); - private pendingPersist: Map = new Map(); + private state: WorkflowState | null = null; + private organizationId: string | null = null; + private pendingPersistTimeout: number | undefined = undefined; + private connectedUsers: Set = new Set(); constructor(ctx: DurableObjectState, env: Bindings) { super(ctx, env); @@ -49,7 +46,7 @@ export class WorkflowSession extends DurableObject { const { name, handle, type, nodes, edges, timestamp } = this.extractWorkflowData(workflow, workflowId); - const state: WorkflowState = { + this.state = { id: workflowId, name, handle, @@ -59,7 +56,7 @@ export class WorkflowSession extends DurableObject { timestamp, }; - this.workflows.set(workflowId, { workflowState: state, organizationId }); + this.organizationId = organizationId; } private extractWorkflowData(workflow: any, workflowId: string) { @@ -74,26 +71,24 @@ export class WorkflowSession extends DurableObject { } /** - * Get state from memory for a specific workflow + * Get state from memory */ - async getState(workflowId: string): Promise { - const session = this.workflows.get(workflowId); - if (!session) { - throw new Error(`Workflow ${workflowId} not loaded`); + async getState(): Promise { + if (!this.state) { + throw new Error("Workflow not loaded"); } - return 
session.workflowState; + return this.state; } async updateState(state: WorkflowState): Promise { - const session = this.workflows.get(state.id); - if (!session) { - throw new Error(`Workflow ${state.id} not loaded`); + if (!this.state) { + throw new Error("Workflow not loaded"); } - // Validate incoming state matches session - if (state.id !== session.workflowState.id) { - throw new Error(`Workflow ID mismatch: expected ${session.workflowState.id}, got ${state.id}`); + // Validate incoming state matches current state + if (state.id !== this.state.id) { + throw new Error(`Workflow ID mismatch: expected ${this.state.id}, got ${state.id}`); } // Validate required fields @@ -106,56 +101,73 @@ export class WorkflowSession extends DurableObject { throw new Error("Invalid state: nodes and edges must be arrays"); } - this.workflows.set(state.id, { ...session, workflowState: state }); + this.state = state; + + // Broadcast to all connected users + this.broadcast(state); // Debounce persistence to reduce D1 writes on rapid updates - this.schedulePersist(state.id); + this.schedulePersist(); } /** - * Schedule a debounced persist for a workflow + * Broadcast state update to all connected users */ - private schedulePersist(workflowId: string): void { + private broadcast(state: WorkflowState): void { + const updateMsg: WorkflowUpdateMessage = { + type: "update", + state, + }; + const message = JSON.stringify(updateMsg); + + for (const ws of this.connectedUsers) { + try { + ws.send(message); + } catch (error) { + console.error("Error broadcasting to WebSocket:", error); + } + } + } + + /** + * Schedule a debounced persist + */ + private schedulePersist(): void { // Clear any existing timeout - const existingTimeout = this.pendingPersist.get(workflowId); - if (existingTimeout !== undefined) { - clearTimeout(existingTimeout); + if (this.pendingPersistTimeout !== undefined) { + clearTimeout(this.pendingPersistTimeout); } // Schedule new persist - const timeoutId = setTimeout(() => { 
- this.persistToDatabase(workflowId); - this.pendingPersist.delete(workflowId); + this.pendingPersistTimeout = setTimeout(() => { + this.persistToDatabase(); + this.pendingPersistTimeout = undefined; }, WorkflowSession.PERSIST_DEBOUNCE_MS) as unknown as number; - - this.pendingPersist.set(workflowId, timeoutId); } /** * Persist state back to D1 database */ - private async persistToDatabase(workflowId: string): Promise { - const session = this.workflows.get(workflowId); - - if (!session) { + private async persistToDatabase(): Promise { + if (!this.state || !this.organizationId) { return; } try { const db = createDatabase(this.env.DB); - await updateWorkflow(db, workflowId, session.organizationId, { - name: session.workflowState.name, + await updateWorkflow(db, this.state.id, this.organizationId, { + name: this.state.name, data: { - id: session.workflowState.id, - name: session.workflowState.name, - handle: session.workflowState.handle, - type: session.workflowState.type, - nodes: session.workflowState.nodes, - edges: session.workflowState.edges, + id: this.state.id, + name: this.state.name, + handle: this.state.handle, + type: this.state.type, + nodes: this.state.nodes, + edges: this.state.edges, }, }); - console.log(`Persisted workflow ${workflowId} to D1 database`); + console.log(`Persisted workflow ${this.state.id} to D1 database`); } catch (error) { console.error("Error persisting workflow to database:", error); } @@ -184,7 +196,7 @@ export class WorkflowSession extends DurableObject { } // Only load if not already in memory - if (!this.workflows.has(workflowId)) { + if (!this.state) { try { await this.loadState(workflowId, userId); } catch (error) { @@ -200,12 +212,12 @@ export class WorkflowSession extends DurableObject { } if (url.pathname.endsWith("/state") && request.method === "GET") { - return this.handleStateRequest(workflowId); + return this.handleStateRequest(); } const upgradeHeader = request.headers.get("Upgrade"); if (upgradeHeader === "websocket") 
{ - return this.handleWebSocketUpgrade(workflowId); + return this.handleWebSocketUpgrade(request); } return new Response("Expected /state GET or WebSocket upgrade", { @@ -213,9 +225,9 @@ export class WorkflowSession extends DurableObject { }); } - private async handleStateRequest(workflowId: string): Promise { + private async handleStateRequest(): Promise { try { - const state = await this.getState(workflowId); + const state = await this.getState(); return Response.json(state); } catch (error) { console.error("Error getting workflow state:", error); @@ -229,13 +241,14 @@ export class WorkflowSession extends DurableObject { } } - private async handleWebSocketUpgrade(workflowId: string): Promise { + private async handleWebSocketUpgrade(request: Request): Promise { const webSocketPair = new WebSocketPair(); const [client, server] = Object.values(webSocketPair); this.ctx.acceptWebSocket(server); + this.connectedUsers.add(server); - const initState = await this.getState(workflowId); + const initState = await this.getState(); const initMessage: WorkflowInitMessage = { type: "init", state: initState, @@ -277,20 +290,19 @@ export class WorkflowSession extends DurableObject { } async webSocketClose( - _ws: WebSocket, + ws: WebSocket, _code: number, _reason: string, _wasClean: boolean ) { - // Flush all pending persists when connection closes - const persistPromises: Promise[] = []; - - for (const [workflowId, timeoutId] of this.pendingPersist.entries()) { - clearTimeout(timeoutId); - persistPromises.push(this.persistToDatabase(workflowId)); - this.pendingPersist.delete(workflowId); + // Remove WebSocket from connected users + this.connectedUsers.delete(ws); + + // Flush pending persist when connection closes + if (this.pendingPersistTimeout !== undefined) { + clearTimeout(this.pendingPersistTimeout); + await this.persistToDatabase(); + this.pendingPersistTimeout = undefined; } - - await Promise.all(persistPromises); } } diff --git a/apps/api/src/routes/ws.ts 
b/apps/api/src/routes/ws.ts index 4b7a2a85..3e314dc7 100644 --- a/apps/api/src/routes/ws.ts +++ b/apps/api/src/routes/ws.ts @@ -19,8 +19,10 @@ wsRoutes.get("/:workflowId", jwtMiddleware, async (c) => { return c.json({ error: "Unauthorized" }, 401); } - // Create a unique DO ID for this user - const doId = c.env.WORKFLOW_SESSION.idFromName(userId); + const workflowId = c.req.param("workflowId"); + + // Create a unique DO ID for this workflow + const doId = c.env.WORKFLOW_SESSION.idFromName(workflowId); const stub = c.env.WORKFLOW_SESSION.get(doId); // Pass the original request with userId in a custom header diff --git a/apps/web/src/hooks/use-editable-workflow.ts b/apps/web/src/hooks/use-editable-workflow.ts index a323ba90..13c6da0a 100644 --- a/apps/web/src/hooks/use-editable-workflow.ts +++ b/apps/web/src/hooks/use-editable-workflow.ts @@ -12,7 +12,7 @@ import { connectWorkflowWS, WorkflowState, WorkflowWebSocket, -} from "@/services/durable-workflow-service.ts"; +} from "@/services/workflow-session-service.ts"; import { adaptDeploymentNodesToReactFlowNodes } from "@/utils/utils"; interface UseEditableWorkflowProps { @@ -61,49 +61,56 @@ export function useEditableWorkflow({ return; } + const handleStateUpdate = (state: WorkflowState) => { + try { + // Store workflow metadata + if (state.id && state.type) { + setWorkflowMetadata({ + id: state.id, + name: state.name || "", + handle: state.handle || "", + type: state.type, + }); + } + + // Convert to ReactFlow format + const reactFlowNodes = adaptDeploymentNodesToReactFlowNodes( + state.nodes, + nodeTemplates + ); + const reactFlowEdges = state.edges.map( + (edge: any, index: number) => ({ + id: `e${index}`, + source: edge.source, + target: edge.target, + sourceHandle: edge.sourceOutput, + targetHandle: edge.targetInput, + type: "workflowEdge", + data: { + isValid: true, + sourceType: edge.sourceOutput, + targetType: edge.targetInput, + }, + }) + ); + + setNodes(reactFlowNodes); + setEdges(reactFlowEdges); + 
setProcessingError(null); + } catch (error) { + console.error("Error processing WebSocket state:", error); + setProcessingError("Failed to load state from WebSocket"); + } + }; + const ws = connectWorkflowWS(organization.handle, workflowId, { onInit: (state: WorkflowState) => { - try { - // Store workflow metadata - if (state.id && state.type) { - setWorkflowMetadata({ - id: state.id, - name: state.name || "", - handle: state.handle || "", - type: state.type, - }); - } - - // Convert to ReactFlow format - const reactFlowNodes = adaptDeploymentNodesToReactFlowNodes( - state.nodes, - nodeTemplates - ); - const reactFlowEdges = state.edges.map( - (edge: any, index: number) => ({ - id: `e${index}`, - source: edge.source, - target: edge.target, - sourceHandle: edge.sourceOutput, - targetHandle: edge.targetInput, - type: "workflowEdge", - data: { - isValid: true, - sourceType: edge.sourceOutput, - targetType: edge.targetInput, - }, - }) - ); - - setNodes(reactFlowNodes); - setEdges(reactFlowEdges); - setProcessingError(null); - setIsInitializing(false); - } catch (error) { - console.error("Error processing WebSocket state:", error); - setProcessingError("Failed to load state from WebSocket"); - setIsInitializing(false); - } + handleStateUpdate(state); + setIsInitializing(false); + }, + onUpdate: (state: WorkflowState) => { + // Handle broadcasts from other users + handleStateUpdate(state); }, onOpen: () => { setIsWSConnected(true); diff --git a/apps/web/src/services/durable-workflow-service.ts b/apps/web/src/services/workflow-session-service.ts similarity index 96% rename from apps/web/src/services/durable-workflow-service.ts rename to apps/web/src/services/workflow-session-service.ts index 29eec443..a605f36d 100644 --- a/apps/web/src/services/durable-workflow-service.ts +++ b/apps/web/src/services/workflow-session-service.ts @@ -16,11 +16,13 @@ export type { WorkflowState }; type WebSocketMessage = | WorkflowInitMessage + | WorkflowUpdateMessage | WorkflowErrorMessage | 
WorkflowExecutionUpdateMessage; export interface WorkflowWSOptions { onInit?: (state: WorkflowState) => void; + onUpdate?: (state: WorkflowState) => void; onError?: (error: string) => void; onClose?: () => void; onOpen?: () => void; @@ -73,6 +75,9 @@ export class WorkflowWebSocket { } else if (message.type === "init") { this.currentState = message.state; this.options.onInit?.(message.state); + } else if (message.type === "update") { + this.currentState = message.state; + this.options.onUpdate?.(message.state); } else if (message.type === "execution_update") { this.options.onExecutionUpdate?.({ id: message.executionId, From c09689873d0dd8970dbe6437d0f866e7207aae1e Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sun, 5 Oct 2025 23:28:03 +0200 Subject: [PATCH 17/29] Refactor code for improved readability and consistency in error handling --- apps/api/src/db/queries.ts | 9 ++++++-- .../src/durable-objects/workflow-session.ts | 21 ++++++++++--------- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/apps/api/src/db/queries.ts b/apps/api/src/db/queries.ts index 880d1f23..d55d03d1 100644 --- a/apps/api/src/db/queries.ts +++ b/apps/api/src/db/queries.ts @@ -321,7 +321,10 @@ export async function getWorkflowWithUserAccess( organizationId: workflows.organizationId, }) .from(workflows) - .innerJoin(memberships, eq(workflows.organizationId, memberships.organizationId)) + .innerJoin( + memberships, + eq(workflows.organizationId, memberships.organizationId) + ) .where( and( eq(memberships.userId, userId), @@ -330,7 +333,9 @@ export async function getWorkflowWithUserAccess( ) .limit(1); - return result ? { workflow: result.workflow, organizationId: result.organizationId } : undefined; + return result + ? 
{ workflow: result.workflow, organizationId: result.organizationId } + : undefined; } /** diff --git a/apps/api/src/durable-objects/workflow-session.ts b/apps/api/src/durable-objects/workflow-session.ts index 0300279c..cf2e3259 100644 --- a/apps/api/src/durable-objects/workflow-session.ts +++ b/apps/api/src/durable-objects/workflow-session.ts @@ -1,6 +1,4 @@ import { - Edge, - Node, WorkflowErrorMessage, WorkflowInitMessage, WorkflowMessage, @@ -29,16 +27,15 @@ export class WorkflowSession extends DurableObject { /** * Load workflow from D1 database with user access verification */ - private async loadState( - workflowId: string, - userId: string - ): Promise { + private async loadState(workflowId: string, userId: string): Promise { console.log(`Loading workflow ${workflowId} for user ${userId}`); const db = createDatabase(this.env.DB); const result = await getWorkflowWithUserAccess(db, workflowId, userId); if (!result) { - throw new Error(`User ${userId} does not have access to workflow ${workflowId}`); + throw new Error( + `User ${userId} does not have access to workflow ${workflowId}` + ); } const { workflow, organizationId } = result; @@ -88,12 +85,16 @@ export class WorkflowSession extends DurableObject { // Validate incoming state matches current state if (state.id !== this.state.id) { - throw new Error(`Workflow ID mismatch: expected ${this.state.id}, got ${state.id}`); + throw new Error( + `Workflow ID mismatch: expected ${this.state.id}, got ${state.id}` + ); } // Validate required fields if (!state.name || !state.handle || !state.type) { - throw new Error("Invalid state: missing required fields (name, handle, or type)"); + throw new Error( + "Invalid state: missing required fields (name, handle, or type)" + ); } // Validate arrays are present @@ -241,7 +242,7 @@ export class WorkflowSession extends DurableObject { } } - private async handleWebSocketUpgrade(request: Request): Promise { + private async handleWebSocketUpgrade(_request: Request): Promise { 
const webSocketPair = new WebSocketPair(); const [client, server] = Object.values(webSocketPair); From f28bcde76d8c795ccf03d287111a3ee2586f8971 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sun, 5 Oct 2025 23:40:12 +0200 Subject: [PATCH 18/29] Remove unused debounce function from utils --- apps/api/src/middleware/developer-mode.ts | 22 ---------------------- apps/web/src/utils/utils.ts | 11 ----------- 2 files changed, 33 deletions(-) delete mode 100644 apps/api/src/middleware/developer-mode.ts diff --git a/apps/api/src/middleware/developer-mode.ts b/apps/api/src/middleware/developer-mode.ts deleted file mode 100644 index cc01e3cf..00000000 --- a/apps/api/src/middleware/developer-mode.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { Context } from "hono"; - -import { ApiContext } from "../context"; - -export const developerModeMiddleware = async ( - c: Context, - next: () => Promise -) => { - const jwtPayload = c.get("jwtPayload"); - - if (!jwtPayload?.developerMode) { - return c.json( - { - error: - "This feature is under development and accessible only to developers.", - }, - 403 - ); - } - - await next(); -}; diff --git a/apps/web/src/utils/utils.ts b/apps/web/src/utils/utils.ts index 28fbf891..438f3c20 100644 --- a/apps/web/src/utils/utils.ts +++ b/apps/web/src/utils/utils.ts @@ -18,17 +18,6 @@ export function cn(...inputs: ClassValue[]) { return twMerge(clsx(inputs)); } -export const debounce = ) => ReturnType>( - func: T, - wait: number -): ((...args: Parameters) => void) => { - let timeout: ReturnType; - return (...args: Parameters) => { - clearTimeout(timeout); - timeout = setTimeout(() => func(...args), wait); - }; -}; - // Helper function to extract and format parameters for the execution dialog export function extractDialogParametersFromNodes( nodes: Node[], From b2e1b285faaeada5be7d4e67ed0f8be10f8e8890 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Sun, 5 Oct 2025 23:59:40 +0200 Subject: [PATCH 19/29] Remove migrations section from wrangler 
configuration --- apps/api/wrangler.jsonc | 6 ------ 1 file changed, 6 deletions(-) diff --git a/apps/api/wrangler.jsonc b/apps/api/wrangler.jsonc index 9f67e168..ec7f6de2 100644 --- a/apps/api/wrangler.jsonc +++ b/apps/api/wrangler.jsonc @@ -76,12 +76,6 @@ } ] }, - "migrations": [ - { - "tag": "v1", - "new_sqlite_classes": ["WorkflowSession"] - } - ], "unsafe": { "bindings": [ { From 7a913f26f1a9e5ed3dc2d15c6460708c3bf72fad Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Mon, 6 Oct 2025 00:16:52 +0200 Subject: [PATCH 20/29] Make the BROWSER binding optional --- apps/api/src/context.ts | 2 +- apps/api/worker-configuration.d.ts | 5730 ++++++++++++++++++++++++++++ 2 files changed, 5731 insertions(+), 1 deletion(-) create mode 100644 apps/api/worker-configuration.d.ts diff --git a/apps/api/src/context.ts b/apps/api/src/context.ts index f427e979..d3e2f21e 100644 --- a/apps/api/src/context.ts +++ b/apps/api/src/context.ts @@ -15,7 +15,7 @@ export interface Bindings { DATASETS: R2Bucket; DATASETS_AUTORAG: string; AI: Ai; - BROWSER: Fetcher; + BROWSER?: Fetcher; COMPUTE: AnalyticsEngineDataset; WEB_HOST: string; EMAIL_DOMAIN: string; diff --git a/apps/api/worker-configuration.d.ts b/apps/api/worker-configuration.d.ts new file mode 100644 index 00000000..80156f0d --- /dev/null +++ b/apps/api/worker-configuration.d.ts @@ -0,0 +1,5730 @@ +// Generated by Wrangler by running `wrangler types` (hash: c25374baea9e1f84be8ff0028404d497) +// Runtime types generated with workerd@1.20250409.0 2024-10-22 nodejs_compat +declare namespace Cloudflare { + interface Env { + KV: KVNamespace; + WEB_HOST: string; + EMAIL_DOMAIN: string; + SECRET_MASTER_KEY: string; + DATASETS_AUTORAG: string; + CLOUDFLARE_ENV: string; + CLOUDFLARE_ACCOUNT_ID: string; + CLOUDFLARE_API_TOKEN: string; + CLOUDFLARE_AI_GATEWAY_ID: string; + JWT_SECRET: string; + GITHUB_CLIENT_ID: string; + GITHUB_CLIENT_SECRET: string; + GOOGLE_CLIENT_ID: string; + GOOGLE_CLIENT_SECRET: string; + TWILIO_ACCOUNT_SID: string; + 
TWILIO_AUTH_TOKEN: string; + TWILIO_PHONE_NUMBER: string; + SENDGRID_API_KEY: string; + SENDGRID_DEFAULT_FROM: string; + RESEND_API_KEY: string; + RESEND_DEFAULT_FROM: string; + AWS_ACCESS_KEY_ID: string; + AWS_SECRET_ACCESS_KEY: string; + AWS_REGION: string; + SES_DEFAULT_FROM: string; + GEMINI_API_KEY: string; + HUGGINGFACE_API_KEY: string; + WORKFLOW_SESSION: DurableObjectNamespace /* WorkflowSession from dafthunk-api */; + RESSOURCES: R2Bucket; + DATASETS: R2Bucket; + DB: D1Database; + COMPUTE: AnalyticsEngineDataset; + RATE_LIMIT_DEFAULT: RateLimit; + RATE_LIMIT_AUTH: RateLimit; + RATE_LIMIT_EXECUTE: RateLimit; + AI: Ai; + EXECUTE: Workflow; + } +} +interface Env extends Cloudflare.Env {} + +// Begin runtime types +/*! ***************************************************************************** +Copyright (c) Cloudflare. All rights reserved. +Copyright (c) Microsoft Corporation. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. You may obtain a copy of the +License at http://www.apache.org/licenses/LICENSE-2.0 +THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED +WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, +MERCHANTABLITY OR NON-INFRINGEMENT. +See the Apache Version 2.0 License for specific language governing permissions +and limitations under the License. +***************************************************************************** */ +/* eslint-disable */ +// noinspection JSUnusedGlobalSymbols +declare var onmessage: never; +/** + * An abnormal event (called an exception) which occurs as a result of calling a method or accessing a property of a web API. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException) + */ +declare class DOMException extends Error { + constructor(message?: string, name?: string); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/message) */ + readonly message: string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/name) */ + readonly name: string; + /** + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/code) + */ + readonly code: number; + static readonly INDEX_SIZE_ERR: number; + static readonly DOMSTRING_SIZE_ERR: number; + static readonly HIERARCHY_REQUEST_ERR: number; + static readonly WRONG_DOCUMENT_ERR: number; + static readonly INVALID_CHARACTER_ERR: number; + static readonly NO_DATA_ALLOWED_ERR: number; + static readonly NO_MODIFICATION_ALLOWED_ERR: number; + static readonly NOT_FOUND_ERR: number; + static readonly NOT_SUPPORTED_ERR: number; + static readonly INUSE_ATTRIBUTE_ERR: number; + static readonly INVALID_STATE_ERR: number; + static readonly SYNTAX_ERR: number; + static readonly INVALID_MODIFICATION_ERR: number; + static readonly NAMESPACE_ERR: number; + static readonly INVALID_ACCESS_ERR: number; + static readonly VALIDATION_ERR: number; + static readonly TYPE_MISMATCH_ERR: number; + static readonly SECURITY_ERR: number; + static readonly NETWORK_ERR: number; + static readonly ABORT_ERR: number; + static readonly URL_MISMATCH_ERR: number; + static readonly QUOTA_EXCEEDED_ERR: number; + static readonly TIMEOUT_ERR: number; + static readonly INVALID_NODE_TYPE_ERR: number; + static readonly DATA_CLONE_ERR: number; + get stack(): any; + set stack(value: any); +} +type WorkerGlobalScopeEventMap = { + fetch: FetchEvent; + scheduled: ScheduledEvent; + queue: QueueEvent; + unhandledrejection: PromiseRejectionEvent; + rejectionhandled: PromiseRejectionEvent; +}; +declare abstract class WorkerGlobalScope extends EventTarget { + EventTarget: typeof 
EventTarget; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console) */ +interface Console { + "assert"(condition?: boolean, ...data: any[]): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/clear_static) */ + clear(): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/count_static) */ + count(label?: string): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/countreset_static) */ + countReset(label?: string): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/debug_static) */ + debug(...data: any[]): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/dir_static) */ + dir(item?: any, options?: any): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/dirxml_static) */ + dirxml(...data: any[]): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/error_static) */ + error(...data: any[]): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/group_static) */ + group(...data: any[]): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/groupcollapsed_static) */ + groupCollapsed(...data: any[]): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/groupend_static) */ + groupEnd(): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/info_static) */ + info(...data: any[]): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/log_static) */ + log(...data: any[]): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/table_static) */ + table(tabularData?: any, properties?: string[]): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/time_static) */ + time(label?: string): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/timeend_static) */ + timeEnd(label?: string): 
void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/timelog_static) */ + timeLog(label?: string, ...data: any[]): void; + timeStamp(label?: string): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/trace_static) */ + trace(...data: any[]): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/warn_static) */ + warn(...data: any[]): void; +} +declare const console: Console; +type BufferSource = ArrayBufferView | ArrayBuffer; +type TypedArray = Int8Array | Uint8Array | Uint8ClampedArray | Int16Array | Uint16Array | Int32Array | Uint32Array | Float32Array | Float64Array | BigInt64Array | BigUint64Array; +declare namespace WebAssembly { + class CompileError extends Error { + constructor(message?: string); + } + class RuntimeError extends Error { + constructor(message?: string); + } + type ValueType = "anyfunc" | "externref" | "f32" | "f64" | "i32" | "i64" | "v128"; + interface GlobalDescriptor { + value: ValueType; + mutable?: boolean; + } + class Global { + constructor(descriptor: GlobalDescriptor, value?: any); + value: any; + valueOf(): any; + } + type ImportValue = ExportValue | number; + type ModuleImports = Record; + type Imports = Record; + type ExportValue = Function | Global | Memory | Table; + type Exports = Record; + class Instance { + constructor(module: Module, imports?: Imports); + readonly exports: Exports; + } + interface MemoryDescriptor { + initial: number; + maximum?: number; + shared?: boolean; + } + class Memory { + constructor(descriptor: MemoryDescriptor); + readonly buffer: ArrayBuffer; + grow(delta: number): number; + } + type ImportExportKind = "function" | "global" | "memory" | "table"; + interface ModuleExportDescriptor { + kind: ImportExportKind; + name: string; + } + interface ModuleImportDescriptor { + kind: ImportExportKind; + module: string; + name: string; + } + abstract class Module { + static customSections(module: Module, sectionName: string): 
ArrayBuffer[]; + static exports(module: Module): ModuleExportDescriptor[]; + static imports(module: Module): ModuleImportDescriptor[]; + } + type TableKind = "anyfunc" | "externref"; + interface TableDescriptor { + element: TableKind; + initial: number; + maximum?: number; + } + class Table { + constructor(descriptor: TableDescriptor, value?: any); + readonly length: number; + get(index: number): any; + grow(delta: number, value?: any): number; + set(index: number, value?: any): void; + } + function instantiate(module: Module, imports?: Imports): Promise; + function validate(bytes: BufferSource): boolean; +} +/** + * This ServiceWorker API interface represents the global execution context of a service worker. + * Available only in secure contexts. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ServiceWorkerGlobalScope) + */ +interface ServiceWorkerGlobalScope extends WorkerGlobalScope { + DOMException: typeof DOMException; + WorkerGlobalScope: typeof WorkerGlobalScope; + btoa(data: string): string; + atob(data: string): string; + setTimeout(callback: (...args: any[]) => void, msDelay?: number): number; + setTimeout(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; + clearTimeout(timeoutId: number | null): void; + setInterval(callback: (...args: any[]) => void, msDelay?: number): number; + setInterval(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; + clearInterval(timeoutId: number | null): void; + queueMicrotask(task: Function): void; + structuredClone(value: T, options?: StructuredSerializeOptions): T; + reportError(error: any): void; + fetch(input: RequestInfo | URL, init?: RequestInit): Promise; + self: ServiceWorkerGlobalScope; + crypto: Crypto; + caches: CacheStorage; + scheduler: Scheduler; + performance: Performance; + Cloudflare: Cloudflare; + readonly origin: string; + Event: typeof Event; + ExtendableEvent: typeof ExtendableEvent; + CustomEvent: typeof CustomEvent; + 
PromiseRejectionEvent: typeof PromiseRejectionEvent; + FetchEvent: typeof FetchEvent; + TailEvent: typeof TailEvent; + TraceEvent: typeof TailEvent; + ScheduledEvent: typeof ScheduledEvent; + MessageEvent: typeof MessageEvent; + CloseEvent: typeof CloseEvent; + ReadableStreamDefaultReader: typeof ReadableStreamDefaultReader; + ReadableStreamBYOBReader: typeof ReadableStreamBYOBReader; + ReadableStream: typeof ReadableStream; + WritableStream: typeof WritableStream; + WritableStreamDefaultWriter: typeof WritableStreamDefaultWriter; + TransformStream: typeof TransformStream; + ByteLengthQueuingStrategy: typeof ByteLengthQueuingStrategy; + CountQueuingStrategy: typeof CountQueuingStrategy; + ErrorEvent: typeof ErrorEvent; + EventSource: typeof EventSource; + ReadableStreamBYOBRequest: typeof ReadableStreamBYOBRequest; + ReadableStreamDefaultController: typeof ReadableStreamDefaultController; + ReadableByteStreamController: typeof ReadableByteStreamController; + WritableStreamDefaultController: typeof WritableStreamDefaultController; + TransformStreamDefaultController: typeof TransformStreamDefaultController; + CompressionStream: typeof CompressionStream; + DecompressionStream: typeof DecompressionStream; + TextEncoderStream: typeof TextEncoderStream; + TextDecoderStream: typeof TextDecoderStream; + Headers: typeof Headers; + Body: typeof Body; + Request: typeof Request; + Response: typeof Response; + WebSocket: typeof WebSocket; + WebSocketPair: typeof WebSocketPair; + WebSocketRequestResponsePair: typeof WebSocketRequestResponsePair; + AbortController: typeof AbortController; + AbortSignal: typeof AbortSignal; + TextDecoder: typeof TextDecoder; + TextEncoder: typeof TextEncoder; + navigator: Navigator; + Navigator: typeof Navigator; + URL: typeof URL; + URLSearchParams: typeof URLSearchParams; + URLPattern: typeof URLPattern; + Blob: typeof Blob; + File: typeof File; + FormData: typeof FormData; + Crypto: typeof Crypto; + SubtleCrypto: typeof SubtleCrypto; + 
CryptoKey: typeof CryptoKey; + CacheStorage: typeof CacheStorage; + Cache: typeof Cache; + FixedLengthStream: typeof FixedLengthStream; + IdentityTransformStream: typeof IdentityTransformStream; + HTMLRewriter: typeof HTMLRewriter; +} +declare function addEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetAddEventListenerOptions | boolean): void; +declare function removeEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetEventListenerOptions | boolean): void; +/** + * Dispatches a synthetic event event to target and returns true if either event's cancelable attribute value is false or its preventDefault() method was not invoked, and false otherwise. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/dispatchEvent) + */ +declare function dispatchEvent(event: WorkerGlobalScopeEventMap[keyof WorkerGlobalScopeEventMap]): boolean; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/btoa) */ +declare function btoa(data: string): string; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/atob) */ +declare function atob(data: string): string; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/setTimeout) */ +declare function setTimeout(callback: (...args: any[]) => void, msDelay?: number): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/setTimeout) */ +declare function setTimeout(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/clearTimeout) */ +declare function clearTimeout(timeoutId: number | null): void; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/setInterval) */ +declare function setInterval(callback: (...args: any[]) => void, msDelay?: number): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/setInterval) */ +declare function setInterval(callback: (...args: 
Args) => void, msDelay?: number, ...args: Args): number; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/clearInterval) */ +declare function clearInterval(timeoutId: number | null): void; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/queueMicrotask) */ +declare function queueMicrotask(task: Function): void; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/structuredClone) */ +declare function structuredClone(value: T, options?: StructuredSerializeOptions): T; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/reportError) */ +declare function reportError(error: any): void; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/fetch) */ +declare function fetch(input: RequestInfo | URL, init?: RequestInit): Promise; +declare const self: ServiceWorkerGlobalScope; +/** +* The Web Crypto API provides a set of low-level functions for common cryptographic tasks. +* The Workers runtime implements the full surface of this API, but with some differences in +* the [supported algorithms](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/#supported-algorithms) +* compared to those implemented in most browsers. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/) +*/ +declare const crypto: Crypto; +/** +* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) +*/ +declare const caches: CacheStorage; +declare const scheduler: Scheduler; +/** +* The Workers runtime supports a subset of the Performance API, used to measure timing and performance, +* as well as timing of subrequests and other operations. 
+* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/) +*/ +declare const performance: Performance; +declare const Cloudflare: Cloudflare; +declare const origin: string; +declare const navigator: Navigator; +interface TestController { +} +interface ExecutionContext { + waitUntil(promise: Promise): void; + passThroughOnException(): void; + props: any; +} +type ExportedHandlerFetchHandler = (request: Request>, env: Env, ctx: ExecutionContext) => Response | Promise; +type ExportedHandlerTailHandler = (events: TraceItem[], env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerTraceHandler = (traces: TraceItem[], env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerTailStreamHandler = (event: TailStream.TailEvent, env: Env, ctx: ExecutionContext) => TailStream.TailEventHandlerType | Promise; +type ExportedHandlerScheduledHandler = (controller: ScheduledController, env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerQueueHandler = (batch: MessageBatch, env: Env, ctx: ExecutionContext) => void | Promise; +type ExportedHandlerTestHandler = (controller: TestController, env: Env, ctx: ExecutionContext) => void | Promise; +interface ExportedHandler { + fetch?: ExportedHandlerFetchHandler; + tail?: ExportedHandlerTailHandler; + trace?: ExportedHandlerTraceHandler; + tailStream?: ExportedHandlerTailStreamHandler; + scheduled?: ExportedHandlerScheduledHandler; + test?: ExportedHandlerTestHandler; + email?: EmailExportedHandler; + queue?: ExportedHandlerQueueHandler; +} +interface StructuredSerializeOptions { + transfer?: any[]; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent) */ +declare abstract class PromiseRejectionEvent extends Event { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent/promise) */ + readonly promise: Promise; + /* [MDN 
Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent/reason) */ + readonly reason: any; +} +declare abstract class Navigator { + sendBeacon(url: string, body?: (ReadableStream | string | (ArrayBuffer | ArrayBufferView) | Blob | FormData | URLSearchParams | URLSearchParams)): boolean; + readonly userAgent: string; + readonly hardwareConcurrency: number; +} +/** +* The Workers runtime supports a subset of the Performance API, used to measure timing and performance, +* as well as timing of subrequests and other operations. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/) +*/ +interface Performance { + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/#performancetimeorigin) */ + readonly timeOrigin: number; + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/#performancenow) */ + now(): number; +} +interface AlarmInvocationInfo { + readonly isRetry: boolean; + readonly retryCount: number; +} +interface Cloudflare { + readonly compatibilityFlags: Record; +} +interface DurableObject { + fetch(request: Request): Response | Promise; + alarm?(alarmInfo?: AlarmInvocationInfo): void | Promise; + webSocketMessage?(ws: WebSocket, message: string | ArrayBuffer): void | Promise; + webSocketClose?(ws: WebSocket, code: number, reason: string, wasClean: boolean): void | Promise; + webSocketError?(ws: WebSocket, error: unknown): void | Promise; +} +type DurableObjectStub = Fetcher & { + readonly id: DurableObjectId; + readonly name?: string; +}; +interface DurableObjectId { + toString(): string; + equals(other: DurableObjectId): boolean; + readonly name?: string; +} +interface DurableObjectNamespace { + newUniqueId(options?: DurableObjectNamespaceNewUniqueIdOptions): DurableObjectId; + idFromName(name: string): DurableObjectId; + idFromString(id: string): DurableObjectId; + get(id: DurableObjectId, options?: 
DurableObjectNamespaceGetDurableObjectOptions): DurableObjectStub; + jurisdiction(jurisdiction: DurableObjectJurisdiction): DurableObjectNamespace; +} +type DurableObjectJurisdiction = "eu" | "fedramp"; +interface DurableObjectNamespaceNewUniqueIdOptions { + jurisdiction?: DurableObjectJurisdiction; +} +type DurableObjectLocationHint = "wnam" | "enam" | "sam" | "weur" | "eeur" | "apac" | "oc" | "afr" | "me"; +interface DurableObjectNamespaceGetDurableObjectOptions { + locationHint?: DurableObjectLocationHint; +} +interface DurableObjectState { + waitUntil(promise: Promise): void; + readonly id: DurableObjectId; + readonly storage: DurableObjectStorage; + container?: Container; + blockConcurrencyWhile(callback: () => Promise): Promise; + acceptWebSocket(ws: WebSocket, tags?: string[]): void; + getWebSockets(tag?: string): WebSocket[]; + setWebSocketAutoResponse(maybeReqResp?: WebSocketRequestResponsePair): void; + getWebSocketAutoResponse(): WebSocketRequestResponsePair | null; + getWebSocketAutoResponseTimestamp(ws: WebSocket): Date | null; + setHibernatableWebSocketEventTimeout(timeoutMs?: number): void; + getHibernatableWebSocketEventTimeout(): number | null; + getTags(ws: WebSocket): string[]; + abort(reason?: string): void; +} +interface DurableObjectTransaction { + get(key: string, options?: DurableObjectGetOptions): Promise; + get(keys: string[], options?: DurableObjectGetOptions): Promise>; + list(options?: DurableObjectListOptions): Promise>; + put(key: string, value: T, options?: DurableObjectPutOptions): Promise; + put(entries: Record, options?: DurableObjectPutOptions): Promise; + delete(key: string, options?: DurableObjectPutOptions): Promise; + delete(keys: string[], options?: DurableObjectPutOptions): Promise; + rollback(): void; + getAlarm(options?: DurableObjectGetAlarmOptions): Promise; + setAlarm(scheduledTime: number | Date, options?: DurableObjectSetAlarmOptions): Promise; + deleteAlarm(options?: DurableObjectSetAlarmOptions): Promise; +} 
+interface DurableObjectStorage { + get(key: string, options?: DurableObjectGetOptions): Promise; + get(keys: string[], options?: DurableObjectGetOptions): Promise>; + list(options?: DurableObjectListOptions): Promise>; + put(key: string, value: T, options?: DurableObjectPutOptions): Promise; + put(entries: Record, options?: DurableObjectPutOptions): Promise; + delete(key: string, options?: DurableObjectPutOptions): Promise; + delete(keys: string[], options?: DurableObjectPutOptions): Promise; + deleteAll(options?: DurableObjectPutOptions): Promise; + transaction(closure: (txn: DurableObjectTransaction) => Promise): Promise; + getAlarm(options?: DurableObjectGetAlarmOptions): Promise; + setAlarm(scheduledTime: number | Date, options?: DurableObjectSetAlarmOptions): Promise; + deleteAlarm(options?: DurableObjectSetAlarmOptions): Promise; + sync(): Promise; + sql: SqlStorage; + transactionSync(closure: () => T): T; + getCurrentBookmark(): Promise; + getBookmarkForTime(timestamp: number | Date): Promise; + onNextSessionRestoreBookmark(bookmark: string): Promise; +} +interface DurableObjectListOptions { + start?: string; + startAfter?: string; + end?: string; + prefix?: string; + reverse?: boolean; + limit?: number; + allowConcurrency?: boolean; + noCache?: boolean; +} +interface DurableObjectGetOptions { + allowConcurrency?: boolean; + noCache?: boolean; +} +interface DurableObjectGetAlarmOptions { + allowConcurrency?: boolean; +} +interface DurableObjectPutOptions { + allowConcurrency?: boolean; + allowUnconfirmed?: boolean; + noCache?: boolean; +} +interface DurableObjectSetAlarmOptions { + allowConcurrency?: boolean; + allowUnconfirmed?: boolean; +} +declare class WebSocketRequestResponsePair { + constructor(request: string, response: string); + get request(): string; + get response(): string; +} +interface AnalyticsEngineDataset { + writeDataPoint(event?: AnalyticsEngineDataPoint): void; +} +interface AnalyticsEngineDataPoint { + indexes?: ((ArrayBuffer | string) 
| null)[]; + doubles?: number[]; + blobs?: ((ArrayBuffer | string) | null)[]; +} +/** + * An event which takes place in the DOM. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event) + */ +declare class Event { + constructor(type: string, init?: EventInit); + /** + * Returns the type of event, e.g. "click", "hashchange", or "submit". + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/type) + */ + get type(): string; + /** + * Returns the event's phase, which is one of NONE, CAPTURING_PHASE, AT_TARGET, and BUBBLING_PHASE. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/eventPhase) + */ + get eventPhase(): number; + /** + * Returns true or false depending on how event was initialized. True if event invokes listeners past a ShadowRoot node that is the root of its target, and false otherwise. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/composed) + */ + get composed(): boolean; + /** + * Returns true or false depending on how event was initialized. True if event goes through its target's ancestors in reverse tree order, and false otherwise. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/bubbles) + */ + get bubbles(): boolean; + /** + * Returns true or false depending on how event was initialized. Its return value does not always carry meaning, but true can indicate that part of the operation during which event was dispatched, can be canceled by invoking the preventDefault() method. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelable) + */ + get cancelable(): boolean; + /** + * Returns true if preventDefault() was invoked successfully to indicate cancelation, and false otherwise. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/defaultPrevented) + */ + get defaultPrevented(): boolean; + /** + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/returnValue) + */ + get returnValue(): boolean; + /** + * Returns the object whose event listener's callback is currently being invoked. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/currentTarget) + */ + get currentTarget(): EventTarget | undefined; + /** + * Returns the object to which event is dispatched (its target). + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/target) + */ + get target(): EventTarget | undefined; + /** + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/srcElement) + */ + get srcElement(): EventTarget | undefined; + /** + * Returns the event's timestamp as the number of milliseconds measured relative to the time origin. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/timeStamp) + */ + get timeStamp(): number; + /** + * Returns true if event was dispatched by the user agent, and false otherwise. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/isTrusted) + */ + get isTrusted(): boolean; + /** + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelBubble) + */ + get cancelBubble(): boolean; + /** + * @deprecated + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelBubble) + */ + set cancelBubble(value: boolean); + /** + * Invoking this method prevents event from reaching any registered event listeners after the current one finishes running and, when dispatched in a tree, also prevents event from reaching any other objects. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/stopImmediatePropagation) + */ + stopImmediatePropagation(): void; + /** + * If invoked when the cancelable attribute value is true, and while executing a listener for the event with passive set to false, signals to the operation that caused event to be dispatched that it needs to be canceled. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/preventDefault) + */ + preventDefault(): void; + /** + * When dispatched in a tree, invoking this method prevents event from reaching any objects other than the current object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/stopPropagation) + */ + stopPropagation(): void; + /** + * Returns the invocation target objects of event's path (objects on which listeners will be invoked), except for any nodes in shadow trees of which the shadow root's mode is "closed" that are not reachable from event's currentTarget. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/composedPath) + */ + composedPath(): EventTarget[]; + static readonly NONE: number; + static readonly CAPTURING_PHASE: number; + static readonly AT_TARGET: number; + static readonly BUBBLING_PHASE: number; +} +interface EventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; +} +type EventListener = (event: EventType) => void; +interface EventListenerObject { + handleEvent(event: EventType): void; +} +type EventListenerOrEventListenerObject = EventListener | EventListenerObject; +/** + * EventTarget is a DOM interface implemented by objects that can receive events and may have listeners for them. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget) + */ +declare class EventTarget = Record> { + constructor(); + /** + * Appends an event listener for events whose type attribute value is type. The callback argument sets the callback that will be invoked when the event is dispatched. 
+ * + * The options argument sets listener-specific options. For compatibility this can be a boolean, in which case the method behaves exactly as if the value was specified as options's capture. + * + * When set to true, options's capture prevents callback from being invoked when the event's eventPhase attribute value is BUBBLING_PHASE. When false (or not present), callback will not be invoked when event's eventPhase attribute value is CAPTURING_PHASE. Either way, callback will be invoked if event's eventPhase attribute value is AT_TARGET. + * + * When set to true, options's passive indicates that the callback will not cancel the event by invoking preventDefault(). This is used to enable performance optimizations described in § 2.8 Observing event listeners. + * + * When set to true, options's once indicates that the callback will only be invoked once after which the event listener will be removed. + * + * If an AbortSignal is passed for options's signal, then the event listener will be removed when signal is aborted. + * + * The event listener is appended to target's event listener list and is not appended if it has the same type, callback, and capture. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/addEventListener) + */ + addEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetAddEventListenerOptions | boolean): void; + /** + * Removes the event listener in target's event listener list with the same type, callback, and options. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/removeEventListener) + */ + removeEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetEventListenerOptions | boolean): void; + /** + * Dispatches a synthetic event event to target and returns true if either event's cancelable attribute value is false or its preventDefault() method was not invoked, and false otherwise. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/dispatchEvent) + */ + dispatchEvent(event: EventMap[keyof EventMap]): boolean; +} +interface EventTargetEventListenerOptions { + capture?: boolean; +} +interface EventTargetAddEventListenerOptions { + capture?: boolean; + passive?: boolean; + once?: boolean; + signal?: AbortSignal; +} +interface EventTargetHandlerObject { + handleEvent: (event: Event) => any | undefined; +} +/** + * A controller object that allows you to abort one or more DOM requests as and when desired. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController) + */ +declare class AbortController { + constructor(); + /** + * Returns the AbortSignal object associated with this object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController/signal) + */ + get signal(): AbortSignal; + /** + * Invoking this method will set this object's AbortSignal's aborted flag and signal to any observers that the associated activity is to be aborted. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController/abort) + */ + abort(reason?: any): void; +} +/** + * A signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal) + */ +declare abstract class AbortSignal extends EventTarget { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_static) */ + static abort(reason?: any): AbortSignal; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/timeout_static) */ + static timeout(delay: number): AbortSignal; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/any_static) */ + static any(signals: AbortSignal[]): AbortSignal; + /** + * Returns true if this AbortSignal's AbortController has signaled to abort, and false otherwise. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/aborted) + */ + get aborted(): boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/reason) */ + get reason(): any; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_event) */ + get onabort(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_event) */ + set onabort(value: any | null); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/throwIfAborted) */ + throwIfAborted(): void; +} +interface Scheduler { + wait(delay: number, maybeOptions?: SchedulerWaitOptions): Promise; +} +interface SchedulerWaitOptions { + signal?: AbortSignal; +} +/** + * Extends the lifetime of the install and activate events dispatched on the global scope as part of the service worker lifecycle. This ensures that any functional events (like FetchEvent) are not dispatched until it upgrades database schemas and deletes the outdated cache entries. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ExtendableEvent) + */ +declare abstract class ExtendableEvent extends Event { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ExtendableEvent/waitUntil) */ + waitUntil(promise: Promise): void; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CustomEvent) */ +declare class CustomEvent extends Event { + constructor(type: string, init?: CustomEventCustomEventInit); + /** + * Returns any custom data event was created with. Typically used for synthetic events. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CustomEvent/detail) + */ + get detail(): T; +} +interface CustomEventCustomEventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; + detail?: any; +} +/** + * A file-like object of immutable, raw data. Blobs represent data that isn't necessarily in a JavaScript-native format. 
The File interface is based on Blob, inheriting blob functionality and expanding it to support files on the user's system. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob) + */ +declare class Blob { + constructor(type?: ((ArrayBuffer | ArrayBufferView) | string | Blob)[], options?: BlobOptions); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/size) */ + get size(): number; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/type) */ + get type(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/slice) */ + slice(start?: number, end?: number, type?: string): Blob; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/arrayBuffer) */ + arrayBuffer(): Promise; + bytes(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/text) */ + text(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/stream) */ + stream(): ReadableStream; +} +interface BlobOptions { + type?: string; +} +/** + * Provides information about files and allows JavaScript in a web page to access their content. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/File) + */ +declare class File extends Blob { + constructor(bits: ((ArrayBuffer | ArrayBufferView) | string | Blob)[] | undefined, name: string, options?: FileOptions); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/name) */ + get name(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/lastModified) */ + get lastModified(): number; +} +interface FileOptions { + type?: string; + lastModified?: number; +} +/** +* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. 
+* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) +*/ +declare abstract class CacheStorage { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CacheStorage/open) */ + open(cacheName: string): Promise; + readonly default: Cache; +} +/** +* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) +*/ +declare abstract class Cache { + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#delete) */ + delete(request: RequestInfo | URL, options?: CacheQueryOptions): Promise; + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#match) */ + match(request: RequestInfo | URL, options?: CacheQueryOptions): Promise; + /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#put) */ + put(request: RequestInfo | URL, response: Response): Promise; +} +interface CacheQueryOptions { + ignoreMethod?: boolean; +} +/** +* The Web Crypto API provides a set of low-level functions for common cryptographic tasks. +* The Workers runtime implements the full surface of this API, but with some differences in +* the [supported algorithms](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/#supported-algorithms) +* compared to those implemented in most browsers. +* +* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/) +*/ +declare abstract class Crypto { + /** + * Available only in secure contexts. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/subtle) + */ + get subtle(): SubtleCrypto; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/getRandomValues) */ + getRandomValues(buffer: T): T; + /** + * Available only in secure contexts. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/randomUUID) + */ + randomUUID(): string; + DigestStream: typeof DigestStream; +} +/** + * This Web Crypto API interface provides a number of low-level cryptographic functions. It is accessed via the Crypto.subtle properties available in a window context (via Window.crypto). + * Available only in secure contexts. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto) + */ +declare abstract class SubtleCrypto { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/encrypt) */ + encrypt(algorithm: string | SubtleCryptoEncryptAlgorithm, key: CryptoKey, plainText: ArrayBuffer | ArrayBufferView): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/decrypt) */ + decrypt(algorithm: string | SubtleCryptoEncryptAlgorithm, key: CryptoKey, cipherText: ArrayBuffer | ArrayBufferView): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/sign) */ + sign(algorithm: string | SubtleCryptoSignAlgorithm, key: CryptoKey, data: ArrayBuffer | ArrayBufferView): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/verify) */ + verify(algorithm: string | SubtleCryptoSignAlgorithm, key: CryptoKey, signature: ArrayBuffer | ArrayBufferView, data: ArrayBuffer | ArrayBufferView): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/digest) */ + digest(algorithm: string | SubtleCryptoHashAlgorithm, data: ArrayBuffer | ArrayBufferView): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/generateKey) */ + generateKey(algorithm: string | SubtleCryptoGenerateKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveKey) */ + deriveKey(algorithm: string | SubtleCryptoDeriveKeyAlgorithm, baseKey: CryptoKey, 
derivedKeyAlgorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveBits) */ + deriveBits(algorithm: string | SubtleCryptoDeriveKeyAlgorithm, baseKey: CryptoKey, length?: number | null): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/importKey) */ + importKey(format: string, keyData: (ArrayBuffer | ArrayBufferView) | JsonWebKey, algorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/exportKey) */ + exportKey(format: string, key: CryptoKey): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/wrapKey) */ + wrapKey(format: string, key: CryptoKey, wrappingKey: CryptoKey, wrapAlgorithm: string | SubtleCryptoEncryptAlgorithm): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/unwrapKey) */ + unwrapKey(format: string, wrappedKey: ArrayBuffer | ArrayBufferView, unwrappingKey: CryptoKey, unwrapAlgorithm: string | SubtleCryptoEncryptAlgorithm, unwrappedKeyAlgorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; + timingSafeEqual(a: ArrayBuffer | ArrayBufferView, b: ArrayBuffer | ArrayBufferView): boolean; +} +/** + * The CryptoKey dictionary of the Web Crypto API represents a cryptographic key. + * Available only in secure contexts. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey) + */ +declare abstract class CryptoKey { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/type) */ + readonly type: string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/extractable) */ + readonly extractable: boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/algorithm) */ + readonly algorithm: CryptoKeyKeyAlgorithm | CryptoKeyAesKeyAlgorithm | CryptoKeyHmacKeyAlgorithm | CryptoKeyRsaKeyAlgorithm | CryptoKeyEllipticKeyAlgorithm | CryptoKeyArbitraryKeyAlgorithm; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/usages) */ + readonly usages: string[]; +} +interface CryptoKeyPair { + publicKey: CryptoKey; + privateKey: CryptoKey; +} +interface JsonWebKey { + kty: string; + use?: string; + key_ops?: string[]; + alg?: string; + ext?: boolean; + crv?: string; + x?: string; + y?: string; + d?: string; + n?: string; + e?: string; + p?: string; + q?: string; + dp?: string; + dq?: string; + qi?: string; + oth?: RsaOtherPrimesInfo[]; + k?: string; +} +interface RsaOtherPrimesInfo { + r?: string; + d?: string; + t?: string; +} +interface SubtleCryptoDeriveKeyAlgorithm { + name: string; + salt?: (ArrayBuffer | ArrayBufferView); + iterations?: number; + hash?: (string | SubtleCryptoHashAlgorithm); + $public?: CryptoKey; + info?: (ArrayBuffer | ArrayBufferView); +} +interface SubtleCryptoEncryptAlgorithm { + name: string; + iv?: (ArrayBuffer | ArrayBufferView); + additionalData?: (ArrayBuffer | ArrayBufferView); + tagLength?: number; + counter?: (ArrayBuffer | ArrayBufferView); + length?: number; + label?: (ArrayBuffer | ArrayBufferView); +} +interface SubtleCryptoGenerateKeyAlgorithm { + name: string; + hash?: (string | SubtleCryptoHashAlgorithm); + modulusLength?: number; + publicExponent?: (ArrayBuffer | ArrayBufferView); + length?: number; + namedCurve?: string; +} +interface 
SubtleCryptoHashAlgorithm { + name: string; +} +interface SubtleCryptoImportKeyAlgorithm { + name: string; + hash?: (string | SubtleCryptoHashAlgorithm); + length?: number; + namedCurve?: string; + compressed?: boolean; +} +interface SubtleCryptoSignAlgorithm { + name: string; + hash?: (string | SubtleCryptoHashAlgorithm); + dataLength?: number; + saltLength?: number; +} +interface CryptoKeyKeyAlgorithm { + name: string; +} +interface CryptoKeyAesKeyAlgorithm { + name: string; + length: number; +} +interface CryptoKeyHmacKeyAlgorithm { + name: string; + hash: CryptoKeyKeyAlgorithm; + length: number; +} +interface CryptoKeyRsaKeyAlgorithm { + name: string; + modulusLength: number; + publicExponent: ArrayBuffer | ArrayBufferView; + hash?: CryptoKeyKeyAlgorithm; +} +interface CryptoKeyEllipticKeyAlgorithm { + name: string; + namedCurve: string; +} +interface CryptoKeyArbitraryKeyAlgorithm { + name: string; + hash?: CryptoKeyKeyAlgorithm; + namedCurve?: string; + length?: number; +} +declare class DigestStream extends WritableStream { + constructor(algorithm: string | SubtleCryptoHashAlgorithm); + readonly digest: Promise; + get bytesWritten(): number | bigint; +} +/** + * A decoder for a specific method, that is a specific character encoding, like utf-8, iso-8859-2, koi8, cp1261, gbk, etc. A decoder takes a stream of bytes as input and emits a stream of code points. For a more scalable, non-native library, see StringView – a C-like representation of strings based on typed arrays. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoder) + */ +declare class TextDecoder { + constructor(label?: string, options?: TextDecoderConstructorOptions); + /** + * Returns the result of running encoding's decoder. The method can be invoked zero or more times with options's stream set to true, and then once without options's stream (or set to false), to process a fragmented input. 
If the invocation without options's stream (or set to false) has no input, it's clearest to omit both arguments. + * + * ``` + * var string = "", decoder = new TextDecoder(encoding), buffer; + * while(buffer = next_chunk()) { + * string += decoder.decode(buffer, {stream:true}); + * } + * string += decoder.decode(); // end-of-queue + * ``` + * + * If the error mode is "fatal" and encoding's decoder returns error, throws a TypeError. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoder/decode) + */ + decode(input?: (ArrayBuffer | ArrayBufferView), options?: TextDecoderDecodeOptions): string; + get encoding(): string; + get fatal(): boolean; + get ignoreBOM(): boolean; +} +/** + * TextEncoder takes a stream of code points as input and emits a stream of bytes. For a more scalable, non-native library, see StringView – a C-like representation of strings based on typed arrays. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder) + */ +declare class TextEncoder { + constructor(); + /** + * Returns the result of running UTF-8's encoder. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder/encode) + */ + encode(input?: string): Uint8Array; + /** + * Runs the UTF-8 encoder on source, stores the result of that operation into destination, and returns the progress made as an object wherein read is the number of converted code units of source and written is the number of bytes modified in destination. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder/encodeInto) + */ + encodeInto(input: string, buffer: ArrayBuffer | ArrayBufferView): TextEncoderEncodeIntoResult; + get encoding(): string; +} +interface TextDecoderConstructorOptions { + fatal: boolean; + ignoreBOM: boolean; +} +interface TextDecoderDecodeOptions { + stream: boolean; +} +interface TextEncoderEncodeIntoResult { + read: number; + written: number; +} +/** + * Events providing information related to errors in scripts or in files. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent) + */ +declare class ErrorEvent extends Event { + constructor(type: string, init?: ErrorEventErrorEventInit); + get filename(): string; + get message(): string; + get lineno(): number; + get colno(): number; + get error(): any; +} +interface ErrorEventErrorEventInit { + message?: string; + filename?: string; + lineno?: number; + colno?: number; + error?: any; +} +/** + * Provides a way to easily construct a set of key/value pairs representing form fields and their values, which can then be easily sent using the XMLHttpRequest.send() method. It uses the same format a form would use if the encoding type were set to "multipart/form-data". 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData) + */ +declare class FormData { + constructor(); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) */ + append(name: string, value: string): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) */ + append(name: string, value: Blob, filename?: string): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/delete) */ + delete(name: string): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/get) */ + get(name: string): (File | string) | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/getAll) */ + getAll(name: string): (File | string)[]; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/has) */ + has(name: string): boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/set) */ + set(name: string, value: string): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/set) */ + set(name: string, value: Blob, filename?: string): void; + /* Returns an array of key, value pairs for every entry in the list. */ + entries(): IterableIterator<[ + key: string, + value: File | string + ]>; + /* Returns a list of keys in the list. */ + keys(): IterableIterator; + /* Returns a list of values in the list. 
*/ + values(): IterableIterator<(File | string)>; + forEach(callback: (this: This, value: File | string, key: string, parent: FormData) => void, thisArg?: This): void; + [Symbol.iterator](): IterableIterator<[ + key: string, + value: File | string + ]>; +} +interface ContentOptions { + html?: boolean; +} +declare class HTMLRewriter { + constructor(); + on(selector: string, handlers: HTMLRewriterElementContentHandlers): HTMLRewriter; + onDocument(handlers: HTMLRewriterDocumentContentHandlers): HTMLRewriter; + transform(response: Response): Response; +} +interface HTMLRewriterElementContentHandlers { + element?(element: Element): void | Promise; + comments?(comment: Comment): void | Promise; + text?(element: Text): void | Promise; +} +interface HTMLRewriterDocumentContentHandlers { + doctype?(doctype: Doctype): void | Promise; + comments?(comment: Comment): void | Promise; + text?(text: Text): void | Promise; + end?(end: DocumentEnd): void | Promise; +} +interface Doctype { + readonly name: string | null; + readonly publicId: string | null; + readonly systemId: string | null; +} +interface Element { + tagName: string; + readonly attributes: IterableIterator; + readonly removed: boolean; + readonly namespaceURI: string; + getAttribute(name: string): string | null; + hasAttribute(name: string): boolean; + setAttribute(name: string, value: string): Element; + removeAttribute(name: string): Element; + before(content: string | ReadableStream | Response, options?: ContentOptions): Element; + after(content: string | ReadableStream | Response, options?: ContentOptions): Element; + prepend(content: string | ReadableStream | Response, options?: ContentOptions): Element; + append(content: string | ReadableStream | Response, options?: ContentOptions): Element; + replace(content: string | ReadableStream | Response, options?: ContentOptions): Element; + remove(): Element; + removeAndKeepContent(): Element; + setInnerContent(content: string | ReadableStream | Response, options?: 
ContentOptions): Element; + onEndTag(handler: (tag: EndTag) => void | Promise): void; +} +interface EndTag { + name: string; + before(content: string | ReadableStream | Response, options?: ContentOptions): EndTag; + after(content: string | ReadableStream | Response, options?: ContentOptions): EndTag; + remove(): EndTag; +} +interface Comment { + text: string; + readonly removed: boolean; + before(content: string, options?: ContentOptions): Comment; + after(content: string, options?: ContentOptions): Comment; + replace(content: string, options?: ContentOptions): Comment; + remove(): Comment; +} +interface Text { + readonly text: string; + readonly lastInTextNode: boolean; + readonly removed: boolean; + before(content: string | ReadableStream | Response, options?: ContentOptions): Text; + after(content: string | ReadableStream | Response, options?: ContentOptions): Text; + replace(content: string | ReadableStream | Response, options?: ContentOptions): Text; + remove(): Text; +} +interface DocumentEnd { + append(content: string, options?: ContentOptions): DocumentEnd; +} +/** + * This is the event type for fetch events dispatched on the service worker global scope. It contains information about the fetch, including the request and how the receiver will treat the response. It provides the event.respondWith() method, which allows us to provide a response to this fetch. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent) + */ +declare abstract class FetchEvent extends ExtendableEvent { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent/request) */ + readonly request: Request; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent/respondWith) */ + respondWith(promise: Response | Promise): void; + passThroughOnException(): void; +} +type HeadersInit = Headers | Iterable> | Record; +/** + * This Fetch API interface allows you to perform various actions on HTTP request and response headers. 
These actions include retrieving, setting, adding to, and removing. A Headers object has an associated header list, which is initially empty and consists of zero or more name and value pairs.  You can add to this using methods like append() (see Examples.) In all methods of this interface, header names are matched by case-insensitive byte sequence. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers) + */ +declare class Headers { + constructor(init?: HeadersInit); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/get) */ + get(name: string): string | null; + getAll(name: string): string[]; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/getSetCookie) */ + getSetCookie(): string[]; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/has) */ + has(name: string): boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/set) */ + set(name: string, value: string): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/append) */ + append(name: string, value: string): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/delete) */ + delete(name: string): void; + forEach(callback: (this: This, value: string, key: string, parent: Headers) => void, thisArg?: This): void; + /* Returns an iterator allowing to go through all key/value pairs contained in this object. */ + entries(): IterableIterator<[ + key: string, + value: string + ]>; + /* Returns an iterator allowing to go through all keys of the key/value pairs contained in this object. */ + keys(): IterableIterator; + /* Returns an iterator allowing to go through all values of the key/value pairs contained in this object. 
*/ + values(): IterableIterator; + [Symbol.iterator](): IterableIterator<[ + key: string, + value: string + ]>; +} +type BodyInit = ReadableStream | string | ArrayBuffer | ArrayBufferView | Blob | URLSearchParams | FormData; +declare abstract class Body { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/body) */ + get body(): ReadableStream | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/bodyUsed) */ + get bodyUsed(): boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/arrayBuffer) */ + arrayBuffer(): Promise; + bytes(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/text) */ + text(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/json) */ + json(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/formData) */ + formData(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/blob) */ + blob(): Promise; +} +/** + * This Fetch API interface represents the response to a request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response) + */ +declare var Response: { + prototype: Response; + new (body?: BodyInit | null, init?: ResponseInit): Response; + error(): Response; + redirect(url: string, status?: number): Response; + json(any: any, maybeInit?: (ResponseInit | Response)): Response; +}; +/** + * This Fetch API interface represents the response to a request. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response) + */ +interface Response extends Body { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/clone) */ + clone(): Response; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/status) */ + status: number; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/statusText) */ + statusText: string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/headers) */ + headers: Headers; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/ok) */ + ok: boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/redirected) */ + redirected: boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/url) */ + url: string; + webSocket: WebSocket | null; + cf: any | undefined; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/type) */ + type: "default" | "error"; +} +interface ResponseInit { + status?: number; + statusText?: string; + headers?: HeadersInit; + cf?: any; + webSocket?: (WebSocket | null); + encodeBody?: "automatic" | "manual"; +} +type RequestInfo> = Request | string; +/** + * This Fetch API interface represents a resource request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request) + */ +declare var Request: { + prototype: Request; + new >(input: RequestInfo | URL, init?: RequestInit): Request; +}; +/** + * This Fetch API interface represents a resource request. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request) + */ +interface Request> extends Body { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/clone) */ + clone(): Request; + /** + * Returns request's HTTP method, which is "GET" by default. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/method) + */ + method: string; + /** + * Returns the URL of request as a string. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/url) + */ + url: string; + /** + * Returns a Headers object consisting of the headers associated with request. Note that headers added in the network layer by the user agent will not be accounted for in this object, e.g., the "Host" header. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/headers) + */ + headers: Headers; + /** + * Returns the redirect mode associated with request, which is a string indicating how redirects for the request will be handled during fetching. A request will follow redirects by default. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/redirect) + */ + redirect: string; + fetcher: Fetcher | null; + /** + * Returns the signal associated with request, which is an AbortSignal object indicating whether or not request has been aborted, and its abort event handler. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/signal) + */ + signal: AbortSignal; + cf: Cf | undefined; + /** + * Returns request's subresource integrity metadata, which is a cryptographic hash of the resource being fetched. Its value consists of multiple hashes separated by whitespace. [SRI] + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/integrity) + */ + integrity: string; + /* Returns a boolean indicating whether or not request can outlive the global in which it was created. */ + keepalive: boolean; +} +interface RequestInit { + /* A string to set request's method. */ + method?: string; + /* A Headers object, an object literal, or an array of two-item arrays to set request's headers. */ + headers?: HeadersInit; + /* A BodyInit object or null to set request's body. */ + body?: BodyInit | null; + /* A string indicating whether request follows redirects, results in an error upon encountering a redirect, or returns the redirect (in an opaque fashion). Sets request's redirect. 
*/ + redirect?: string; + fetcher?: (Fetcher | null); + cf?: Cf; + /* A cryptographic hash of the resource to be fetched by request. Sets request's integrity. */ + integrity?: string; + /* An AbortSignal to set request's signal. */ + signal?: (AbortSignal | null); + encodeResponseBody?: "automatic" | "manual"; +} +type Service = Fetcher; +type Fetcher = (T extends Rpc.EntrypointBranded ? Rpc.Provider : unknown) & { + fetch(input: RequestInfo | URL, init?: RequestInit): Promise; + connect(address: SocketAddress | string, options?: SocketOptions): Socket; +}; +interface KVNamespaceListKey { + name: Key; + expiration?: number; + metadata?: Metadata; +} +type KVNamespaceListResult = { + list_complete: false; + keys: KVNamespaceListKey[]; + cursor: string; + cacheStatus: string | null; +} | { + list_complete: true; + keys: KVNamespaceListKey[]; + cacheStatus: string | null; +}; +interface KVNamespace { + get(key: Key, options?: Partial>): Promise; + get(key: Key, type: "text"): Promise; + get(key: Key, type: "json"): Promise; + get(key: Key, type: "arrayBuffer"): Promise; + get(key: Key, type: "stream"): Promise; + get(key: Key, options?: KVNamespaceGetOptions<"text">): Promise; + get(key: Key, options?: KVNamespaceGetOptions<"json">): Promise; + get(key: Key, options?: KVNamespaceGetOptions<"arrayBuffer">): Promise; + get(key: Key, options?: KVNamespaceGetOptions<"stream">): Promise; + get(key: Array, type: "text"): Promise>; + get(key: Array, type: "json"): Promise>; + get(key: Array, options?: Partial>): Promise>; + get(key: Array, options?: KVNamespaceGetOptions<"text">): Promise>; + get(key: Array, options?: KVNamespaceGetOptions<"json">): Promise>; + list(options?: KVNamespaceListOptions): Promise>; + put(key: Key, value: string | ArrayBuffer | ArrayBufferView | ReadableStream, options?: KVNamespacePutOptions): Promise; + getWithMetadata(key: Key, options?: Partial>): Promise>; + getWithMetadata(key: Key, type: "text"): Promise>; + getWithMetadata(key: Key, type: 
"json"): Promise>; + getWithMetadata(key: Key, type: "arrayBuffer"): Promise>; + getWithMetadata(key: Key, type: "stream"): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"text">): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"json">): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"arrayBuffer">): Promise>; + getWithMetadata(key: Key, options: KVNamespaceGetOptions<"stream">): Promise>; + getWithMetadata(key: Array, type: "text"): Promise>>; + getWithMetadata(key: Array, type: "json"): Promise>>; + getWithMetadata(key: Array, options?: Partial>): Promise>>; + getWithMetadata(key: Array, options?: KVNamespaceGetOptions<"text">): Promise>>; + getWithMetadata(key: Array, options?: KVNamespaceGetOptions<"json">): Promise>>; + delete(key: Key): Promise; +} +interface KVNamespaceListOptions { + limit?: number; + prefix?: (string | null); + cursor?: (string | null); +} +interface KVNamespaceGetOptions { + type: Type; + cacheTtl?: number; +} +interface KVNamespacePutOptions { + expiration?: number; + expirationTtl?: number; + metadata?: (any | null); +} +interface KVNamespaceGetWithMetadataResult { + value: Value | null; + metadata: Metadata | null; + cacheStatus: string | null; +} +type QueueContentType = "text" | "bytes" | "json" | "v8"; +interface Queue { + send(message: Body, options?: QueueSendOptions): Promise; + sendBatch(messages: Iterable>, options?: QueueSendBatchOptions): Promise; +} +interface QueueSendOptions { + contentType?: QueueContentType; + delaySeconds?: number; +} +interface QueueSendBatchOptions { + delaySeconds?: number; +} +interface MessageSendRequest { + body: Body; + contentType?: QueueContentType; + delaySeconds?: number; +} +interface QueueRetryOptions { + delaySeconds?: number; +} +interface Message { + readonly id: string; + readonly timestamp: Date; + readonly body: Body; + readonly attempts: number; + retry(options?: QueueRetryOptions): void; + ack(): void; +} +interface 
QueueEvent extends ExtendableEvent { + readonly messages: readonly Message[]; + readonly queue: string; + retryAll(options?: QueueRetryOptions): void; + ackAll(): void; +} +interface MessageBatch { + readonly messages: readonly Message[]; + readonly queue: string; + retryAll(options?: QueueRetryOptions): void; + ackAll(): void; +} +interface R2Error extends Error { + readonly name: string; + readonly code: number; + readonly message: string; + readonly action: string; + readonly stack: any; +} +interface R2ListOptions { + limit?: number; + prefix?: string; + cursor?: string; + delimiter?: string; + startAfter?: string; + include?: ("httpMetadata" | "customMetadata")[]; +} +declare abstract class R2Bucket { + head(key: string): Promise; + get(key: string, options: R2GetOptions & { + onlyIf: R2Conditional | Headers; + }): Promise; + get(key: string, options?: R2GetOptions): Promise; + put(key: string, value: ReadableStream | ArrayBuffer | ArrayBufferView | string | null | Blob, options?: R2PutOptions & { + onlyIf: R2Conditional | Headers; + }): Promise; + put(key: string, value: ReadableStream | ArrayBuffer | ArrayBufferView | string | null | Blob, options?: R2PutOptions): Promise; + createMultipartUpload(key: string, options?: R2MultipartOptions): Promise; + resumeMultipartUpload(key: string, uploadId: string): R2MultipartUpload; + delete(keys: string | string[]): Promise; + list(options?: R2ListOptions): Promise; +} +interface R2MultipartUpload { + readonly key: string; + readonly uploadId: string; + uploadPart(partNumber: number, value: ReadableStream | (ArrayBuffer | ArrayBufferView) | string | Blob, options?: R2UploadPartOptions): Promise; + abort(): Promise; + complete(uploadedParts: R2UploadedPart[]): Promise; +} +interface R2UploadedPart { + partNumber: number; + etag: string; +} +declare abstract class R2Object { + readonly key: string; + readonly version: string; + readonly size: number; + readonly etag: string; + readonly httpEtag: string; + readonly 
checksums: R2Checksums; + readonly uploaded: Date; + readonly httpMetadata?: R2HTTPMetadata; + readonly customMetadata?: Record; + readonly range?: R2Range; + readonly storageClass: string; + readonly ssecKeyMd5?: string; + writeHttpMetadata(headers: Headers): void; +} +interface R2ObjectBody extends R2Object { + get body(): ReadableStream; + get bodyUsed(): boolean; + arrayBuffer(): Promise; + text(): Promise; + json(): Promise; + blob(): Promise; +} +type R2Range = { + offset: number; + length?: number; +} | { + offset?: number; + length: number; +} | { + suffix: number; +}; +interface R2Conditional { + etagMatches?: string; + etagDoesNotMatch?: string; + uploadedBefore?: Date; + uploadedAfter?: Date; + secondsGranularity?: boolean; +} +interface R2GetOptions { + onlyIf?: (R2Conditional | Headers); + range?: (R2Range | Headers); + ssecKey?: (ArrayBuffer | string); +} +interface R2PutOptions { + onlyIf?: (R2Conditional | Headers); + httpMetadata?: (R2HTTPMetadata | Headers); + customMetadata?: Record; + md5?: (ArrayBuffer | string); + sha1?: (ArrayBuffer | string); + sha256?: (ArrayBuffer | string); + sha384?: (ArrayBuffer | string); + sha512?: (ArrayBuffer | string); + storageClass?: string; + ssecKey?: (ArrayBuffer | string); +} +interface R2MultipartOptions { + httpMetadata?: (R2HTTPMetadata | Headers); + customMetadata?: Record; + storageClass?: string; + ssecKey?: (ArrayBuffer | string); +} +interface R2Checksums { + readonly md5?: ArrayBuffer; + readonly sha1?: ArrayBuffer; + readonly sha256?: ArrayBuffer; + readonly sha384?: ArrayBuffer; + readonly sha512?: ArrayBuffer; + toJSON(): R2StringChecksums; +} +interface R2StringChecksums { + md5?: string; + sha1?: string; + sha256?: string; + sha384?: string; + sha512?: string; +} +interface R2HTTPMetadata { + contentType?: string; + contentLanguage?: string; + contentDisposition?: string; + contentEncoding?: string; + cacheControl?: string; + cacheExpiry?: Date; +} +type R2Objects = { + objects: R2Object[]; + 
delimitedPrefixes: string[]; +} & ({ + truncated: true; + cursor: string; +} | { + truncated: false; +}); +interface R2UploadPartOptions { + ssecKey?: (ArrayBuffer | string); +} +declare abstract class ScheduledEvent extends ExtendableEvent { + readonly scheduledTime: number; + readonly cron: string; + noRetry(): void; +} +interface ScheduledController { + readonly scheduledTime: number; + readonly cron: string; + noRetry(): void; +} +interface QueuingStrategy { + highWaterMark?: (number | bigint); + size?: (chunk: T) => number | bigint; +} +interface UnderlyingSink { + type?: string; + start?: (controller: WritableStreamDefaultController) => void | Promise; + write?: (chunk: W, controller: WritableStreamDefaultController) => void | Promise; + abort?: (reason: any) => void | Promise; + close?: () => void | Promise; +} +interface UnderlyingByteSource { + type: "bytes"; + autoAllocateChunkSize?: number; + start?: (controller: ReadableByteStreamController) => void | Promise; + pull?: (controller: ReadableByteStreamController) => void | Promise; + cancel?: (reason: any) => void | Promise; +} +interface UnderlyingSource { + type?: "" | undefined; + start?: (controller: ReadableStreamDefaultController) => void | Promise; + pull?: (controller: ReadableStreamDefaultController) => void | Promise; + cancel?: (reason: any) => void | Promise; + expectedLength?: (number | bigint); +} +interface Transformer { + readableType?: string; + writableType?: string; + start?: (controller: TransformStreamDefaultController) => void | Promise; + transform?: (chunk: I, controller: TransformStreamDefaultController) => void | Promise; + flush?: (controller: TransformStreamDefaultController) => void | Promise; + cancel?: (reason: any) => void | Promise; + expectedLength?: number; +} +interface StreamPipeOptions { + /** + * Pipes this readable stream to a given writable stream destination. 
The way in which the piping process behaves under various error conditions can be customized with a number of passed options. It returns a promise that fulfills when the piping process completes successfully, or rejects if any errors were encountered. + * + * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader. + * + * Errors and closures of the source and destination streams propagate as follows: + * + * An error in this source readable stream will abort destination, unless preventAbort is truthy. The returned promise will be rejected with the source's error, or with any error that occurs during aborting the destination. + * + * An error in destination will cancel this source readable stream, unless preventCancel is truthy. The returned promise will be rejected with the destination's error, or with any error that occurs during canceling the source. + * + * When this source readable stream closes, destination will be closed, unless preventClose is truthy. The returned promise will be fulfilled once this process completes, unless an error is encountered while closing the destination, in which case it will be rejected with that error. + * + * If destination starts out closed or closing, this source readable stream will be canceled, unless preventCancel is true. The returned promise will be rejected with an error indicating piping to a closed stream failed, or with any error that occurs during canceling the source. + * + * The signal option can be set to an AbortSignal to allow aborting an ongoing pipe operation via the corresponding AbortController. In this case, this source readable stream will be canceled, and destination aborted, unless the respective options preventCancel or preventAbort are set. 
+ */ + preventClose?: boolean; + preventAbort?: boolean; + preventCancel?: boolean; + signal?: AbortSignal; +} +type ReadableStreamReadResult = { + done: false; + value: R; +} | { + done: true; + value?: undefined; +}; +/** + * This Streams API interface represents a readable stream of byte data. The Fetch API offers a concrete instance of a ReadableStream through the body property of a Response object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream) + */ +interface ReadableStream { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/locked) */ + get locked(): boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/cancel) */ + cancel(reason?: any): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/getReader) */ + getReader(): ReadableStreamDefaultReader; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/getReader) */ + getReader(options: ReadableStreamGetReaderOptions): ReadableStreamBYOBReader; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/pipeThrough) */ + pipeThrough(transform: ReadableWritablePair, options?: StreamPipeOptions): ReadableStream; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/pipeTo) */ + pipeTo(destination: WritableStream, options?: StreamPipeOptions): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/tee) */ + tee(): [ + ReadableStream, + ReadableStream + ]; + values(options?: ReadableStreamValuesOptions): AsyncIterableIterator; + [Symbol.asyncIterator](options?: ReadableStreamValuesOptions): AsyncIterableIterator; +} +/** + * This Streams API interface represents a readable stream of byte data. The Fetch API offers a concrete instance of a ReadableStream through the body property of a Response object. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream) + */ +declare const ReadableStream: { + prototype: ReadableStream; + new (underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy): ReadableStream; + new (underlyingSource?: UnderlyingSource, strategy?: QueuingStrategy): ReadableStream; +}; +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader) */ +declare class ReadableStreamDefaultReader { + constructor(stream: ReadableStream); + get closed(): Promise; + cancel(reason?: any): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader/read) */ + read(): Promise>; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader/releaseLock) */ + releaseLock(): void; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader) */ +declare class ReadableStreamBYOBReader { + constructor(stream: ReadableStream); + get closed(): Promise; + cancel(reason?: any): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader/read) */ + read(view: T): Promise>; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader/releaseLock) */ + releaseLock(): void; + readAtLeast(minElements: number, view: T): Promise>; +} +interface ReadableStreamBYOBReaderReadableStreamBYOBReaderReadOptions { + min?: number; +} +interface ReadableStreamGetReaderOptions { + /** + * Creates a ReadableStreamBYOBReader and locks the stream to the new reader. + * + * This call behaves the same way as the no-argument variant, except that it only works on readable byte streams, i.e. streams which were constructed specifically with the ability to handle "bring your own buffer" reading. 
The returned BYOB reader provides the ability to directly read individual chunks from the stream via its read() method, into developer-supplied buffers, allowing more precise control over allocation. + */ + mode: "byob"; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest) */ +declare abstract class ReadableStreamBYOBRequest { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/view) */ + get view(): Uint8Array | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/respond) */ + respond(bytesWritten: number): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/respondWithNewView) */ + respondWithNewView(view: ArrayBuffer | ArrayBufferView): void; + get atLeast(): number | null; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController) */ +declare abstract class ReadableStreamDefaultController { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/desiredSize) */ + get desiredSize(): number | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/close) */ + close(): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/enqueue) */ + enqueue(chunk?: R): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/error) */ + error(reason: any): void; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController) */ +declare abstract class ReadableByteStreamController { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/byobRequest) */ + get byobRequest(): ReadableStreamBYOBRequest | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/desiredSize) */ + get 
desiredSize(): number | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/close) */ + close(): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/enqueue) */ + enqueue(chunk: ArrayBuffer | ArrayBufferView): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/error) */ + error(reason: any): void; +} +/** + * This Streams API interface represents a controller allowing control of a WritableStream's state. When constructing a WritableStream, the underlying sink is given a corresponding WritableStreamDefaultController instance to manipulate. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController) + */ +declare abstract class WritableStreamDefaultController { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController/signal) */ + get signal(): AbortSignal; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController/error) */ + error(reason?: any): void; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController) */ +declare abstract class TransformStreamDefaultController { + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/desiredSize) */ + get desiredSize(): number | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/enqueue) */ + enqueue(chunk?: O): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/error) */ + error(reason: any): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/terminate) */ + terminate(): void; +} +interface ReadableWritablePair { + /** + * Provides a convenient, chainable way of piping this readable stream through a transform stream (or any other { 
writable, readable } pair). It simply pipes the stream into the writable side of the supplied pair, and returns the readable side for further use. + * + * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader. + */ + writable: WritableStream; + readable: ReadableStream; +} +/** + * This Streams API interface provides a standard abstraction for writing streaming data to a destination, known as a sink. This object comes with built-in backpressure and queuing. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream) + */ +declare class WritableStream { + constructor(underlyingSink?: UnderlyingSink, queuingStrategy?: QueuingStrategy); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/locked) */ + get locked(): boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/abort) */ + abort(reason?: any): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/close) */ + close(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/getWriter) */ + getWriter(): WritableStreamDefaultWriter; +} +/** + * This Streams API interface is the object returned by WritableStream.getWriter() and once created locks the < writer to the WritableStream ensuring that no other streams can write to the underlying sink. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter) + */ +declare class WritableStreamDefaultWriter { + constructor(stream: WritableStream); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/closed) */ + get closed(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/ready) */ + get ready(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/desiredSize) */ + get desiredSize(): number | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/abort) */ + abort(reason?: any): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/close) */ + close(): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/write) */ + write(chunk?: W): Promise; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/releaseLock) */ + releaseLock(): void; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream) */ +declare class TransformStream { + constructor(transformer?: Transformer, writableStrategy?: QueuingStrategy, readableStrategy?: QueuingStrategy); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream/readable) */ + get readable(): ReadableStream; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream/writable) */ + get writable(): WritableStream; +} +declare class FixedLengthStream extends IdentityTransformStream { + constructor(expectedLength: number | bigint, queuingStrategy?: IdentityTransformStreamQueuingStrategy); +} +declare class IdentityTransformStream extends TransformStream { + constructor(queuingStrategy?: IdentityTransformStreamQueuingStrategy); +} +interface IdentityTransformStreamQueuingStrategy { + highWaterMark?: (number | bigint); +} 
+interface ReadableStreamValuesOptions { + preventCancel?: boolean; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CompressionStream) */ +declare class CompressionStream extends TransformStream { + constructor(format: "gzip" | "deflate" | "deflate-raw"); +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/DecompressionStream) */ +declare class DecompressionStream extends TransformStream { + constructor(format: "gzip" | "deflate" | "deflate-raw"); +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoderStream) */ +declare class TextEncoderStream extends TransformStream { + constructor(); + get encoding(): string; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoderStream) */ +declare class TextDecoderStream extends TransformStream { + constructor(label?: string, options?: TextDecoderStreamTextDecoderStreamInit); + get encoding(): string; + get fatal(): boolean; + get ignoreBOM(): boolean; +} +interface TextDecoderStreamTextDecoderStreamInit { + fatal?: boolean; + ignoreBOM?: boolean; +} +/** + * This Streams API interface provides a built-in byte length queuing strategy that can be used when constructing streams. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy) + */ +declare class ByteLengthQueuingStrategy implements QueuingStrategy { + constructor(init: QueuingStrategyInit); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy/highWaterMark) */ + get highWaterMark(): number; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy/size) */ + get size(): (chunk?: any) => number; +} +/** + * This Streams API interface provides a built-in byte length queuing strategy that can be used when constructing streams. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy) + */ +declare class CountQueuingStrategy implements QueuingStrategy { + constructor(init: QueuingStrategyInit); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy/highWaterMark) */ + get highWaterMark(): number; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy/size) */ + get size(): (chunk?: any) => number; +} +interface QueuingStrategyInit { + /** + * Creates a new ByteLengthQueuingStrategy with the provided high water mark. + * + * Note that the provided high water mark will not be validated ahead of time. Instead, if it is negative, NaN, or not a number, the resulting ByteLengthQueuingStrategy will cause the corresponding stream constructor to throw. + */ + highWaterMark: number; +} +interface ScriptVersion { + id?: string; + tag?: string; + message?: string; +} +declare abstract class TailEvent extends ExtendableEvent { + readonly events: TraceItem[]; + readonly traces: TraceItem[]; +} +interface TraceItem { + readonly event: (TraceItemFetchEventInfo | TraceItemJsRpcEventInfo | TraceItemScheduledEventInfo | TraceItemAlarmEventInfo | TraceItemQueueEventInfo | TraceItemEmailEventInfo | TraceItemTailEventInfo | TraceItemCustomEventInfo | TraceItemHibernatableWebSocketEventInfo) | null; + readonly eventTimestamp: number | null; + readonly logs: TraceLog[]; + readonly exceptions: TraceException[]; + readonly diagnosticsChannelEvents: TraceDiagnosticChannelEvent[]; + readonly scriptName: string | null; + readonly entrypoint?: string; + readonly scriptVersion?: ScriptVersion; + readonly dispatchNamespace?: string; + readonly scriptTags?: string[]; + readonly outcome: string; + readonly executionModel: string; + readonly truncated: boolean; + readonly cpuTime: number; + readonly wallTime: number; +} +interface TraceItemAlarmEventInfo { + readonly scheduledTime: Date; +} +interface TraceItemCustomEventInfo { +} 
+interface TraceItemScheduledEventInfo { + readonly scheduledTime: number; + readonly cron: string; +} +interface TraceItemQueueEventInfo { + readonly queue: string; + readonly batchSize: number; +} +interface TraceItemEmailEventInfo { + readonly mailFrom: string; + readonly rcptTo: string; + readonly rawSize: number; +} +interface TraceItemTailEventInfo { + readonly consumedEvents: TraceItemTailEventInfoTailItem[]; +} +interface TraceItemTailEventInfoTailItem { + readonly scriptName: string | null; +} +interface TraceItemFetchEventInfo { + readonly response?: TraceItemFetchEventInfoResponse; + readonly request: TraceItemFetchEventInfoRequest; +} +interface TraceItemFetchEventInfoRequest { + readonly cf?: any; + readonly headers: Record; + readonly method: string; + readonly url: string; + getUnredacted(): TraceItemFetchEventInfoRequest; +} +interface TraceItemFetchEventInfoResponse { + readonly status: number; +} +interface TraceItemJsRpcEventInfo { + readonly rpcMethod: string; +} +interface TraceItemHibernatableWebSocketEventInfo { + readonly getWebSocketEvent: TraceItemHibernatableWebSocketEventInfoMessage | TraceItemHibernatableWebSocketEventInfoClose | TraceItemHibernatableWebSocketEventInfoError; +} +interface TraceItemHibernatableWebSocketEventInfoMessage { + readonly webSocketEventType: string; +} +interface TraceItemHibernatableWebSocketEventInfoClose { + readonly webSocketEventType: string; + readonly code: number; + readonly wasClean: boolean; +} +interface TraceItemHibernatableWebSocketEventInfoError { + readonly webSocketEventType: string; +} +interface TraceLog { + readonly timestamp: number; + readonly level: string; + readonly message: any; +} +interface TraceException { + readonly timestamp: number; + readonly message: string; + readonly name: string; + readonly stack?: string; +} +interface TraceDiagnosticChannelEvent { + readonly timestamp: number; + readonly channel: string; + readonly message: any; +} +interface TraceMetrics { + readonly 
cpuTime: number; + readonly wallTime: number; +} +interface UnsafeTraceMetrics { + fromTrace(item: TraceItem): TraceMetrics; +} +/** + * The URL interface represents an object providing static methods used for creating object URLs. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL) + */ +declare class URL { + constructor(url: string | URL, base?: string | URL); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/origin) */ + get origin(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/href) */ + get href(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/href) */ + set href(value: string); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/protocol) */ + get protocol(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/protocol) */ + set protocol(value: string); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/username) */ + get username(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/username) */ + set username(value: string); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/password) */ + get password(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/password) */ + set password(value: string); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/host) */ + get host(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/host) */ + set host(value: string); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hostname) */ + get hostname(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hostname) */ + set hostname(value: string); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/port) */ + get port(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/port) */ + set port(value: string); + /* [MDN 
Reference](https://developer.mozilla.org/docs/Web/API/URL/pathname) */ + get pathname(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/pathname) */ + set pathname(value: string); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/search) */ + get search(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/search) */ + set search(value: string); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hash) */ + get hash(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hash) */ + set hash(value: string); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/searchParams) */ + get searchParams(): URLSearchParams; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/toJSON) */ + toJSON(): string; + /*function toString() { [native code] }*/ + toString(): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/canParse_static) */ + static canParse(url: string, base?: string): boolean; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/parse_static) */ + static parse(url: string, base?: string): URL | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/createObjectURL_static) */ + static createObjectURL(object: File | Blob): string; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/revokeObjectURL_static) */ + static revokeObjectURL(object_url: string): void; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams) */ +declare class URLSearchParams { + constructor(init?: (Iterable> | Record | string)); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/size) */ + get size(): number; + /** + * Appends a specified key/value pair as a new search parameter. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/append) + */ + append(name: string, value: string): void; + /** + * Deletes the given search parameter, and its associated value, from the list of all search parameters. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/delete) + */ + delete(name: string, value?: string): void; + /** + * Returns the first value associated to the given search parameter. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/get) + */ + get(name: string): string | null; + /** + * Returns all the values association with a given search parameter. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/getAll) + */ + getAll(name: string): string[]; + /** + * Returns a Boolean indicating if such a search parameter exists. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/has) + */ + has(name: string, value?: string): boolean; + /** + * Sets the value associated to a given search parameter to the given value. If there were several values, delete the others. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/set) + */ + set(name: string, value: string): void; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/sort) */ + sort(): void; + /* Returns an array of key, value pairs for every entry in the search params. */ + entries(): IterableIterator<[ + key: string, + value: string + ]>; + /* Returns a list of keys in the search params. */ + keys(): IterableIterator; + /* Returns a list of values in the search params. */ + values(): IterableIterator; + forEach(callback: (this: This, value: string, key: string, parent: URLSearchParams) => void, thisArg?: This): void; + /*function toString() { [native code] } Returns a string containing a query string suitable for use in a URL. Does not include the question mark. 
*/ + toString(): string; + [Symbol.iterator](): IterableIterator<[ + key: string, + value: string + ]>; +} +declare class URLPattern { + constructor(input?: (string | URLPatternURLPatternInit), baseURL?: (string | URLPatternURLPatternOptions), patternOptions?: URLPatternURLPatternOptions); + get protocol(): string; + get username(): string; + get password(): string; + get hostname(): string; + get port(): string; + get pathname(): string; + get search(): string; + get hash(): string; + test(input?: (string | URLPatternURLPatternInit), baseURL?: string): boolean; + exec(input?: (string | URLPatternURLPatternInit), baseURL?: string): URLPatternURLPatternResult | null; +} +interface URLPatternURLPatternInit { + protocol?: string; + username?: string; + password?: string; + hostname?: string; + port?: string; + pathname?: string; + search?: string; + hash?: string; + baseURL?: string; +} +interface URLPatternURLPatternComponentResult { + input: string; + groups: Record; +} +interface URLPatternURLPatternResult { + inputs: (string | URLPatternURLPatternInit)[]; + protocol: URLPatternURLPatternComponentResult; + username: URLPatternURLPatternComponentResult; + password: URLPatternURLPatternComponentResult; + hostname: URLPatternURLPatternComponentResult; + port: URLPatternURLPatternComponentResult; + pathname: URLPatternURLPatternComponentResult; + search: URLPatternURLPatternComponentResult; + hash: URLPatternURLPatternComponentResult; +} +interface URLPatternURLPatternOptions { + ignoreCase?: boolean; +} +/** + * A CloseEvent is sent to clients using WebSockets when the connection is closed. This is delivered to the listener indicated by the WebSocket object's onclose attribute. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent) + */ +declare class CloseEvent extends Event { + constructor(type: string, initializer?: CloseEventInit); + /** + * Returns the WebSocket connection close code provided by the server. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/code) + */ + readonly code: number; + /** + * Returns the WebSocket connection close reason provided by the server. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/reason) + */ + readonly reason: string; + /** + * Returns true if the connection closed cleanly; false otherwise. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/wasClean) + */ + readonly wasClean: boolean; +} +interface CloseEventInit { + code?: number; + reason?: string; + wasClean?: boolean; +} +/** + * A message received by a target object. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent) + */ +declare class MessageEvent extends Event { + constructor(type: string, initializer: MessageEventInit); + /** + * Returns the data of the message. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/data) + */ + readonly data: ArrayBuffer | string; +} +interface MessageEventInit { + data: ArrayBuffer | string; +} +type WebSocketEventMap = { + close: CloseEvent; + message: MessageEvent; + open: Event; + error: ErrorEvent; +}; +/** + * Provides the API for creating and managing a WebSocket connection to a server, as well as for sending and receiving data on the connection. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket) + */ +declare var WebSocket: { + prototype: WebSocket; + new (url: string, protocols?: (string[] | string)): WebSocket; + readonly READY_STATE_CONNECTING: number; + readonly CONNECTING: number; + readonly READY_STATE_OPEN: number; + readonly OPEN: number; + readonly READY_STATE_CLOSING: number; + readonly CLOSING: number; + readonly READY_STATE_CLOSED: number; + readonly CLOSED: number; +}; +/** + * Provides the API for creating and managing a WebSocket connection to a server, as well as for sending and receiving data on the connection. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket) + */ +interface WebSocket extends EventTarget { + accept(): void; + /** + * Transmits data using the WebSocket connection. data can be a string, a Blob, an ArrayBuffer, or an ArrayBufferView. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/send) + */ + send(message: (ArrayBuffer | ArrayBufferView) | string): void; + /** + * Closes the WebSocket connection, optionally using code as the the WebSocket connection close code and reason as the the WebSocket connection close reason. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/close) + */ + close(code?: number, reason?: string): void; + serializeAttachment(attachment: any): void; + deserializeAttachment(): any | null; + /** + * Returns the state of the WebSocket object's connection. It can have the values described below. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/readyState) + */ + readyState: number; + /** + * Returns the URL that was used to establish the WebSocket connection. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/url) + */ + url: string | null; + /** + * Returns the subprotocol selected by the server, if any. It can be used in conjunction with the array form of the constructor's second argument to perform subprotocol negotiation. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/protocol) + */ + protocol: string | null; + /** + * Returns the extensions selected by the server, if any. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/extensions) + */ + extensions: string | null; +} +declare const WebSocketPair: { + new (): { + 0: WebSocket; + 1: WebSocket; + }; +}; +interface SqlStorage { + exec>(query: string, ...bindings: any[]): SqlStorageCursor; + get databaseSize(): number; + Cursor: typeof SqlStorageCursor; + Statement: typeof SqlStorageStatement; +} +declare abstract class SqlStorageStatement { +} +type SqlStorageValue = ArrayBuffer | string | number | null; +declare abstract class SqlStorageCursor> { + next(): { + done?: false; + value: T; + } | { + done: true; + value?: never; + }; + toArray(): T[]; + one(): T; + raw(): IterableIterator; + columnNames: string[]; + get rowsRead(): number; + get rowsWritten(): number; + [Symbol.iterator](): IterableIterator; +} +interface Socket { + get readable(): ReadableStream; + get writable(): WritableStream; + get closed(): Promise; + get opened(): Promise; + close(): Promise; + startTls(options?: TlsOptions): Socket; +} +interface SocketOptions { + secureTransport?: string; + allowHalfOpen: boolean; + highWaterMark?: (number | bigint); +} +interface SocketAddress { + hostname: string; + port: number; +} +interface TlsOptions { + expectedServerHostname?: string; +} +interface SocketInfo { + remoteAddress?: string; + localAddress?: string; +} +/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource) */ +declare class EventSource extends EventTarget { + constructor(url: string, init?: EventSourceEventSourceInit); + /** + * Aborts any instances of the fetch algorithm started for this EventSource object, and sets the readyState attribute to CLOSED. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/close) + */ + close(): void; + /** + * Returns the URL providing the event stream. 
+ * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/url) + */ + get url(): string; + /** + * Returns true if the credentials mode for connection requests to the URL providing the event stream is set to "include", and false otherwise. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/withCredentials) + */ + get withCredentials(): boolean; + /** + * Returns the state of this EventSource object's connection. It can have the values described below. + * + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/readyState) + */ + get readyState(): number; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/open_event) */ + get onopen(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/open_event) */ + set onopen(value: any | null); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/message_event) */ + get onmessage(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/message_event) */ + set onmessage(value: any | null); + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/error_event) */ + get onerror(): any | null; + /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/error_event) */ + set onerror(value: any | null); + static readonly CONNECTING: number; + static readonly OPEN: number; + static readonly CLOSED: number; + static from(stream: ReadableStream): EventSource; +} +interface EventSourceEventSourceInit { + withCredentials?: boolean; + fetcher?: Fetcher; +} +interface Container { + get running(): boolean; + start(options?: ContainerStartupOptions): void; + monitor(): Promise; + destroy(error?: any): Promise; + signal(signo: number): void; + getTcpPort(port: number): Fetcher; +} +interface ContainerStartupOptions { + entrypoint?: string[]; + enableInternet: boolean; + env?: Record; +} +type AiImageClassificationInput = { + 
image: number[]; +}; +type AiImageClassificationOutput = { + score?: number; + label?: string; +}[]; +declare abstract class BaseAiImageClassification { + inputs: AiImageClassificationInput; + postProcessedOutputs: AiImageClassificationOutput; +} +type AiImageToTextInput = { + image: number[]; + prompt?: string; + max_tokens?: number; + temperature?: number; + top_p?: number; + top_k?: number; + seed?: number; + repetition_penalty?: number; + frequency_penalty?: number; + presence_penalty?: number; + raw?: boolean; + messages?: RoleScopedChatInput[]; +}; +type AiImageToTextOutput = { + description: string; +}; +declare abstract class BaseAiImageToText { + inputs: AiImageToTextInput; + postProcessedOutputs: AiImageToTextOutput; +} +type AiImageTextToTextInput = { + image: string; + prompt?: string; + max_tokens?: number; + temperature?: number; + ignore_eos?: boolean; + top_p?: number; + top_k?: number; + seed?: number; + repetition_penalty?: number; + frequency_penalty?: number; + presence_penalty?: number; + raw?: boolean; + messages?: RoleScopedChatInput[]; +}; +type AiImageTextToTextOutput = { + description: string; +}; +declare abstract class BaseAiImageTextToText { + inputs: AiImageTextToTextInput; + postProcessedOutputs: AiImageTextToTextOutput; +} +type AiObjectDetectionInput = { + image: number[]; +}; +type AiObjectDetectionOutput = { + score?: number; + label?: string; +}[]; +declare abstract class BaseAiObjectDetection { + inputs: AiObjectDetectionInput; + postProcessedOutputs: AiObjectDetectionOutput; +} +type AiSentenceSimilarityInput = { + source: string; + sentences: string[]; +}; +type AiSentenceSimilarityOutput = number[]; +declare abstract class BaseAiSentenceSimilarity { + inputs: AiSentenceSimilarityInput; + postProcessedOutputs: AiSentenceSimilarityOutput; +} +type AiAutomaticSpeechRecognitionInput = { + audio: number[]; +}; +type AiAutomaticSpeechRecognitionOutput = { + text?: string; + words?: { + word: string; + start: number; + end: number; 
+ }[]; + vtt?: string; +}; +declare abstract class BaseAiAutomaticSpeechRecognition { + inputs: AiAutomaticSpeechRecognitionInput; + postProcessedOutputs: AiAutomaticSpeechRecognitionOutput; +} +type AiSummarizationInput = { + input_text: string; + max_length?: number; +}; +type AiSummarizationOutput = { + summary: string; +}; +declare abstract class BaseAiSummarization { + inputs: AiSummarizationInput; + postProcessedOutputs: AiSummarizationOutput; +} +type AiTextClassificationInput = { + text: string; +}; +type AiTextClassificationOutput = { + score?: number; + label?: string; +}[]; +declare abstract class BaseAiTextClassification { + inputs: AiTextClassificationInput; + postProcessedOutputs: AiTextClassificationOutput; +} +type AiTextEmbeddingsInput = { + text: string | string[]; +}; +type AiTextEmbeddingsOutput = { + shape: number[]; + data: number[][]; +}; +declare abstract class BaseAiTextEmbeddings { + inputs: AiTextEmbeddingsInput; + postProcessedOutputs: AiTextEmbeddingsOutput; +} +type RoleScopedChatInput = { + role: "user" | "assistant" | "system" | "tool" | (string & NonNullable); + content: string; + name?: string; +}; +type AiTextGenerationToolLegacyInput = { + name: string; + description: string; + parameters?: { + type: "object" | (string & NonNullable); + properties: { + [key: string]: { + type: string; + description?: string; + }; + }; + required: string[]; + }; +}; +type AiTextGenerationToolInput = { + type: "function" | (string & NonNullable); + function: { + name: string; + description: string; + parameters?: { + type: "object" | (string & NonNullable); + properties: { + [key: string]: { + type: string; + description?: string; + }; + }; + required: string[]; + }; + }; +}; +type AiTextGenerationFunctionsInput = { + name: string; + code: string; +}; +type AiTextGenerationResponseFormat = { + type: string; + json_schema?: any; +}; +type AiTextGenerationInput = { + prompt?: string; + raw?: boolean; + stream?: boolean; + max_tokens?: number; + 
temperature?: number; + top_p?: number; + top_k?: number; + seed?: number; + repetition_penalty?: number; + frequency_penalty?: number; + presence_penalty?: number; + messages?: RoleScopedChatInput[]; + response_format?: AiTextGenerationResponseFormat; + tools?: AiTextGenerationToolInput[] | AiTextGenerationToolLegacyInput[] | (object & NonNullable); + functions?: AiTextGenerationFunctionsInput[]; +}; +type AiTextGenerationOutput = { + response?: string; + tool_calls?: { + name: string; + arguments: unknown; + }[]; +} | ReadableStream; +declare abstract class BaseAiTextGeneration { + inputs: AiTextGenerationInput; + postProcessedOutputs: AiTextGenerationOutput; +} +type AiTextToSpeechInput = { + prompt: string; + lang?: string; +}; +type AiTextToSpeechOutput = Uint8Array | { + audio: string; +}; +declare abstract class BaseAiTextToSpeech { + inputs: AiTextToSpeechInput; + postProcessedOutputs: AiTextToSpeechOutput; +} +type AiTextToImageInput = { + prompt: string; + negative_prompt?: string; + height?: number; + width?: number; + image?: number[]; + image_b64?: string; + mask?: number[]; + num_steps?: number; + strength?: number; + guidance?: number; + seed?: number; +}; +type AiTextToImageOutput = ReadableStream; +declare abstract class BaseAiTextToImage { + inputs: AiTextToImageInput; + postProcessedOutputs: AiTextToImageOutput; +} +type AiTranslationInput = { + text: string; + target_lang: string; + source_lang?: string; +}; +type AiTranslationOutput = { + translated_text?: string; +}; +declare abstract class BaseAiTranslation { + inputs: AiTranslationInput; + postProcessedOutputs: AiTranslationOutput; +} +type Ai_Cf_Openai_Whisper_Input = string | { + /** + * An array of integers that represent the audio data constrained to 8-bit unsigned integer values + */ + audio: number[]; +}; +interface Ai_Cf_Openai_Whisper_Output { + /** + * The transcription + */ + text: string; + word_count?: number; + words?: { + word?: string; + /** + * The second this word begins in 
the recording + */ + start?: number; + /** + * The ending second when the word completes + */ + end?: number; + }[]; + vtt?: string; +} +declare abstract class Base_Ai_Cf_Openai_Whisper { + inputs: Ai_Cf_Openai_Whisper_Input; + postProcessedOutputs: Ai_Cf_Openai_Whisper_Output; +} +type Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Input = string | { + /** + * The input text prompt for the model to generate a response. + */ + prompt?: string; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * Controls the creativity of the AI's responses by adjusting how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; + image: number[] | (string & NonNullable); + /** + * The maximum number of tokens to generate in the response. 
+ */ + max_tokens?: number; +}; +interface Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Output { + description?: string; +} +declare abstract class Base_Ai_Cf_Unum_Uform_Gen2_Qwen_500M { + inputs: Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Input; + postProcessedOutputs: Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Output; +} +type Ai_Cf_Openai_Whisper_Tiny_En_Input = string | { + /** + * An array of integers that represent the audio data constrained to 8-bit unsigned integer values + */ + audio: number[]; +}; +interface Ai_Cf_Openai_Whisper_Tiny_En_Output { + /** + * The transcription + */ + text: string; + word_count?: number; + words?: { + word?: string; + /** + * The second this word begins in the recording + */ + start?: number; + /** + * The ending second when the word completes + */ + end?: number; + }[]; + vtt?: string; +} +declare abstract class Base_Ai_Cf_Openai_Whisper_Tiny_En { + inputs: Ai_Cf_Openai_Whisper_Tiny_En_Input; + postProcessedOutputs: Ai_Cf_Openai_Whisper_Tiny_En_Output; +} +interface Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input { + /** + * Base64 encoded value of the audio data. + */ + audio: string; + /** + * Supported tasks are 'translate' or 'transcribe'. + */ + task?: string; + /** + * The language of the audio being transcribed or translated. + */ + language?: string; + /** + * Preprocess the audio with a voice activity detection model. + */ + vad_filter?: string; + /** + * A text prompt to help provide context to the model on the contents of the audio. + */ + initial_prompt?: string; + /** + * The prefix it appended the the beginning of the output of the transcription and can guide the transcription result. + */ + prefix?: string; +} +interface Ai_Cf_Openai_Whisper_Large_V3_Turbo_Output { + transcription_info?: { + /** + * The language of the audio being transcribed or translated. + */ + language?: string; + /** + * The confidence level or probability of the detected language being accurate, represented as a decimal between 0 and 1. 
+ */ + language_probability?: number; + /** + * The total duration of the original audio file, in seconds. + */ + duration?: number; + /** + * The duration of the audio after applying Voice Activity Detection (VAD) to remove silent or irrelevant sections, in seconds. + */ + duration_after_vad?: number; + }; + /** + * The complete transcription of the audio. + */ + text: string; + /** + * The total number of words in the transcription. + */ + word_count?: number; + segments?: { + /** + * The starting time of the segment within the audio, in seconds. + */ + start?: number; + /** + * The ending time of the segment within the audio, in seconds. + */ + end?: number; + /** + * The transcription of the segment. + */ + text?: string; + /** + * The temperature used in the decoding process, controlling randomness in predictions. Lower values result in more deterministic outputs. + */ + temperature?: number; + /** + * The average log probability of the predictions for the words in this segment, indicating overall confidence. + */ + avg_logprob?: number; + /** + * The compression ratio of the input to the output, measuring how much the text was compressed during the transcription process. + */ + compression_ratio?: number; + /** + * The probability that the segment contains no speech, represented as a decimal between 0 and 1. + */ + no_speech_prob?: number; + words?: { + /** + * The individual word transcribed from the audio. + */ + word?: string; + /** + * The starting time of the word within the audio, in seconds. + */ + start?: number; + /** + * The ending time of the word within the audio, in seconds. + */ + end?: number; + }[]; + }[]; + /** + * The transcription in WebVTT format, which includes timing and text information for use in subtitles. 
+ */ + vtt?: string; +} +declare abstract class Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo { + inputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input; + postProcessedOutputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Output; +} +type Ai_Cf_Baai_Bge_M3_Input = BGEM3InputQueryAndContexts | BGEM3InputEmbedding; +interface BGEM3InputQueryAndContexts { + /** + * A query you wish to perform against the provided contexts. If no query is provided the model with respond with embeddings for contexts + */ + query?: string; + /** + * List of provided contexts. Note that the index in this array is important, as the response will refer to it. + */ + contexts: { + /** + * One of the provided context content + */ + text?: string; + }[]; + /** + * When provided with too long context should the model error out or truncate the context to fit? + */ + truncate_inputs?: boolean; +} +interface BGEM3InputEmbedding { + text: string | string[]; + /** + * When provided with too long context should the model error out or truncate the context to fit? + */ + truncate_inputs?: boolean; +} +type Ai_Cf_Baai_Bge_M3_Output = BGEM3OuputQuery | BGEM3OutputEmbeddingForContexts | BGEM3OuputEmbedding; +interface BGEM3OuputQuery { + response?: { + /** + * Index of the context in the request + */ + id?: number; + /** + * Score of the context under the index. + */ + score?: number; + }[]; +} +interface BGEM3OutputEmbeddingForContexts { + response?: number[][]; + shape?: number[]; + /** + * The pooling method used in the embedding process. + */ + pooling?: "mean" | "cls"; +} +interface BGEM3OuputEmbedding { + shape?: number[]; + /** + * Embeddings of the requested text values + */ + data?: number[][]; + /** + * The pooling method used in the embedding process. 
+ */ + pooling?: "mean" | "cls"; +} +declare abstract class Base_Ai_Cf_Baai_Bge_M3 { + inputs: Ai_Cf_Baai_Bge_M3_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_M3_Output; +} +interface Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Input { + /** + * A text description of the image you want to generate. + */ + prompt: string; + /** + * The number of diffusion steps; higher values can improve quality but take longer. + */ + steps?: number; +} +interface Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Output { + /** + * The generated image in Base64 format. + */ + image?: string; +} +declare abstract class Base_Ai_Cf_Black_Forest_Labs_Flux_1_Schnell { + inputs: Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Input; + postProcessedOutputs: Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Output; +} +type Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Input = Prompt | Messages; +interface Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + image?: number[] | (string & NonNullable); + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. 
+ */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; + /** + * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. + */ + lora?: string; +} +interface Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role: string; + /** + * The content of the message as a string. + */ + content: string; + }[]; + image?: number[] | string; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. 
+ */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + /** + * If true, the response will be streamed back incrementally. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Controls the creativity of the AI's responses by adjusting how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +type Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Output = { + /** + * The generated text response from the model + */ + response?: string; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +} | ReadableStream; +declare abstract class Base_Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct { + inputs: Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Input; + postProcessedOutputs: Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Output; +} +interface Ai_Cf_Meta_Llama_Guard_3_8B_Input { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender must alternate between 'user' and 'assistant'. + */ + role: "user" | "assistant"; + /** + * The content of the message as a string. + */ + content: string; + }[]; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Dictate the output format of the generated response. + */ + response_format?: { + /** + * Set to json_object to process and output generated text as JSON. + */ + type?: string; + }; +} +interface Ai_Cf_Meta_Llama_Guard_3_8B_Output { + response?: string | { + /** + * Whether the conversation is safe or not. + */ + safe?: boolean; + /** + * A list of what hazard categories predicted for the conversation, if the conversation is deemed unsafe. 
+ */ + categories?: string[]; + }; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; +} +declare abstract class Base_Ai_Cf_Meta_Llama_Guard_3_8B { + inputs: Ai_Cf_Meta_Llama_Guard_3_8B_Input; + postProcessedOutputs: Ai_Cf_Meta_Llama_Guard_3_8B_Output; +} +interface Ai_Cf_Baai_Bge_Reranker_Base_Input { + /** + * A query you wish to perform against the provided contexts. + */ + /** + * Number of returned results starting with the best score. + */ + top_k?: number; + /** + * List of provided contexts. Note that the index in this array is important, as the response will refer to it. + */ + contexts: { + /** + * One of the provided context content + */ + text?: string; + }[]; +} +interface Ai_Cf_Baai_Bge_Reranker_Base_Output { + response?: { + /** + * Index of the context in the request + */ + id?: number; + /** + * Score of the context under the index. + */ + score?: number; + }[]; +} +declare abstract class Base_Ai_Cf_Baai_Bge_Reranker_Base { + inputs: Ai_Cf_Baai_Bge_Reranker_Base_Input; + postProcessedOutputs: Ai_Cf_Baai_Bge_Reranker_Base_Output; +} +type Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Input = Ai_Cf_Meta_Llama_4_Prompt | Ai_Cf_Meta_Llama_4_Messages; +interface Ai_Cf_Meta_Llama_4_Prompt { + /** + * The input text prompt for the model to generate a response. + */ + prompt: string; + /** + * JSON schema that should be fulfilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. + */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. 
+ */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. + */ + presence_penalty?: number; +} +interface Ai_Cf_Meta_Llama_4_Messages { + /** + * An array of message objects representing the conversation history. + */ + messages: { + /** + * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). + */ + role?: string; + /** + * The tool call id. Must be supplied for tool calls for Mistral-3. If you don't know what to put here you can fall back to 000000001 + */ + tool_call_id?: string; + content?: string | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted + */ + url?: string; + }; + }[] | { + /** + * Type of the content provided + */ + type?: string; + text?: string; + image_url?: { + /** + * image uri with data (e.g. data:image/jpeg;base64,/9j/...). 
HTTP URL will not be accepted + */ + url?: string; + }; + }; + }[]; + functions?: { + name: string; + code: string; + }[]; + /** + * A list of tools available for the assistant to use. + */ + tools?: ({ + /** + * The name of the tool. More descriptive the better. + */ + name: string; + /** + * A brief description of what the tool does. + */ + description: string; + /** + * Schema defining the parameters accepted by the tool. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + } | { + /** + * Specifies the type of tool (e.g., 'function'). + */ + type: string; + /** + * Details of the function tool. + */ + function: { + /** + * The name of the function. + */ + name: string; + /** + * A brief description of what the function does. + */ + description: string; + /** + * Schema defining the parameters accepted by the function. + */ + parameters: { + /** + * The type of the parameters object (usually 'object'). + */ + type: string; + /** + * List of required parameter names. + */ + required?: string[]; + /** + * Definitions of each parameter. + */ + properties: { + [k: string]: { + /** + * The data type of the parameter. + */ + type: string; + /** + * A description of the expected parameter. + */ + description: string; + }; + }; + }; + }; + })[]; + /** + * JSON schema that should be fufilled for the response. + */ + guided_json?: object; + /** + * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. + */ + raw?: boolean; + /** + * If true, the response will be streamed back incrementally using SSE, Server Sent Events. 
+ */ + stream?: boolean; + /** + * The maximum number of tokens to generate in the response. + */ + max_tokens?: number; + /** + * Controls the randomness of the output; higher values produce more random results. + */ + temperature?: number; + /** + * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. + */ + top_p?: number; + /** + * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. + */ + top_k?: number; + /** + * Random seed for reproducibility of the generation. + */ + seed?: number; + /** + * Penalty for repeated tokens; higher values discourage repetition. + */ + repetition_penalty?: number; + /** + * Decreases the likelihood of the model repeating the same lines verbatim. + */ + frequency_penalty?: number; + /** + * Increases the likelihood of the model introducing new topics. 
+ */ + presence_penalty?: number; +} +type Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Output = { + /** + * The generated text response from the model + */ + response: string; + /** + * Usage statistics for the inference request + */ + usage?: { + /** + * Total number of tokens in input + */ + prompt_tokens?: number; + /** + * Total number of tokens in output + */ + completion_tokens?: number; + /** + * Total number of input and output tokens + */ + total_tokens?: number; + }; + /** + * An array of tool calls requests made during the response generation + */ + tool_calls?: { + /** + * The arguments passed to be passed to the tool call request + */ + arguments?: object; + /** + * The name of the tool to be called + */ + name?: string; + }[]; +} | string; +declare abstract class Base_Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct { + inputs: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Input; + postProcessedOutputs: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Output; +} +interface AiModels { + "@cf/huggingface/distilbert-sst-2-int8": BaseAiTextClassification; + "@cf/stabilityai/stable-diffusion-xl-base-1.0": BaseAiTextToImage; + "@cf/runwayml/stable-diffusion-v1-5-inpainting": BaseAiTextToImage; + "@cf/runwayml/stable-diffusion-v1-5-img2img": BaseAiTextToImage; + "@cf/lykon/dreamshaper-8-lcm": BaseAiTextToImage; + "@cf/bytedance/stable-diffusion-xl-lightning": BaseAiTextToImage; + "@cf/myshell-ai/melotts": BaseAiTextToSpeech; + "@cf/baai/bge-base-en-v1.5": BaseAiTextEmbeddings; + "@cf/baai/bge-small-en-v1.5": BaseAiTextEmbeddings; + "@cf/baai/bge-large-en-v1.5": BaseAiTextEmbeddings; + "@cf/microsoft/resnet-50": BaseAiImageClassification; + "@cf/facebook/detr-resnet-50": BaseAiObjectDetection; + "@cf/meta/llama-2-7b-chat-int8": BaseAiTextGeneration; + "@cf/mistral/mistral-7b-instruct-v0.1": BaseAiTextGeneration; + "@cf/meta/llama-2-7b-chat-fp16": BaseAiTextGeneration; + "@hf/thebloke/llama-2-13b-chat-awq": BaseAiTextGeneration; + "@hf/thebloke/mistral-7b-instruct-v0.1-awq": 
BaseAiTextGeneration; + "@hf/thebloke/zephyr-7b-beta-awq": BaseAiTextGeneration; + "@hf/thebloke/openhermes-2.5-mistral-7b-awq": BaseAiTextGeneration; + "@hf/thebloke/neural-chat-7b-v3-1-awq": BaseAiTextGeneration; + "@hf/thebloke/llamaguard-7b-awq": BaseAiTextGeneration; + "@hf/thebloke/deepseek-coder-6.7b-base-awq": BaseAiTextGeneration; + "@hf/thebloke/deepseek-coder-6.7b-instruct-awq": BaseAiTextGeneration; + "@cf/deepseek-ai/deepseek-math-7b-instruct": BaseAiTextGeneration; + "@cf/defog/sqlcoder-7b-2": BaseAiTextGeneration; + "@cf/openchat/openchat-3.5-0106": BaseAiTextGeneration; + "@cf/tiiuae/falcon-7b-instruct": BaseAiTextGeneration; + "@cf/thebloke/discolm-german-7b-v1-awq": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-0.5b-chat": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-7b-chat-awq": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-14b-chat-awq": BaseAiTextGeneration; + "@cf/tinyllama/tinyllama-1.1b-chat-v1.0": BaseAiTextGeneration; + "@cf/microsoft/phi-2": BaseAiTextGeneration; + "@cf/qwen/qwen1.5-1.8b-chat": BaseAiTextGeneration; + "@cf/mistral/mistral-7b-instruct-v0.2-lora": BaseAiTextGeneration; + "@hf/nousresearch/hermes-2-pro-mistral-7b": BaseAiTextGeneration; + "@hf/nexusflow/starling-lm-7b-beta": BaseAiTextGeneration; + "@hf/google/gemma-7b-it": BaseAiTextGeneration; + "@cf/meta-llama/llama-2-7b-chat-hf-lora": BaseAiTextGeneration; + "@cf/google/gemma-2b-it-lora": BaseAiTextGeneration; + "@cf/google/gemma-7b-it-lora": BaseAiTextGeneration; + "@hf/mistral/mistral-7b-instruct-v0.2": BaseAiTextGeneration; + "@cf/meta/llama-3-8b-instruct": BaseAiTextGeneration; + "@cf/fblgit/una-cybertron-7b-v2-bf16": BaseAiTextGeneration; + "@cf/meta/llama-3-8b-instruct-awq": BaseAiTextGeneration; + "@hf/meta-llama/meta-llama-3-8b-instruct": BaseAiTextGeneration; + "@cf/meta/llama-3.1-8b-instruct": BaseAiTextGeneration; + "@cf/meta/llama-3.1-8b-instruct-fp8": BaseAiTextGeneration; + "@cf/meta/llama-3.1-8b-instruct-awq": BaseAiTextGeneration; + 
"@cf/meta/llama-3.2-3b-instruct": BaseAiTextGeneration; + "@cf/meta/llama-3.2-1b-instruct": BaseAiTextGeneration; + "@cf/meta/llama-3.3-70b-instruct-fp8-fast": BaseAiTextGeneration; + "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b": BaseAiTextGeneration; + "@cf/meta/m2m100-1.2b": BaseAiTranslation; + "@cf/facebook/bart-large-cnn": BaseAiSummarization; + "@cf/llava-hf/llava-1.5-7b-hf": BaseAiImageToText; + "@cf/openai/whisper": Base_Ai_Cf_Openai_Whisper; + "@cf/unum/uform-gen2-qwen-500m": Base_Ai_Cf_Unum_Uform_Gen2_Qwen_500M; + "@cf/openai/whisper-tiny-en": Base_Ai_Cf_Openai_Whisper_Tiny_En; + "@cf/openai/whisper-large-v3-turbo": Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo; + "@cf/baai/bge-m3": Base_Ai_Cf_Baai_Bge_M3; + "@cf/black-forest-labs/flux-1-schnell": Base_Ai_Cf_Black_Forest_Labs_Flux_1_Schnell; + "@cf/meta/llama-3.2-11b-vision-instruct": Base_Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct; + "@cf/meta/llama-guard-3-8b": Base_Ai_Cf_Meta_Llama_Guard_3_8B; + "@cf/baai/bge-reranker-base": Base_Ai_Cf_Baai_Bge_Reranker_Base; + "@cf/meta/llama-4-scout-17b-16e-instruct": Base_Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct; +} +type AiOptions = { + gateway?: GatewayOptions; + returnRawResponse?: boolean; + prefix?: string; + extraHeaders?: object; +}; +type ConversionResponse = { + name: string; + mimeType: string; + format: "markdown"; + tokens: number; + data: string; +}; +type AiModelsSearchParams = { + author?: string; + hide_experimental?: boolean; + page?: number; + per_page?: number; + search?: string; + source?: number; + task?: string; +}; +type AiModelsSearchObject = { + id: string; + source: number; + name: string; + description: string; + task: { + id: string; + name: string; + description: string; + }; + tags: string[]; + properties: { + property_id: string; + value: string; + }[]; +}; +interface InferenceUpstreamError extends Error { +} +interface AiInternalError extends Error { +} +type AiModelListType = Record; +declare abstract class Ai { + aiGatewayLogId: string 
| null; + gateway(gatewayId: string): AiGateway; + autorag(autoragId: string): AutoRAG; + run(model: Name, inputs: AiModelList[Name]["inputs"], options?: Options): Promise; + models(params?: AiModelsSearchParams): Promise; + toMarkdown(files: { + name: string; + blob: Blob; + }[], options?: { + gateway?: GatewayOptions; + extraHeaders?: object; + }): Promise; + toMarkdown(files: { + name: string; + blob: Blob; + }, options?: { + gateway?: GatewayOptions; + extraHeaders?: object; + }): Promise; +} +type GatewayOptions = { + id: string; + cacheKey?: string; + cacheTtl?: number; + skipCache?: boolean; + metadata?: Record; + collectLog?: boolean; +}; +type AiGatewayPatchLog = { + score?: number | null; + feedback?: -1 | 1 | null; + metadata?: Record | null; +}; +type AiGatewayLog = { + id: string; + provider: string; + model: string; + model_type?: string; + path: string; + duration: number; + request_type?: string; + request_content_type?: string; + status_code: number; + response_content_type?: string; + success: boolean; + cached: boolean; + tokens_in?: number; + tokens_out?: number; + metadata?: Record; + step?: number; + cost?: number; + custom_cost?: boolean; + request_size: number; + request_head?: string; + request_head_complete: boolean; + response_size: number; + response_head?: string; + response_head_complete: boolean; + created_at: Date; +}; +type AIGatewayProviders = "workers-ai" | "anthropic" | "aws-bedrock" | "azure-openai" | "google-vertex-ai" | "huggingface" | "openai" | "perplexity-ai" | "replicate" | "groq" | "cohere" | "google-ai-studio" | "mistral" | "grok" | "openrouter" | "deepseek" | "cerebras" | "cartesia" | "elevenlabs" | "adobe-firefly"; +type AIGatewayHeaders = { + "cf-aig-metadata": Record | string; + "cf-aig-custom-cost": { + per_token_in?: number; + per_token_out?: number; + } | { + total_cost?: number; + } | string; + "cf-aig-cache-ttl": number | string; + "cf-aig-skip-cache": boolean | string; + "cf-aig-cache-key": string; + 
"cf-aig-collect-log": boolean | string; + Authorization: string; + "Content-Type": string; + [key: string]: string | number | boolean | object; +}; +type AIGatewayUniversalRequest = { + provider: AIGatewayProviders | string; // eslint-disable-line + endpoint: string; + headers: Partial; + query: unknown; +}; +interface AiGatewayInternalError extends Error { +} +interface AiGatewayLogNotFound extends Error { +} +declare abstract class AiGateway { + patchLog(logId: string, data: AiGatewayPatchLog): Promise; + getLog(logId: string): Promise; + run(data: AIGatewayUniversalRequest | AIGatewayUniversalRequest[]): Promise; + getUrl(provider?: AIGatewayProviders | string): Promise; // eslint-disable-line +} +interface AutoRAGInternalError extends Error { +} +interface AutoRAGNotFoundError extends Error { +} +interface AutoRAGUnauthorizedError extends Error { +} +type AutoRagSearchRequest = { + query: string; + max_num_results?: number; + ranking_options?: { + ranker?: string; + score_threshold?: number; + }; + rewrite_query?: boolean; +}; +type AutoRagSearchResponse = { + object: "vector_store.search_results.page"; + search_query: string; + data: { + file_id: string; + filename: string; + score: number; + attributes: Record; + content: { + type: "text"; + text: string; + }[]; + }[]; + has_more: boolean; + next_page: string | null; +}; +type AutoRagAiSearchResponse = AutoRagSearchResponse & { + response: string; +}; +declare abstract class AutoRAG { + search(params: AutoRagSearchRequest): Promise; + aiSearch(params: AutoRagSearchRequest): Promise; +} +interface BasicImageTransformations { + /** + * Maximum width in image pixels. The value must be an integer. + */ + width?: number; + /** + * Maximum height in image pixels. The value must be an integer. + */ + height?: number; + /** + * Resizing mode as a string. It affects interpretation of width and height + * options: + * - scale-down: Similar to contain, but the image is never enlarged. 
If + * the image is larger than given width or height, it will be resized. + * Otherwise its original size will be kept. + * - contain: Resizes to maximum size that fits within the given width and + * height. If only a single dimension is given (e.g. only width), the + * image will be shrunk or enlarged to exactly match that dimension. + * Aspect ratio is always preserved. + * - cover: Resizes (shrinks or enlarges) to fill the entire area of width + * and height. If the image has an aspect ratio different from the ratio + * of width and height, it will be cropped to fit. + * - crop: The image will be shrunk and cropped to fit within the area + * specified by width and height. The image will not be enlarged. For images + * smaller than the given dimensions it's the same as scale-down. For + * images larger than the given dimensions, it's the same as cover. + * See also trim. + * - pad: Resizes to the maximum size that fits within the given width and + * height, and then fills the remaining area with a background color + * (white by default). Use of this mode is not recommended, as the same + * effect can be more efficiently achieved with the contain mode and the + * CSS object-fit: contain property. + * - squeeze: Stretches and deforms to the width and height given, even if it + * breaks aspect ratio + */ + fit?: "scale-down" | "contain" | "cover" | "crop" | "pad" | "squeeze"; + /** + * When cropping with fit: "cover", this defines the side or point that should + * be left uncropped. The value is either a string + * "left", "right", "top", "bottom", "auto", or "center" (the default), + * or an object {x, y} containing focal point coordinates in the original + * image expressed as fractions ranging from 0.0 (top or left) to 1.0 + * (bottom or right), 0.5 being the center. {fit: "cover", gravity: "top"} will + * crop bottom or left and right sides as necessary, but won’t crop anything + * from the top. 
{fit: "cover", gravity: {x:0.5, y:0.2}} will crop each side to + * preserve as much as possible around a point at 20% of the height of the + * source image. + */ + gravity?: 'left' | 'right' | 'top' | 'bottom' | 'center' | 'auto' | 'entropy' | BasicImageTransformationsGravityCoordinates; + /** + * Background color to add underneath the image. Applies only to images with + * transparency (such as PNG). Accepts any CSS color (#RRGGBB, rgba(…), + * hsl(…), etc.) + */ + background?: string; + /** + * Number of degrees (90, 180, 270) to rotate the image by. width and height + * options refer to axes after rotation. + */ + rotate?: 0 | 90 | 180 | 270 | 360; +} +interface BasicImageTransformationsGravityCoordinates { + x?: number; + y?: number; + mode?: 'remainder' | 'box-center'; +} +/** + * In addition to the properties you can set in the RequestInit dict + * that you pass as an argument to the Request constructor, you can + * set certain properties of a `cf` object to control how Cloudflare + * features are applied to that new Request. + * + * Note: Currently, these properties cannot be tested in the + * playground. + */ +interface RequestInitCfProperties extends Record { + cacheEverything?: boolean; + /** + * A request's cache key is what determines if two requests are + * "the same" for caching purposes. If a request has the same cache key + * as some previous request, then we can serve the same cached response for + * both. (e.g. 'some-key') + * + * Only available for Enterprise customers. + */ + cacheKey?: string; + /** + * This allows you to append additional Cache-Tag response headers + * to the origin response without modifications to the origin server. + * This will allow for greater control over the Purge by Cache Tag feature + * utilizing changes only in the Workers process. + * + * Only available for Enterprise customers. + */ + cacheTags?: string[]; + /** + * Force response to be cached for a given number of seconds. (e.g. 
300) + */ + cacheTtl?: number; + /** + * Force response to be cached for a given number of seconds based on the Origin status code. + * (e.g. { '200-299': 86400, '404': 1, '500-599': 0 }) + */ + cacheTtlByStatus?: Record; + scrapeShield?: boolean; + apps?: boolean; + image?: RequestInitCfPropertiesImage; + minify?: RequestInitCfPropertiesImageMinify; + mirage?: boolean; + polish?: "lossy" | "lossless" | "off"; + r2?: RequestInitCfPropertiesR2; + /** + * Redirects the request to an alternate origin server. You can use this, + * for example, to implement load balancing across several origins. + * (e.g.us-east.example.com) + * + * Note - For security reasons, the hostname set in resolveOverride must + * be proxied on the same Cloudflare zone of the incoming request. + * Otherwise, the setting is ignored. CNAME hosts are allowed, so to + * resolve to a host under a different domain or a DNS only domain first + * declare a CNAME record within your own zone’s DNS mapping to the + * external hostname, set proxy on Cloudflare, then set resolveOverride + * to point to that CNAME record. + */ + resolveOverride?: string; +} +interface RequestInitCfPropertiesImageDraw extends BasicImageTransformations { + /** + * Absolute URL of the image file to use for the drawing. It can be any of + * the supported file formats. For drawing of watermarks or non-rectangular + * overlays we recommend using PNG or WebP images. + */ + url: string; + /** + * Floating-point number between 0 (transparent) and 1 (opaque). + * For example, opacity: 0.5 makes overlay semitransparent. + */ + opacity?: number; + /** + * - If set to true, the overlay image will be tiled to cover the entire + * area. This is useful for stock-photo-like watermarks. + * - If set to "x", the overlay image will be tiled horizontally only + * (form a line). + * - If set to "y", the overlay image will be tiled vertically only + * (form a line). 
+ */ + repeat?: true | "x" | "y"; + /** + * Position of the overlay image relative to a given edge. Each property is + * an offset in pixels. 0 aligns exactly to the edge. For example, left: 10 + * positions left side of the overlay 10 pixels from the left edge of the + * image it's drawn over. bottom: 0 aligns bottom of the overlay with bottom + * of the background image. + * + * Setting both left & right, or both top & bottom is an error. + * + * If no position is specified, the image will be centered. + */ + top?: number; + left?: number; + bottom?: number; + right?: number; +} +interface RequestInitCfPropertiesImage extends BasicImageTransformations { + /** + * Device Pixel Ratio. Default 1. Multiplier for width/height that makes it + * easier to specify higher-DPI sizes in . + */ + dpr?: number; + /** + * Allows you to trim your image. Takes dpr into account and is performed before + * resizing or rotation. + * + * It can be used as: + * - left, top, right, bottom - it will specify the number of pixels to cut + * off each side + * - width, height - the width/height you'd like to end up with - can be used + * in combination with the properties above + * - border - this will automatically trim the surroundings of an image based on + * it's color. It consists of three properties: + * - color: rgb or hex representation of the color you wish to trim (todo: verify the rgba bit) + * - tolerance: difference from color to treat as color + * - keep: the number of pixels of border to keep + */ + trim?: "border" | { + top?: number; + bottom?: number; + left?: number; + right?: number; + width?: number; + height?: number; + border?: boolean | { + color?: string; + tolerance?: number; + keep?: number; + }; + }; + /** + * Quality setting from 1-100 (useful values are in 60-90 range). Lower values + * make images look worse, but load faster. The default is 85. It applies only + * to JPEG and WebP images. It doesn’t have any effect on PNG. 
+ */ + quality?: number | "low" | "medium-low" | "medium-high" | "high"; + /** + * Output format to generate. It can be: + * - avif: generate images in AVIF format. + * - webp: generate images in Google WebP format. Set quality to 100 to get + * the WebP-lossless format. + * - json: instead of generating an image, outputs information about the + * image, in JSON format. The JSON object will contain image size + * (before and after resizing), source image’s MIME type, file size, etc. + * - jpeg: generate images in JPEG format. + * - png: generate images in PNG format. + */ + format?: "avif" | "webp" | "json" | "jpeg" | "png" | "baseline-jpeg" | "png-force" | "svg"; + /** + * Whether to preserve animation frames from input files. Default is true. + * Setting it to false reduces animations to still images. This setting is + * recommended when enlarging images or processing arbitrary user content, + * because large GIF animations can weigh tens or even hundreds of megabytes. + * It is also useful to set anim:false when using format:"json" to get the + * response quicker without the number of frames. + */ + anim?: boolean; + /** + * What EXIF data should be preserved in the output image. Note that EXIF + * rotation and embedded color profiles are always applied ("baked in" into + * the image), and aren't affected by this option. Note that if the Polish + * feature is enabled, all metadata may have been removed already and this + * option may have no effect. + * - keep: Preserve most of EXIF metadata, including GPS location if there's + * any. + * - copyright: Only keep the copyright tag, and discard everything else. + * This is the default behavior for JPEG files. + * - none: Discard all invisible EXIF metadata. Currently WebP and PNG + * output formats always discard metadata. + */ + metadata?: "keep" | "copyright" | "none"; + /** + * Strength of sharpening filter to apply to the image. Floating-point + * number between 0 (no sharpening, default) and 10 (maximum). 
1.0 is a + * recommended value for downscaled images. + */ + sharpen?: number; + /** + * Radius of a blur filter (approximate gaussian). Maximum supported radius + * is 250. + */ + blur?: number; + /** + * Overlays are drawn in the order they appear in the array (last array + * entry is the topmost layer). + */ + draw?: RequestInitCfPropertiesImageDraw[]; + /** + * Fetching image from authenticated origin. Setting this property will + * pass authentication headers (Authorization, Cookie, etc.) through to + * the origin. + */ + "origin-auth"?: "share-publicly"; + /** + * Adds a border around the image. The border is added after resizing. Border + * width takes dpr into account, and can be specified either using a single + * width property, or individually for each side. + */ + border?: { + color: string; + width: number; + } | { + color: string; + top: number; + right: number; + bottom: number; + left: number; + }; + /** + * Increase brightness by a factor. A value of 1.0 equals no change, a value + * of 0.5 equals half brightness, and a value of 2.0 equals twice as bright. + * 0 is ignored. + */ + brightness?: number; + /** + * Increase contrast by a factor. A value of 1.0 equals no change, a value of + * 0.5 equals low contrast, and a value of 2.0 equals high contrast. 0 is + * ignored. + */ + contrast?: number; + /** + * Increase exposure by a factor. A value of 1.0 equals no change, a value of + * 0.5 darkens the image, and a value of 2.0 lightens the image. 0 is ignored. + */ + gamma?: number; + /** + * Increase contrast by a factor. A value of 1.0 equals no change, a value of + * 0.5 equals low contrast, and a value of 2.0 equals high contrast. 0 is + * ignored. + */ + saturation?: number; + /** + * Flips the images horizontally, vertically, or both. Flipping is applied before + * rotation, so if you apply flip=h,rotate=90 then the image will be flipped + * horizontally, then rotated by 90 degrees. 
+ */ + flip?: 'h' | 'v' | 'hv'; + /** + * Slightly reduces latency on a cache miss by selecting a + * quickest-to-compress file format, at a cost of increased file size and + * lower image quality. It will usually override the format option and choose + * JPEG over WebP or AVIF. We do not recommend using this option, except in + * unusual circumstances like resizing uncacheable dynamically-generated + * images. + */ + compression?: "fast"; +} +interface RequestInitCfPropertiesImageMinify { + javascript?: boolean; + css?: boolean; + html?: boolean; +} +interface RequestInitCfPropertiesR2 { + /** + * Colo id of bucket that an object is stored in + */ + bucketColoId?: number; +} +/** + * Request metadata provided by Cloudflare's edge. + */ +type IncomingRequestCfProperties = IncomingRequestCfPropertiesBase & IncomingRequestCfPropertiesBotManagementEnterprise & IncomingRequestCfPropertiesCloudflareForSaaSEnterprise & IncomingRequestCfPropertiesGeographicInformation & IncomingRequestCfPropertiesCloudflareAccessOrApiShield; +interface IncomingRequestCfPropertiesBase extends Record { + /** + * [ASN](https://www.iana.org/assignments/as-numbers/as-numbers.xhtml) of the incoming request. + * + * @example 395747 + */ + asn: number; + /** + * The organization which owns the ASN of the incoming request. + * + * @example "Google Cloud" + */ + asOrganization: string; + /** + * The original value of the `Accept-Encoding` header if Cloudflare modified it. + * + * @example "gzip, deflate, br" + */ + clientAcceptEncoding?: string; + /** + * The number of milliseconds it took for the request to reach your worker. + * + * @example 22 + */ + clientTcpRtt?: number; + /** + * The three-letter [IATA](https://en.wikipedia.org/wiki/IATA_airport_code) + * airport code of the data center that the request hit. 
+ * + * @example "DFW" + */ + colo: string; + /** + * Represents the upstream's response to a + * [TCP `keepalive` message](https://tldp.org/HOWTO/TCP-Keepalive-HOWTO/overview.html) + * from cloudflare. + * + * For workers with no upstream, this will always be `1`. + * + * @example 3 + */ + edgeRequestKeepAliveStatus: IncomingRequestCfPropertiesEdgeRequestKeepAliveStatus; + /** + * The HTTP Protocol the request used. + * + * @example "HTTP/2" + */ + httpProtocol: string; + /** + * The browser-requested prioritization information in the request object. + * + * If no information was set, defaults to the empty string `""` + * + * @example "weight=192;exclusive=0;group=3;group-weight=127" + * @default "" + */ + requestPriority: string; + /** + * The TLS version of the connection to Cloudflare. + * In requests served over plaintext (without TLS), this property is the empty string `""`. + * + * @example "TLSv1.3" + */ + tlsVersion: string; + /** + * The cipher for the connection to Cloudflare. + * In requests served over plaintext (without TLS), this property is the empty string `""`. + * + * @example "AEAD-AES128-GCM-SHA256" + */ + tlsCipher: string; + /** + * Metadata containing the [`HELLO`](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2) and [`FINISHED`](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9) messages from this request's TLS handshake. + * + * If the incoming request was served over plaintext (without TLS) this field is undefined. + */ + tlsExportedAuthenticator?: IncomingRequestCfPropertiesExportedAuthenticatorMetadata; +} +interface IncomingRequestCfPropertiesBotManagementBase { + /** + * Cloudflare’s [level of certainty](https://developers.cloudflare.com/bots/concepts/bot-score/) that a request comes from a bot, + * represented as an integer percentage between `1` (almost certainly a bot) and `99` (almost certainly human). 
+ * + * @example 54 + */ + score: number; + /** + * A boolean value that is true if the request comes from a good bot, like Google or Bing. + * Most customers choose to allow this traffic. For more details, see [Traffic from known bots](https://developers.cloudflare.com/firewall/known-issues-and-faq/#how-does-firewall-rules-handle-traffic-from-known-bots). + */ + verifiedBot: boolean; + /** + * A boolean value that is true if the request originates from a + * Cloudflare-verified proxy service. + */ + corporateProxy: boolean; + /** + * A boolean value that's true if the request matches [file extensions](https://developers.cloudflare.com/bots/reference/static-resources/) for many types of static resources. + */ + staticResource: boolean; + /** + * List of IDs that correlate to the Bot Management heuristic detections made on a request (you can have multiple heuristic detections on the same request). + */ + detectionIds: number[]; +} +interface IncomingRequestCfPropertiesBotManagement { + /** + * Results of Cloudflare's Bot Management analysis + */ + botManagement: IncomingRequestCfPropertiesBotManagementBase; + /** + * Duplicate of `botManagement.score`. + * + * @deprecated + */ + clientTrustScore: number; +} +interface IncomingRequestCfPropertiesBotManagementEnterprise extends IncomingRequestCfPropertiesBotManagement { + /** + * Results of Cloudflare's Bot Management analysis + */ + botManagement: IncomingRequestCfPropertiesBotManagementBase & { + /** + * A [JA3 Fingerprint](https://developers.cloudflare.com/bots/concepts/ja3-fingerprint/) to help profile specific SSL/TLS clients + * across different destination IPs, Ports, and X509 certificates. + */ + ja3Hash: string; + }; +} +interface IncomingRequestCfPropertiesCloudflareForSaaSEnterprise { + /** + * Custom metadata set per-host in [Cloudflare for SaaS](https://developers.cloudflare.com/cloudflare-for-platforms/cloudflare-for-saas/). 
+ * + * This field is only present if you have Cloudflare for SaaS enabled on your account + * and you have followed the [required steps to enable it]((https://developers.cloudflare.com/cloudflare-for-platforms/cloudflare-for-saas/domain-support/custom-metadata/)). + */ + hostMetadata: HostMetadata; +} +interface IncomingRequestCfPropertiesCloudflareAccessOrApiShield { + /** + * Information about the client certificate presented to Cloudflare. + * + * This is populated when the incoming request is served over TLS using + * either Cloudflare Access or API Shield (mTLS) + * and the presented SSL certificate has a valid + * [Certificate Serial Number](https://ldapwiki.com/wiki/Certificate%20Serial%20Number) + * (i.e., not `null` or `""`). + * + * Otherwise, a set of placeholder values are used. + * + * The property `certPresented` will be set to `"1"` when + * the object is populated (i.e. the above conditions were met). + */ + tlsClientAuth: IncomingRequestCfPropertiesTLSClientAuth | IncomingRequestCfPropertiesTLSClientAuthPlaceholder; +} +/** + * Metadata about the request's TLS handshake + */ +interface IncomingRequestCfPropertiesExportedAuthenticatorMetadata { + /** + * The client's [`HELLO` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2), encoded in hexadecimal + * + * @example "44372ba35fa1270921d318f34c12f155dc87b682cf36a790cfaa3ba8737a1b5d" + */ + clientHandshake: string; + /** + * The server's [`HELLO` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2), encoded in hexadecimal + * + * @example "44372ba35fa1270921d318f34c12f155dc87b682cf36a790cfaa3ba8737a1b5d" + */ + serverHandshake: string; + /** + * The client's [`FINISHED` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9), encoded in hexadecimal + * + * @example "084ee802fe1348f688220e2a6040a05b2199a761f33cf753abb1b006792d3f8b" + */ + clientFinished: string; + /** + * The server's [`FINISHED` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9), encoded 
in hexadecimal + * + * @example "084ee802fe1348f688220e2a6040a05b2199a761f33cf753abb1b006792d3f8b" + */ + serverFinished: string; +} +/** + * Geographic data about the request's origin. + */ +interface IncomingRequestCfPropertiesGeographicInformation { + /** + * The [ISO 3166-1 Alpha 2](https://www.iso.org/iso-3166-country-codes.html) country code the request originated from. + * + * If your worker is [configured to accept TOR connections](https://support.cloudflare.com/hc/en-us/articles/203306930-Understanding-Cloudflare-Tor-support-and-Onion-Routing), this may also be `"T1"`, indicating a request that originated over TOR. + * + * If Cloudflare is unable to determine where the request originated this property is omitted. + * + * The country code `"T1"` is used for requests originating on TOR. + * + * @example "GB" + */ + country?: Iso3166Alpha2Code | "T1"; + /** + * If present, this property indicates that the request originated in the EU + * + * @example "1" + */ + isEUCountry?: "1"; + /** + * A two-letter code indicating the continent the request originated from. 
+ * + * @example "AN" + */ + continent?: ContinentCode; + /** + * The city the request originated from + * + * @example "Austin" + */ + city?: string; + /** + * Postal code of the incoming request + * + * @example "78701" + */ + postalCode?: string; + /** + * Latitude of the incoming request + * + * @example "30.27130" + */ + latitude?: string; + /** + * Longitude of the incoming request + * + * @example "-97.74260" + */ + longitude?: string; + /** + * Timezone of the incoming request + * + * @example "America/Chicago" + */ + timezone?: string; + /** + * If known, the ISO 3166-2 name for the first level region associated with + * the IP address of the incoming request + * + * @example "Texas" + */ + region?: string; + /** + * If known, the ISO 3166-2 code for the first-level region associated with + * the IP address of the incoming request + * + * @example "TX" + */ + regionCode?: string; + /** + * Metro code (DMA) of the incoming request + * + * @example "635" + */ + metroCode?: string; +} +/** Data about the incoming request's TLS certificate */ +interface IncomingRequestCfPropertiesTLSClientAuth { + /** Always `"1"`, indicating that the certificate was presented */ + certPresented: "1"; + /** + * Result of certificate verification. + * + * @example "FAILED:self signed certificate" + */ + certVerified: Exclude; + /** The presented certificate's revokation status. 
+ * + * - A value of `"1"` indicates the certificate has been revoked + * - A value of `"0"` indicates the certificate has not been revoked + */ + certRevoked: "1" | "0"; + /** + * The certificate issuer's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) + * + * @example "CN=cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certIssuerDN: string; + /** + * The certificate subject's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) + * + * @example "CN=*.cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certSubjectDN: string; + /** + * The certificate issuer's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) ([RFC 2253](https://www.rfc-editor.org/rfc/rfc2253.html) formatted) + * + * @example "CN=cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certIssuerDNRFC2253: string; + /** + * The certificate subject's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) ([RFC 2253](https://www.rfc-editor.org/rfc/rfc2253.html) formatted) + * + * @example "CN=*.cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" + */ + certSubjectDNRFC2253: string; + /** The certificate issuer's distinguished name (legacy policies) */ + certIssuerDNLegacy: string; + /** The certificate subject's distinguished name (legacy policies) */ + certSubjectDNLegacy: string; + /** + * The certificate's serial number + * + * @example "00936EACBE07F201DF" + */ + certSerial: string; + /** + * The certificate issuer's serial number + * + * @example "2489002934BDFEA34" + */ + certIssuerSerial: string; + /** + * The certificate's Subject Key Identifier + * + * @example "BB:AF:7E:02:3D:FA:A6:F1:3C:84:8E:AD:EE:38:98:EC:D9:32:32:D4" + */ + certSKI: string; + /** + * The certificate issuer's Subject Key Identifier + * + * @example "BB:AF:7E:02:3D:FA:A6:F1:3C:84:8E:AD:EE:38:98:EC:D9:32:32:D4" + */ + 
certIssuerSKI: string; + /** + * The certificate's SHA-1 fingerprint + * + * @example "6b9109f323999e52259cda7373ff0b4d26bd232e" + */ + certFingerprintSHA1: string; + /** + * The certificate's SHA-256 fingerprint + * + * @example "acf77cf37b4156a2708e34c4eb755f9b5dbbe5ebb55adfec8f11493438d19e6ad3f157f81fa3b98278453d5652b0c1fd1d71e5695ae4d709803a4d3f39de9dea" + */ + certFingerprintSHA256: string; + /** + * The effective starting date of the certificate + * + * @example "Dec 22 19:39:00 2018 GMT" + */ + certNotBefore: string; + /** + * The effective expiration date of the certificate + * + * @example "Dec 22 19:39:00 2018 GMT" + */ + certNotAfter: string; +} +/** Placeholder values for TLS Client Authorization */ +interface IncomingRequestCfPropertiesTLSClientAuthPlaceholder { + certPresented: "0"; + certVerified: "NONE"; + certRevoked: "0"; + certIssuerDN: ""; + certSubjectDN: ""; + certIssuerDNRFC2253: ""; + certSubjectDNRFC2253: ""; + certIssuerDNLegacy: ""; + certSubjectDNLegacy: ""; + certSerial: ""; + certIssuerSerial: ""; + certSKI: ""; + certIssuerSKI: ""; + certFingerprintSHA1: ""; + certFingerprintSHA256: ""; + certNotBefore: ""; + certNotAfter: ""; +} +/** Possible outcomes of TLS verification */ +declare type CertVerificationStatus = +/** Authentication succeeded */ +"SUCCESS" +/** No certificate was presented */ + | "NONE" +/** Failed because the certificate was self-signed */ + | "FAILED:self signed certificate" +/** Failed because the certificate failed a trust chain check */ + | "FAILED:unable to verify the first certificate" +/** Failed because the certificate not yet valid */ + | "FAILED:certificate is not yet valid" +/** Failed because the certificate is expired */ + | "FAILED:certificate has expired" +/** Failed for another unspecified reason */ + | "FAILED"; +/** + * An upstream endpoint's response to a TCP `keepalive` message from Cloudflare. 
+ */ +declare type IncomingRequestCfPropertiesEdgeRequestKeepAliveStatus = 0 /** Unknown */ | 1 /** no keepalives (not found) */ | 2 /** no connection re-use, opening keepalive connection failed */ | 3 /** no connection re-use, keepalive accepted and saved */ | 4 /** connection re-use, refused by the origin server (`TCP FIN`) */ | 5; /** connection re-use, accepted by the origin server */ +/** ISO 3166-1 Alpha-2 codes */ +declare type Iso3166Alpha2Code = "AD" | "AE" | "AF" | "AG" | "AI" | "AL" | "AM" | "AO" | "AQ" | "AR" | "AS" | "AT" | "AU" | "AW" | "AX" | "AZ" | "BA" | "BB" | "BD" | "BE" | "BF" | "BG" | "BH" | "BI" | "BJ" | "BL" | "BM" | "BN" | "BO" | "BQ" | "BR" | "BS" | "BT" | "BV" | "BW" | "BY" | "BZ" | "CA" | "CC" | "CD" | "CF" | "CG" | "CH" | "CI" | "CK" | "CL" | "CM" | "CN" | "CO" | "CR" | "CU" | "CV" | "CW" | "CX" | "CY" | "CZ" | "DE" | "DJ" | "DK" | "DM" | "DO" | "DZ" | "EC" | "EE" | "EG" | "EH" | "ER" | "ES" | "ET" | "FI" | "FJ" | "FK" | "FM" | "FO" | "FR" | "GA" | "GB" | "GD" | "GE" | "GF" | "GG" | "GH" | "GI" | "GL" | "GM" | "GN" | "GP" | "GQ" | "GR" | "GS" | "GT" | "GU" | "GW" | "GY" | "HK" | "HM" | "HN" | "HR" | "HT" | "HU" | "ID" | "IE" | "IL" | "IM" | "IN" | "IO" | "IQ" | "IR" | "IS" | "IT" | "JE" | "JM" | "JO" | "JP" | "KE" | "KG" | "KH" | "KI" | "KM" | "KN" | "KP" | "KR" | "KW" | "KY" | "KZ" | "LA" | "LB" | "LC" | "LI" | "LK" | "LR" | "LS" | "LT" | "LU" | "LV" | "LY" | "MA" | "MC" | "MD" | "ME" | "MF" | "MG" | "MH" | "MK" | "ML" | "MM" | "MN" | "MO" | "MP" | "MQ" | "MR" | "MS" | "MT" | "MU" | "MV" | "MW" | "MX" | "MY" | "MZ" | "NA" | "NC" | "NE" | "NF" | "NG" | "NI" | "NL" | "NO" | "NP" | "NR" | "NU" | "NZ" | "OM" | "PA" | "PE" | "PF" | "PG" | "PH" | "PK" | "PL" | "PM" | "PN" | "PR" | "PS" | "PT" | "PW" | "PY" | "QA" | "RE" | "RO" | "RS" | "RU" | "RW" | "SA" | "SB" | "SC" | "SD" | "SE" | "SG" | "SH" | "SI" | "SJ" | "SK" | "SL" | "SM" | "SN" | "SO" | "SR" | "SS" | "ST" | "SV" | "SX" | "SY" | "SZ" | "TC" | "TD" | "TF" | "TG" | "TH" | "TJ" | "TK" | 
"TL" | "TM" | "TN" | "TO" | "TR" | "TT" | "TV" | "TW" | "TZ" | "UA" | "UG" | "UM" | "US" | "UY" | "UZ" | "VA" | "VC" | "VE" | "VG" | "VI" | "VN" | "VU" | "WF" | "WS" | "YE" | "YT" | "ZA" | "ZM" | "ZW"; +/** The 2-letter continent codes Cloudflare uses */ +declare type ContinentCode = "AF" | "AN" | "AS" | "EU" | "NA" | "OC" | "SA"; +type CfProperties = IncomingRequestCfProperties | RequestInitCfProperties; +interface D1Meta { + duration: number; + size_after: number; + rows_read: number; + rows_written: number; + last_row_id: number; + changed_db: boolean; + changes: number; + /** + * The region of the database instance that executed the query. + */ + served_by_region?: string; + /** + * True if-and-only-if the database instance that executed the query was the primary. + */ + served_by_primary?: boolean; + timings?: { + /** + * The duration of the SQL query execution by the database instance. It doesn't include any network time. + */ + sql_duration_ms: number; + }; +} +interface D1Response { + success: true; + meta: D1Meta & Record; + error?: never; +} +type D1Result = D1Response & { + results: T[]; +}; +interface D1ExecResult { + count: number; + duration: number; +} +type D1SessionConstraint = +// Indicates that the first query should go to the primary, and the rest queries +// using the same D1DatabaseSession will go to any replica that is consistent with +// the bookmark maintained by the session (returned by the first query). +"first-primary" +// Indicates that the first query can go anywhere (primary or replica), and the rest queries +// using the same D1DatabaseSession will go to any replica that is consistent with +// the bookmark maintained by the session (returned by the first query). 
+ | "first-unconstrained"; +type D1SessionBookmark = string; +declare abstract class D1Database { + prepare(query: string): D1PreparedStatement; + batch(statements: D1PreparedStatement[]): Promise[]>; + exec(query: string): Promise; + /** + * Creates a new D1 Session anchored at the given constraint or the bookmark. + * All queries executed using the created session will have sequential consistency, + * meaning that all writes done through the session will be visible in subsequent reads. + * + * @param constraintOrBookmark Either the session constraint or the explicit bookmark to anchor the created session. + */ + withSession(constraintOrBookmark?: D1SessionBookmark | D1SessionConstraint): D1DatabaseSession; + /** + * @deprecated dump() will be removed soon, only applies to deprecated alpha v1 databases. + */ + dump(): Promise; +} +declare abstract class D1DatabaseSession { + prepare(query: string): D1PreparedStatement; + batch(statements: D1PreparedStatement[]): Promise[]>; + /** + * @returns The latest session bookmark across all executed queries on the session. + * If no query has been executed yet, `null` is returned. + */ + getBookmark(): D1SessionBookmark | null; +} +declare abstract class D1PreparedStatement { + bind(...values: unknown[]): D1PreparedStatement; + first(colName: string): Promise; + first>(): Promise; + run>(): Promise>; + all>(): Promise>; + raw(options: { + columnNames: true; + }): Promise<[ + string[], + ...T[] + ]>; + raw(options?: { + columnNames?: false; + }): Promise; +} +// `Disposable` was added to TypeScript's standard lib types in version 5.2. +// To support older TypeScript versions, define an empty `Disposable` interface. +// Users won't be able to use `using`/`Symbol.dispose` without upgrading to 5.2, +// but this will ensure type checking on older versions still passes. +// TypeScript's interface merging will ensure our empty interface is effectively +// ignored when `Disposable` is included in the standard lib. 
+interface Disposable { +} +/** + * An email message that can be sent from a Worker. + */ +interface EmailMessage { + /** + * Envelope From attribute of the email message. + */ + readonly from: string; + /** + * Envelope To attribute of the email message. + */ + readonly to: string; +} +/** + * An email message that is sent to a consumer Worker and can be rejected/forwarded. + */ +interface ForwardableEmailMessage extends EmailMessage { + /** + * Stream of the email message content. + */ + readonly raw: ReadableStream; + /** + * An [Headers object](https://developer.mozilla.org/en-US/docs/Web/API/Headers). + */ + readonly headers: Headers; + /** + * Size of the email message content. + */ + readonly rawSize: number; + /** + * Reject this email message by returning a permanent SMTP error back to the connecting client including the given reason. + * @param reason The reject reason. + * @returns void + */ + setReject(reason: string): void; + /** + * Forward this email message to a verified destination address of the account. + * @param rcptTo Verified destination address. + * @param headers A [Headers object](https://developer.mozilla.org/en-US/docs/Web/API/Headers). + * @returns A promise that resolves when the email message is forwarded. + */ + forward(rcptTo: string, headers?: Headers): Promise; + /** + * Reply to the sender of this email message with a new EmailMessage object. + * @param message The reply message. + * @returns A promise that resolves when the email message is replied. + */ + reply(message: EmailMessage): Promise; +} +/** + * A binding that allows a Worker to send email messages. 
+ */ +interface SendEmail { + send(message: EmailMessage): Promise; +} +declare abstract class EmailEvent extends ExtendableEvent { + readonly message: ForwardableEmailMessage; +} +declare type EmailExportedHandler = (message: ForwardableEmailMessage, env: Env, ctx: ExecutionContext) => void | Promise; +declare module "cloudflare:email" { + let _EmailMessage: { + prototype: EmailMessage; + new (from: string, to: string, raw: ReadableStream | string): EmailMessage; + }; + export { _EmailMessage as EmailMessage }; +} +interface Hyperdrive { + /** + * Connect directly to Hyperdrive as if it's your database, returning a TCP socket. + * + * Calling this method returns an idential socket to if you call + * `connect("host:port")` using the `host` and `port` fields from this object. + * Pick whichever approach works better with your preferred DB client library. + * + * Note that this socket is not yet authenticated -- it's expected that your + * code (or preferably, the client library of your choice) will authenticate + * using the information in this class's readonly fields. + */ + connect(): Socket; + /** + * A valid DB connection string that can be passed straight into the typical + * client library/driver/ORM. This will typically be the easiest way to use + * Hyperdrive. + */ + readonly connectionString: string; + /* + * A randomly generated hostname that is only valid within the context of the + * currently running Worker which, when passed into `connect()` function from + * the "cloudflare:sockets" module, will connect to the Hyperdrive instance + * for your database. + */ + readonly host: string; + /* + * The port that must be paired the the host field when connecting. + */ + readonly port: number; + /* + * The username to use when authenticating to your database via Hyperdrive. 
+ * Unlike the host and password, this will be the same every time + */ + readonly user: string; + /* + * The randomly generated password to use when authenticating to your + * database via Hyperdrive. Like the host field, this password is only valid + * within the context of the currently running Worker instance from which + * it's read. + */ + readonly password: string; + /* + * The name of the database to connect to. + */ + readonly database: string; +} +// Copyright (c) 2024 Cloudflare, Inc. +// Licensed under the Apache 2.0 license found in the LICENSE file or at: +// https://opensource.org/licenses/Apache-2.0 +type ImageInfoResponse = { + format: 'image/svg+xml'; +} | { + format: string; + fileSize: number; + width: number; + height: number; +}; +type ImageTransform = { + width?: number; + height?: number; + background?: string; + blur?: number; + border?: { + color?: string; + width?: number; + } | { + top?: number; + bottom?: number; + left?: number; + right?: number; + }; + brightness?: number; + contrast?: number; + fit?: 'scale-down' | 'contain' | 'pad' | 'squeeze' | 'cover' | 'crop'; + flip?: 'h' | 'v' | 'hv'; + gamma?: number; + gravity?: 'left' | 'right' | 'top' | 'bottom' | 'center' | 'auto' | 'entropy' | { + x?: number; + y?: number; + mode: 'remainder' | 'box-center'; + }; + rotate?: 0 | 90 | 180 | 270; + saturation?: number; + sharpen?: number; + trim?: "border" | { + top?: number; + bottom?: number; + left?: number; + right?: number; + width?: number; + height?: number; + border?: boolean | { + color?: string; + tolerance?: number; + keep?: number; + }; + }; +}; +type ImageDrawOptions = { + opacity?: number; + repeat?: boolean | string; + top?: number; + left?: number; + bottom?: number; + right?: number; +}; +type ImageOutputOptions = { + format: 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp' | 'image/avif' | 'rgb' | 'rgba'; + quality?: number; + background?: string; +}; +interface ImagesBinding { + /** + * Get image metadata (type, 
width and height) + * @throws {@link ImagesError} with code 9412 if input is not an image + * @param stream The image bytes + */ + info(stream: ReadableStream): Promise; + /** + * Begin applying a series of transformations to an image + * @param stream The image bytes + * @returns A transform handle + */ + input(stream: ReadableStream): ImageTransformer; +} +interface ImageTransformer { + /** + * Apply transform next, returning a transform handle. + * You can then apply more transformations, draw, or retrieve the output. + * @param transform + */ + transform(transform: ImageTransform): ImageTransformer; + /** + * Draw an image on this transformer, returning a transform handle. + * You can then apply more transformations, draw, or retrieve the output. + * @param image The image (or transformer that will give the image) to draw + * @param options The options configuring how to draw the image + */ + draw(image: ReadableStream | ImageTransformer, options?: ImageDrawOptions): ImageTransformer; + /** + * Retrieve the image that results from applying the transforms to the + * provided input + * @param options Options that apply to the output e.g. output format + */ + output(options: ImageOutputOptions): Promise; +} +interface ImageTransformationResult { + /** + * The image as a response, ready to store in cache or return to users + */ + response(): Response; + /** + * The content type of the returned image + */ + contentType(): string; + /** + * The bytes of the response + */ + image(): ReadableStream; +} +interface ImagesError extends Error { + readonly code: number; + readonly message: string; + readonly stack?: string; +} +type Params

= Record; +type EventContext = { + request: Request>; + functionPath: string; + waitUntil: (promise: Promise) => void; + passThroughOnException: () => void; + next: (input?: Request | string, init?: RequestInit) => Promise; + env: Env & { + ASSETS: { + fetch: typeof fetch; + }; + }; + params: Params

; + data: Data; +}; +type PagesFunction = Record> = (context: EventContext) => Response | Promise; +type EventPluginContext = { + request: Request>; + functionPath: string; + waitUntil: (promise: Promise) => void; + passThroughOnException: () => void; + next: (input?: Request | string, init?: RequestInit) => Promise; + env: Env & { + ASSETS: { + fetch: typeof fetch; + }; + }; + params: Params

; + data: Data; + pluginArgs: PluginArgs; +}; +type PagesPluginFunction = Record, PluginArgs = unknown> = (context: EventPluginContext) => Response | Promise; +declare module "assets:*" { + export const onRequest: PagesFunction; +} +// Copyright (c) 2022-2023 Cloudflare, Inc. +// Licensed under the Apache 2.0 license found in the LICENSE file or at: +// https://opensource.org/licenses/Apache-2.0 +declare module "cloudflare:pipelines" { + export abstract class PipelineTransformationEntrypoint { + protected env: Env; + protected ctx: ExecutionContext; + constructor(ctx: ExecutionContext, env: Env); + /** + * run recieves an array of PipelineRecord which can be + * transformed and returned to the pipeline + * @param records Incoming records from the pipeline to be transformed + * @param metadata Information about the specific pipeline calling the transformation entrypoint + * @returns A promise containing the transformed PipelineRecord array + */ + public run(records: I[], metadata: PipelineBatchMetadata): Promise; + } + export type PipelineRecord = Record; + export type PipelineBatchMetadata = { + pipelineId: string; + pipelineName: string; + }; + export interface Pipeline { + /** + * The Pipeline interface represents the type of a binding to a Pipeline + * + * @param records The records to send to the pipeline + */ + send(records: T[]): Promise; + } +} +// PubSubMessage represents an incoming PubSub message. +// The message includes metadata about the broker, the client, and the payload +// itself. +// https://developers.cloudflare.com/pub-sub/ +interface PubSubMessage { + // Message ID + readonly mid: number; + // MQTT broker FQDN in the form mqtts://BROKER.NAMESPACE.cloudflarepubsub.com:PORT + readonly broker: string; + // The MQTT topic the message was sent on. + readonly topic: string; + // The client ID of the client that published this message. 
+ readonly clientId: string; + // The unique identifier (JWT ID) used by the client to authenticate, if token + // auth was used. + readonly jti?: string; + // A Unix timestamp (seconds from Jan 1, 1970), set when the Pub/Sub Broker + // received the message from the client. + readonly receivedAt: number; + // An (optional) string with the MIME type of the payload, if set by the + // client. + readonly contentType: string; + // Set to 1 when the payload is a UTF-8 string + // https://docs.oasis-open.org/mqtt/mqtt/v5.0/os/mqtt-v5.0-os.html#_Toc3901063 + readonly payloadFormatIndicator: number; + // Pub/Sub (MQTT) payloads can be UTF-8 strings, or byte arrays. + // You can use payloadFormatIndicator to inspect this before decoding. + payload: string | Uint8Array; +} +// JsonWebKey extended by kid parameter +interface JsonWebKeyWithKid extends JsonWebKey { + // Key Identifier of the JWK + readonly kid: string; +} +interface RateLimitOptions { + key: string; +} +interface RateLimitOutcome { + success: boolean; +} +interface RateLimit { + /** + * Rate limit a request based on the provided options. + * @see https://developers.cloudflare.com/workers/runtime-apis/bindings/rate-limit/ + * @returns A promise that resolves with the outcome of the rate limit. + */ + limit(options: RateLimitOptions): Promise; +} +// Namespace for RPC utility types. Unfortunately, we can't use a `module` here as these types need +// to referenced by `Fetcher`. This is included in the "importable" version of the types which +// strips all `module` blocks. +declare namespace Rpc { + // Branded types for identifying `WorkerEntrypoint`/`DurableObject`/`Target`s. + // TypeScript uses *structural* typing meaning anything with the same shape as type `T` is a `T`. + // For the classes exported by `cloudflare:workers` we want *nominal* typing (i.e. 
we only want to + // accept `WorkerEntrypoint` from `cloudflare:workers`, not any other class with the same shape) + export const __RPC_STUB_BRAND: '__RPC_STUB_BRAND'; + export const __RPC_TARGET_BRAND: '__RPC_TARGET_BRAND'; + export const __WORKER_ENTRYPOINT_BRAND: '__WORKER_ENTRYPOINT_BRAND'; + export const __DURABLE_OBJECT_BRAND: '__DURABLE_OBJECT_BRAND'; + export const __WORKFLOW_ENTRYPOINT_BRAND: '__WORKFLOW_ENTRYPOINT_BRAND'; + export interface RpcTargetBranded { + [__RPC_TARGET_BRAND]: never; + } + export interface WorkerEntrypointBranded { + [__WORKER_ENTRYPOINT_BRAND]: never; + } + export interface DurableObjectBranded { + [__DURABLE_OBJECT_BRAND]: never; + } + export interface WorkflowEntrypointBranded { + [__WORKFLOW_ENTRYPOINT_BRAND]: never; + } + export type EntrypointBranded = WorkerEntrypointBranded | DurableObjectBranded | WorkflowEntrypointBranded; + // Types that can be used through `Stub`s + export type Stubable = RpcTargetBranded | ((...args: any[]) => any); + // Types that can be passed over RPC + // The reason for using a generic type here is to build a serializable subset of structured + // cloneable composite types. This allows types defined with the "interface" keyword to pass the + // serializable check as well. Otherwise, only types defined with the "type" keyword would pass. + type Serializable = + // Structured cloneables + BaseType + // Structured cloneable composites + | Map ? Serializable : never, T extends Map ? Serializable : never> | Set ? Serializable : never> | ReadonlyArray ? Serializable : never> | { + [K in keyof T]: K extends number | string ? Serializable : never; + } + // Special types + | Stub + // Serialized as stubs, see `Stubify` + | Stubable; + // Base type for all RPC stubs, including common memory management methods. + // `T` is used as a marker type for unwrapping `Stub`s later. 
+ interface StubBase extends Disposable { + [__RPC_STUB_BRAND]: T; + dup(): this; + } + export type Stub = Provider & StubBase; + // This represents all the types that can be sent as-is over an RPC boundary + type BaseType = void | undefined | null | boolean | number | bigint | string | TypedArray | ArrayBuffer | DataView | Date | Error | RegExp | ReadableStream | WritableStream | Request | Response | Headers; + // Recursively rewrite all `Stubable` types with `Stub`s + // prettier-ignore + type Stubify = T extends Stubable ? Stub : T extends Map ? Map, Stubify> : T extends Set ? Set> : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends BaseType ? T : T extends { + [key: string | number]: any; + } ? { + [K in keyof T]: Stubify; + } : T; + // Recursively rewrite all `Stub`s with the corresponding `T`s. + // Note we use `StubBase` instead of `Stub` here to avoid circular dependencies: + // `Stub` depends on `Provider`, which depends on `Unstubify`, which would depend on `Stub`. + // prettier-ignore + type Unstubify = T extends StubBase ? V : T extends Map ? Map, Unstubify> : T extends Set ? Set> : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends BaseType ? T : T extends { + [key: string | number]: unknown; + } ? { + [K in keyof T]: Unstubify; + } : T; + type UnstubifyAll = { + [I in keyof A]: Unstubify; + }; + // Utility type for adding `Provider`/`Disposable`s to `object` types only. + // Note `unknown & T` is equivalent to `T`. + type MaybeProvider = T extends object ? Provider : unknown; + type MaybeDisposable = T extends object ? Disposable : unknown; + // Type for method return or property on an RPC interface. + // - Stubable types are replaced by stubs. + // - Serializable types are passed by value, with stubable types replaced by stubs + // and a top-level `Disposer`. + // Everything else can't be passed over PRC. + // Technically, we use custom thenables here, but they quack like `Promise`s. 
+ // Intersecting with `(Maybe)Provider` allows pipelining. + // prettier-ignore + type Result = R extends Stubable ? Promise> & Provider : R extends Serializable ? Promise & MaybeDisposable> & MaybeProvider : never; + // Type for method or property on an RPC interface. + // For methods, unwrap `Stub`s in parameters, and rewrite returns to be `Result`s. + // Unwrapping `Stub`s allows calling with `Stubable` arguments. + // For properties, rewrite types to be `Result`s. + // In each case, unwrap `Promise`s. + type MethodOrProperty = V extends (...args: infer P) => infer R ? (...args: UnstubifyAll

) => Result> : Result>; + // Type for the callable part of an `Provider` if `T` is callable. + // This is intersected with methods/properties. + type MaybeCallableProvider = T extends (...args: any[]) => any ? MethodOrProperty : unknown; + // Base type for all other types providing RPC-like interfaces. + // Rewrites all methods/properties to be `MethodOrProperty`s, while preserving callable types. + // `Reserved` names (e.g. stub method names like `dup()`) and symbols can't be accessed over RPC. + export type Provider = MaybeCallableProvider & { + [K in Exclude>]: MethodOrProperty; + }; +} +declare namespace Cloudflare { + interface Env { + } +} +declare module 'cloudflare:workers' { + export type RpcStub = Rpc.Stub; + export const RpcStub: { + new (value: T): Rpc.Stub; + }; + export abstract class RpcTarget implements Rpc.RpcTargetBranded { + [Rpc.__RPC_TARGET_BRAND]: never; + } + // `protected` fields don't appear in `keyof`s, so can't be accessed over RPC + export abstract class WorkerEntrypoint implements Rpc.WorkerEntrypointBranded { + [Rpc.__WORKER_ENTRYPOINT_BRAND]: never; + protected ctx: ExecutionContext; + protected env: Env; + constructor(ctx: ExecutionContext, env: Env); + fetch?(request: Request): Response | Promise; + tail?(events: TraceItem[]): void | Promise; + trace?(traces: TraceItem[]): void | Promise; + scheduled?(controller: ScheduledController): void | Promise; + queue?(batch: MessageBatch): void | Promise; + test?(controller: TestController): void | Promise; + } + export abstract class DurableObject implements Rpc.DurableObjectBranded { + [Rpc.__DURABLE_OBJECT_BRAND]: never; + protected ctx: DurableObjectState; + protected env: Env; + constructor(ctx: DurableObjectState, env: Env); + fetch?(request: Request): Response | Promise; + alarm?(alarmInfo?: AlarmInvocationInfo): void | Promise; + webSocketMessage?(ws: WebSocket, message: string | ArrayBuffer): void | Promise; + webSocketClose?(ws: WebSocket, code: number, reason: string, wasClean: 
boolean): void | Promise; + webSocketError?(ws: WebSocket, error: unknown): void | Promise; + } + export type WorkflowDurationLabel = 'second' | 'minute' | 'hour' | 'day' | 'week' | 'month' | 'year'; + export type WorkflowSleepDuration = `${number} ${WorkflowDurationLabel}${'s' | ''}` | number; + export type WorkflowDelayDuration = WorkflowSleepDuration; + export type WorkflowTimeoutDuration = WorkflowSleepDuration; + export type WorkflowBackoff = 'constant' | 'linear' | 'exponential'; + export type WorkflowStepConfig = { + retries?: { + limit: number; + delay: WorkflowDelayDuration | number; + backoff?: WorkflowBackoff; + }; + timeout?: WorkflowTimeoutDuration | number; + }; + export type WorkflowEvent = { + payload: Readonly; + timestamp: Date; + instanceId: string; + }; + export type WorkflowStepEvent = { + payload: Readonly; + timestamp: Date; + type: string; + }; + export abstract class WorkflowStep { + do>(name: string, callback: () => Promise): Promise; + do>(name: string, config: WorkflowStepConfig, callback: () => Promise): Promise; + sleep: (name: string, duration: WorkflowSleepDuration) => Promise; + sleepUntil: (name: string, timestamp: Date | number) => Promise; + waitForEvent>(name: string, options: { + type: string; + timeout?: WorkflowTimeoutDuration | number; + }): Promise>; + } + export abstract class WorkflowEntrypoint | unknown = unknown> implements Rpc.WorkflowEntrypointBranded { + [Rpc.__WORKFLOW_ENTRYPOINT_BRAND]: never; + protected ctx: ExecutionContext; + protected env: Env; + constructor(ctx: ExecutionContext, env: Env); + run(event: Readonly>, step: WorkflowStep): Promise; + } + export const env: Cloudflare.Env; +} +interface SecretsStoreSecret { + /** + * Get a secret from the Secrets Store, returning a string of the secret value + * if it exists, or throws an error if it does not exist + */ + get(): Promise; +} +declare module "cloudflare:sockets" { + function _connect(address: string | SocketAddress, options?: SocketOptions): Socket; + 
export { _connect as connect }; +} +declare namespace TailStream { + interface Header { + readonly name: string; + readonly value: string; + } + interface FetchEventInfo { + readonly type: "fetch"; + readonly method: string; + readonly url: string; + readonly cfJson: string; + readonly headers: Header[]; + } + interface JsRpcEventInfo { + readonly type: "jsrpc"; + readonly methodName: string; + } + interface ScheduledEventInfo { + readonly type: "scheduled"; + readonly scheduledTime: Date; + readonly cron: string; + } + interface AlarmEventInfo { + readonly type: "alarm"; + readonly scheduledTime: Date; + } + interface QueueEventInfo { + readonly type: "queue"; + readonly queueName: string; + readonly batchSize: number; + } + interface EmailEventInfo { + readonly type: "email"; + readonly mailFrom: string; + readonly rcptTo: string; + readonly rawSize: number; + } + interface TraceEventInfo { + readonly type: "trace"; + readonly traces: (string | null)[]; + } + interface HibernatableWebSocketEventInfoMessage { + readonly type: "message"; + } + interface HibernatableWebSocketEventInfoError { + readonly type: "error"; + } + interface HibernatableWebSocketEventInfoClose { + readonly type: "close"; + readonly code: number; + readonly wasClean: boolean; + } + interface HibernatableWebSocketEventInfo { + readonly type: "hibernatableWebSocket"; + readonly info: HibernatableWebSocketEventInfoClose | HibernatableWebSocketEventInfoError | HibernatableWebSocketEventInfoMessage; + } + interface Resume { + readonly type: "resume"; + readonly attachment?: any; + } + interface CustomEventInfo { + readonly type: "custom"; + } + interface FetchResponseInfo { + readonly type: "fetch"; + readonly statusCode: number; + } + type EventOutcome = "ok" | "canceled" | "exception" | "unknown" | "killSwitch" | "daemonDown" | "exceededCpu" | "exceededMemory" | "loadShed" | "responseStreamDisconnected" | "scriptNotFound"; + interface ScriptVersion { + readonly id: string; + readonly tag?: 
string; + readonly message?: string; + } + interface Trigger { + readonly traceId: string; + readonly invocationId: string; + readonly spanId: string; + } + interface Onset { + readonly type: "onset"; + readonly dispatchNamespace?: string; + readonly entrypoint?: string; + readonly scriptName?: string; + readonly scriptTags?: string[]; + readonly scriptVersion?: ScriptVersion; + readonly trigger?: Trigger; + readonly info: FetchEventInfo | JsRpcEventInfo | ScheduledEventInfo | AlarmEventInfo | QueueEventInfo | EmailEventInfo | TraceEventInfo | HibernatableWebSocketEventInfo | Resume | CustomEventInfo; + } + interface Outcome { + readonly type: "outcome"; + readonly outcome: EventOutcome; + readonly cpuTime: number; + readonly wallTime: number; + } + interface Hibernate { + readonly type: "hibernate"; + } + interface SpanOpen { + readonly type: "spanOpen"; + readonly op?: string; + readonly info?: FetchEventInfo | JsRpcEventInfo | Attribute[]; + } + interface SpanClose { + readonly type: "spanClose"; + readonly outcome: EventOutcome; + } + interface DiagnosticChannelEvent { + readonly type: "diagnosticChannel"; + readonly channel: string; + readonly message: any; + } + interface Exception { + readonly type: "exception"; + readonly name: string; + readonly message: string; + readonly stack?: string; + } + interface Log { + readonly type: "log"; + readonly level: "debug" | "error" | "info" | "log" | "warn"; + readonly message: string; + } + interface Return { + readonly type: "return"; + readonly info?: FetchResponseInfo | Attribute[]; + } + interface Link { + readonly type: "link"; + readonly label?: string; + readonly traceId: string; + readonly invocationId: string; + readonly spanId: string; + } + interface Attribute { + readonly type: "attribute"; + readonly name: string; + readonly value: string | string[] | boolean | boolean[] | number | number[]; + } + type Mark = DiagnosticChannelEvent | Exception | Log | Return | Link | Attribute[]; + interface TailEvent { + 
readonly traceId: string; + readonly invocationId: string; + readonly spanId: string; + readonly timestamp: Date; + readonly sequence: number; + readonly event: Onset | Outcome | Hibernate | SpanOpen | SpanClose | Mark; + } + type TailEventHandler = (event: TailEvent) => void | Promise; + type TailEventHandlerName = "onset" | "outcome" | "hibernate" | "spanOpen" | "spanClose" | "diagnosticChannel" | "exception" | "log" | "return" | "link" | "attribute"; + type TailEventHandlerObject = Record; + type TailEventHandlerType = TailEventHandler | TailEventHandlerObject; +} +// Copyright (c) 2022-2023 Cloudflare, Inc. +// Licensed under the Apache 2.0 license found in the LICENSE file or at: +// https://opensource.org/licenses/Apache-2.0 +/** + * Data types supported for holding vector metadata. + */ +type VectorizeVectorMetadataValue = string | number | boolean | string[]; +/** + * Additional information to associate with a vector. + */ +type VectorizeVectorMetadata = VectorizeVectorMetadataValue | Record; +type VectorFloatArray = Float32Array | Float64Array; +interface VectorizeError { + code?: number; + error: string; +} +/** + * Comparison logic/operation to use for metadata filtering. + * + * This list is expected to grow as support for more operations are released. + */ +type VectorizeVectorMetadataFilterOp = "$eq" | "$ne"; +/** + * Filter criteria for vector metadata used to limit the retrieved query result set. + */ +type VectorizeVectorMetadataFilter = { + [field: string]: Exclude | null | { + [Op in VectorizeVectorMetadataFilterOp]?: Exclude | null; + }; +}; +/** + * Supported distance metrics for an index. + * Distance metrics determine how other "similar" vectors are determined. + */ +type VectorizeDistanceMetric = "euclidean" | "cosine" | "dot-product"; +/** + * Metadata return levels for a Vectorize query. + * + * Default to "none". 
+ * + * @property all Full metadata for the vector return set, including all fields (including those un-indexed) without truncation. This is a more expensive retrieval, as it requires additional fetching & reading of un-indexed data. + * @property indexed Return all metadata fields configured for indexing in the vector return set. This level of retrieval is "free" in that no additional overhead is incurred returning this data. However, note that indexed metadata is subject to truncation (especially for larger strings). + * @property none No indexed metadata will be returned. + */ +type VectorizeMetadataRetrievalLevel = "all" | "indexed" | "none"; +interface VectorizeQueryOptions { + topK?: number; + namespace?: string; + returnValues?: boolean; + returnMetadata?: boolean | VectorizeMetadataRetrievalLevel; + filter?: VectorizeVectorMetadataFilter; +} +/** + * Information about the configuration of an index. + */ +type VectorizeIndexConfig = { + dimensions: number; + metric: VectorizeDistanceMetric; +} | { + preset: string; // keep this generic, as we'll be adding more presets in the future and this is only in a read capacity +}; +/** + * Metadata about an existing index. + * + * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. + * See {@link VectorizeIndexInfo} for its post-beta equivalent. + */ +interface VectorizeIndexDetails { + /** The unique ID of the index */ + readonly id: string; + /** The name of the index. */ + name: string; + /** (optional) A human readable description for the index. */ + description?: string; + /** The index configuration, including the dimension size and distance metric. */ + config: VectorizeIndexConfig; + /** The number of records containing vectors within the index. */ + vectorsCount: number; +} +/** + * Metadata about an existing index. + */ +interface VectorizeIndexInfo { + /** The number of records containing vectors within the index. 
*/ + vectorCount: number; + /** Number of dimensions the index has been configured for. */ + dimensions: number; + /** ISO 8601 datetime of the last processed mutation on in the index. All changes before this mutation will be reflected in the index state. */ + processedUpToDatetime: number; + /** UUIDv4 of the last mutation processed by the index. All changes before this mutation will be reflected in the index state. */ + processedUpToMutation: number; +} +/** + * Represents a single vector value set along with its associated metadata. + */ +interface VectorizeVector { + /** The ID for the vector. This can be user-defined, and must be unique. It should uniquely identify the object, and is best set based on the ID of what the vector represents. */ + id: string; + /** The vector values */ + values: VectorFloatArray | number[]; + /** The namespace this vector belongs to. */ + namespace?: string; + /** Metadata associated with the vector. Includes the values of other fields and potentially additional details. */ + metadata?: Record; +} +/** + * Represents a matched vector for a query along with its score and (if specified) the matching vector information. + */ +type VectorizeMatch = Pick, "values"> & Omit & { + /** The score or rank for similarity, when returned as a result */ + score: number; +}; +/** + * A set of matching {@link VectorizeMatch} for a particular query. + */ +interface VectorizeMatches { + matches: VectorizeMatch[]; + count: number; +} +/** + * Results of an operation that performed a mutation on a set of vectors. + * Here, `ids` is a list of vectors that were successfully processed. + * + * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. + * See {@link VectorizeAsyncMutation} for its post-beta equivalent. + */ +interface VectorizeVectorMutation { + /* List of ids of vectors that were successfully processed. */ + ids: string[]; + /* Total count of the number of processed vectors. 
*/ + count: number; +} +/** + * Result type indicating a mutation on the Vectorize Index. + * Actual mutations are processed async where the `mutationId` is the unique identifier for the operation. + */ +interface VectorizeAsyncMutation { + /** The unique identifier for the async mutation operation containing the changeset. */ + mutationId: string; +} +/** + * A Vectorize Vector Search Index for querying vectors/embeddings. + * + * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. + * See {@link Vectorize} for its new implementation. + */ +declare abstract class VectorizeIndex { + /** + * Get information about the currently bound index. + * @returns A promise that resolves with information about the current index. + */ + public describe(): Promise; + /** + * Use the provided vector to perform a similarity search across the index. + * @param vector Input vector that will be used to drive the similarity search. + * @param options Configuration options to massage the returned data. + * @returns A promise that resolves with matched and scored vectors. + */ + public query(vector: VectorFloatArray | number[], options?: VectorizeQueryOptions): Promise; + /** + * Insert a list of vectors into the index dataset. If a provided id exists, an error will be thrown. + * @param vectors List of vectors that will be inserted. + * @returns A promise that resolves with the ids & count of records that were successfully processed. + */ + public insert(vectors: VectorizeVector[]): Promise; + /** + * Upsert a list of vectors into the index dataset. If a provided id exists, it will be replaced with the new values. + * @param vectors List of vectors that will be upserted. + * @returns A promise that resolves with the ids & count of records that were successfully processed. + */ + public upsert(vectors: VectorizeVector[]): Promise; + /** + * Delete a list of vectors with a matching id. 
+ * @param ids List of vector ids that should be deleted. + * @returns A promise that resolves with the ids & count of records that were successfully processed (and thus deleted). + */ + public deleteByIds(ids: string[]): Promise; + /** + * Get a list of vectors with a matching id. + * @param ids List of vector ids that should be returned. + * @returns A promise that resolves with the raw unscored vectors matching the id set. + */ + public getByIds(ids: string[]): Promise; +} +/** + * A Vectorize Vector Search Index for querying vectors/embeddings. + * + * Mutations in this version are async, returning a mutation id. + */ +declare abstract class Vectorize { + /** + * Get information about the currently bound index. + * @returns A promise that resolves with information about the current index. + */ + public describe(): Promise; + /** + * Use the provided vector to perform a similarity search across the index. + * @param vector Input vector that will be used to drive the similarity search. + * @param options Configuration options to massage the returned data. + * @returns A promise that resolves with matched and scored vectors. + */ + public query(vector: VectorFloatArray | number[], options?: VectorizeQueryOptions): Promise; + /** + * Use the provided vector-id to perform a similarity search across the index. + * @param vectorId Id for a vector in the index against which the index should be queried. + * @param options Configuration options to massage the returned data. + * @returns A promise that resolves with matched and scored vectors. + */ + public queryById(vectorId: string, options?: VectorizeQueryOptions): Promise; + /** + * Insert a list of vectors into the index dataset. If a provided id exists, an error will be thrown. + * @param vectors List of vectors that will be inserted. + * @returns A promise that resolves with a unique identifier of a mutation containing the insert changeset. 
+ */ + public insert(vectors: VectorizeVector[]): Promise; + /** + * Upsert a list of vectors into the index dataset. If a provided id exists, it will be replaced with the new values. + * @param vectors List of vectors that will be upserted. + * @returns A promise that resolves with a unique identifier of a mutation containing the upsert changeset. + */ + public upsert(vectors: VectorizeVector[]): Promise; + /** + * Delete a list of vectors with a matching id. + * @param ids List of vector ids that should be deleted. + * @returns A promise that resolves with a unique identifier of a mutation containing the delete changeset. + */ + public deleteByIds(ids: string[]): Promise; + /** + * Get a list of vectors with a matching id. + * @param ids List of vector ids that should be returned. + * @returns A promise that resolves with the raw unscored vectors matching the id set. + */ + public getByIds(ids: string[]): Promise; +} +/** + * The interface for "version_metadata" binding + * providing metadata about the Worker Version using this binding. + */ +type WorkerVersionMetadata = { + /** The ID of the Worker Version using this binding */ + id: string; + /** The tag of the Worker Version using this binding */ + tag: string; + /** The timestamp of when the Worker Version was uploaded */ + timestamp: string; +}; +interface DynamicDispatchLimits { + /** + * Limit CPU time in milliseconds. + */ + cpuMs?: number; + /** + * Limit number of subrequests. + */ + subRequests?: number; +} +interface DynamicDispatchOptions { + /** + * Limit resources of invoked Worker script. + */ + limits?: DynamicDispatchLimits; + /** + * Arguments for outbound Worker script, if configured. + */ + outbound?: { + [key: string]: any; + }; +} +interface DispatchNamespace { + /** + * @param name Name of the Worker script. + * @param args Arguments to Worker script. + * @param options Options for Dynamic Dispatch invocation. 
+ * @returns A Fetcher object that allows you to send requests to the Worker script. + * @throws If the Worker script does not exist in this dispatch namespace, an error will be thrown. + */ + get(name: string, args?: { + [key: string]: any; + }, options?: DynamicDispatchOptions): Fetcher; +} +declare module 'cloudflare:workflows' { + /** + * NonRetryableError allows for a user to throw a fatal error + * that makes a Workflow instance fail immediately without triggering a retry + */ + export class NonRetryableError extends Error { + public constructor(message: string, name?: string); + } +} +declare abstract class Workflow { + /** + * Get a handle to an existing instance of the Workflow. + * @param id Id for the instance of this Workflow + * @returns A promise that resolves with a handle for the Instance + */ + public get(id: string): Promise; + /** + * Create a new instance and return a handle to it. If a provided id exists, an error will be thrown. + * @param options Options when creating an instance including id and params + * @returns A promise that resolves with a handle for the Instance + */ + public create(options?: WorkflowInstanceCreateOptions): Promise; + /** + * Create a batch of instances and return handle for all of them. If a provided id exists, an error will be thrown. + * `createBatch` is limited at 100 instances at a time or when the RPC limit for the batch (1MiB) is reached. + * @param batch List of Options when creating an instance including name and params + * @returns A promise that resolves with a list of handles for the created instances. + */ + public createBatch(batch: WorkflowInstanceCreateOptions[]): Promise; +} +interface WorkflowInstanceCreateOptions { + /** + * An id for your Workflow instance. Must be unique within the Workflow. 
+ */ + id?: string; + /** + * The event payload the Workflow instance is triggered with + */ + params?: PARAMS; +} +type InstanceStatus = { + status: 'queued' // means that instance is waiting to be started (see concurrency limits) + | 'running' | 'paused' | 'errored' | 'terminated' // user terminated the instance while it was running + | 'complete' | 'waiting' // instance is hibernating and waiting for sleep or event to finish + | 'waitingForPause' // instance is finishing the current work to pause + | 'unknown'; + error?: string; + output?: object; +}; +interface WorkflowError { + code?: number; + message: string; +} +declare abstract class WorkflowInstance { + public id: string; + /** + * Pause the instance. + */ + public pause(): Promise; + /** + * Resume the instance. If it is already running, an error will be thrown. + */ + public resume(): Promise; + /** + * Terminate the instance. If it is errored, terminated or complete, an error will be thrown. + */ + public terminate(): Promise; + /** + * Restart the instance. + */ + public restart(): Promise; + /** + * Returns the current status of the instance. + */ + public status(): Promise; + /** + * Send an event to this instance. 
+ */ + public sendEvent({ type, payload, }: { + type: string; + payload: unknown; + }): Promise; +} From ca4398385a4edbba8970a3b4ff959c5d6bca2777 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Mon, 6 Oct 2025 00:39:49 +0200 Subject: [PATCH 21/29] Remove dependency '@cloudflare/workers-types' --- apps/api/.gitignore | 1 + apps/api/eslint.config.mjs | 9 +- apps/api/package.json | 1 - apps/api/src/cron.ts | 1 - apps/api/src/db/index.ts | 1 - apps/api/src/email.ts | 1 - .../audio/whisper-large-v3-turbo-node.ts | 20 +- apps/api/src/runtime/object-store.ts | 1 - apps/api/tsconfig.json | 2 - apps/api/worker-configuration.d.ts | 5730 ----------------- pnpm-lock.yaml | 3 - 11 files changed, 20 insertions(+), 5750 deletions(-) delete mode 100644 apps/api/worker-configuration.d.ts diff --git a/apps/api/.gitignore b/apps/api/.gitignore index ce8386dc..e31c93df 100644 --- a/apps/api/.gitignore +++ b/apps/api/.gitignore @@ -170,6 +170,7 @@ dist .dev.vars .wrangler/ +worker-configuration.d.ts # macOS .DS_Store diff --git a/apps/api/eslint.config.mjs b/apps/api/eslint.config.mjs index 393124e0..7e867cac 100644 --- a/apps/api/eslint.config.mjs +++ b/apps/api/eslint.config.mjs @@ -6,7 +6,14 @@ import globals from "globals"; import tseslint from "typescript-eslint"; export default defineConfig([ - { ignores: ["dist", ".wrangler/**", "node_modules/**"] }, + { + ignores: [ + "dist", + ".wrangler/**", + "node_modules/**", + "worker-configuration.d.ts", + ], + }, { files: ["**/*.{js,mjs,cjs,ts}"], plugins: { diff --git a/apps/api/package.json b/apps/api/package.json index 9468d629..d062a7fd 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -24,7 +24,6 @@ }, "devDependencies": { "@cloudflare/vitest-pool-workers": "^0.8.58", - "@cloudflare/workers-types": "^4.20250726.0", "@eslint/js": "^9.26.0", "@types/mailparser": "^3.4.6", "@types/node": "^22.15.3", diff --git a/apps/api/src/cron.ts b/apps/api/src/cron.ts index 458af3b4..b63c3c57 100644 --- a/apps/api/src/cron.ts 
+++ b/apps/api/src/cron.ts @@ -1,4 +1,3 @@ -import { ExecutionContext } from "@cloudflare/workers-types"; import { Node, Workflow as WorkflowType } from "@dafthunk/types"; import CronParser from "cron-parser"; diff --git a/apps/api/src/db/index.ts b/apps/api/src/db/index.ts index 00348fd8..a22154a3 100644 --- a/apps/api/src/db/index.ts +++ b/apps/api/src/db/index.ts @@ -1,4 +1,3 @@ -import type { D1Database } from "@cloudflare/workers-types"; import { drizzle } from "drizzle-orm/d1"; import { type DrizzleD1Database } from "drizzle-orm/d1"; diff --git a/apps/api/src/email.ts b/apps/api/src/email.ts index b3139172..d3a3bae0 100644 --- a/apps/api/src/email.ts +++ b/apps/api/src/email.ts @@ -1,4 +1,3 @@ -import { ExecutionContext } from "@cloudflare/workers-types"; import { Node, Workflow as WorkflowType } from "@dafthunk/types"; import { Bindings } from "./context"; diff --git a/apps/api/src/nodes/audio/whisper-large-v3-turbo-node.ts b/apps/api/src/nodes/audio/whisper-large-v3-turbo-node.ts index 994f906f..7d587717 100644 --- a/apps/api/src/nodes/audio/whisper-large-v3-turbo-node.ts +++ b/apps/api/src/nodes/audio/whisper-large-v3-turbo-node.ts @@ -1,4 +1,3 @@ -import type { Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input } from "@cloudflare/workers-types/experimental"; import { NodeExecution, NodeType } from "@dafthunk/types"; import { NodeContext } from "../types"; @@ -106,17 +105,15 @@ export class WhisperLargeV3TurboNode extends ExecutableNode { const audioBase64 = btoa(String.fromCharCode(...audio.data)); // Prepare the request parameters - const params: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input = { + const params = { audio: audioBase64, + ...(task && { task }), + ...(language && { language }), + ...(vad_filter !== undefined && { vad_filter }), + ...(initial_prompt && { initial_prompt }), + ...(prefix && { prefix }), }; - // Add optional parameters if provided - if (task) params.task = task; - if (language) params.language = language; - if (vad_filter !== undefined) 
params.vad_filter = vad_filter; - if (initial_prompt) params.initial_prompt = initial_prompt; - if (prefix) params.prefix = prefix; - // Call Cloudflare AI Whisper Large V3 Turbo model const response = await context.env.AI.run( "@cf/openai/whisper-large-v3-turbo", @@ -124,6 +121,11 @@ export class WhisperLargeV3TurboNode extends ExecutableNode { context.env.AI_OPTIONS ); + // Handle streaming response + if (response instanceof ReadableStream) { + throw new Error("Streaming response not supported for whisper model"); + } + // Extract the results const output = { text: response.text, diff --git a/apps/api/src/runtime/object-store.ts b/apps/api/src/runtime/object-store.ts index 637487d7..235d89d8 100644 --- a/apps/api/src/runtime/object-store.ts +++ b/apps/api/src/runtime/object-store.ts @@ -1,4 +1,3 @@ -import type { R2Object } from "@cloudflare/workers-types"; import { ObjectReference, Workflow, WorkflowExecution } from "@dafthunk/types"; import { v7 as uuid } from "uuid"; diff --git a/apps/api/tsconfig.json b/apps/api/tsconfig.json index 709d55a8..27744b58 100644 --- a/apps/api/tsconfig.json +++ b/apps/api/tsconfig.json @@ -34,8 +34,6 @@ // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */ "types": [ - "@cloudflare/workers-types/experimental", - "@cloudflare/workers-types/2023-07-01", "@cloudflare/vitest-pool-workers" ] /* Specify type package names to be included without being referenced in a source file. */, // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. 
*/ diff --git a/apps/api/worker-configuration.d.ts b/apps/api/worker-configuration.d.ts deleted file mode 100644 index 80156f0d..00000000 --- a/apps/api/worker-configuration.d.ts +++ /dev/null @@ -1,5730 +0,0 @@ -// Generated by Wrangler by running `wrangler types` (hash: c25374baea9e1f84be8ff0028404d497) -// Runtime types generated with workerd@1.20250409.0 2024-10-22 nodejs_compat -declare namespace Cloudflare { - interface Env { - KV: KVNamespace; - WEB_HOST: string; - EMAIL_DOMAIN: string; - SECRET_MASTER_KEY: string; - DATASETS_AUTORAG: string; - CLOUDFLARE_ENV: string; - CLOUDFLARE_ACCOUNT_ID: string; - CLOUDFLARE_API_TOKEN: string; - CLOUDFLARE_AI_GATEWAY_ID: string; - JWT_SECRET: string; - GITHUB_CLIENT_ID: string; - GITHUB_CLIENT_SECRET: string; - GOOGLE_CLIENT_ID: string; - GOOGLE_CLIENT_SECRET: string; - TWILIO_ACCOUNT_SID: string; - TWILIO_AUTH_TOKEN: string; - TWILIO_PHONE_NUMBER: string; - SENDGRID_API_KEY: string; - SENDGRID_DEFAULT_FROM: string; - RESEND_API_KEY: string; - RESEND_DEFAULT_FROM: string; - AWS_ACCESS_KEY_ID: string; - AWS_SECRET_ACCESS_KEY: string; - AWS_REGION: string; - SES_DEFAULT_FROM: string; - GEMINI_API_KEY: string; - HUGGINGFACE_API_KEY: string; - WORKFLOW_SESSION: DurableObjectNamespace /* WorkflowSession from dafthunk-api */; - RESSOURCES: R2Bucket; - DATASETS: R2Bucket; - DB: D1Database; - COMPUTE: AnalyticsEngineDataset; - RATE_LIMIT_DEFAULT: RateLimit; - RATE_LIMIT_AUTH: RateLimit; - RATE_LIMIT_EXECUTE: RateLimit; - AI: Ai; - EXECUTE: Workflow; - } -} -interface Env extends Cloudflare.Env {} - -// Begin runtime types -/*! ***************************************************************************** -Copyright (c) Cloudflare. All rights reserved. -Copyright (c) Microsoft Corporation. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); you may not use -this file except in compliance with the License. 
You may obtain a copy of the -License at http://www.apache.org/licenses/LICENSE-2.0 -THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED -WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, -MERCHANTABLITY OR NON-INFRINGEMENT. -See the Apache Version 2.0 License for specific language governing permissions -and limitations under the License. -***************************************************************************** */ -/* eslint-disable */ -// noinspection JSUnusedGlobalSymbols -declare var onmessage: never; -/** - * An abnormal event (called an exception) which occurs as a result of calling a method or accessing a property of a web API. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException) - */ -declare class DOMException extends Error { - constructor(message?: string, name?: string); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/message) */ - readonly message: string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/name) */ - readonly name: string; - /** - * @deprecated - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/DOMException/code) - */ - readonly code: number; - static readonly INDEX_SIZE_ERR: number; - static readonly DOMSTRING_SIZE_ERR: number; - static readonly HIERARCHY_REQUEST_ERR: number; - static readonly WRONG_DOCUMENT_ERR: number; - static readonly INVALID_CHARACTER_ERR: number; - static readonly NO_DATA_ALLOWED_ERR: number; - static readonly NO_MODIFICATION_ALLOWED_ERR: number; - static readonly NOT_FOUND_ERR: number; - static readonly NOT_SUPPORTED_ERR: number; - static readonly INUSE_ATTRIBUTE_ERR: number; - static readonly INVALID_STATE_ERR: number; - static readonly SYNTAX_ERR: number; - static readonly INVALID_MODIFICATION_ERR: number; - static readonly NAMESPACE_ERR: number; - static readonly INVALID_ACCESS_ERR: 
number; - static readonly VALIDATION_ERR: number; - static readonly TYPE_MISMATCH_ERR: number; - static readonly SECURITY_ERR: number; - static readonly NETWORK_ERR: number; - static readonly ABORT_ERR: number; - static readonly URL_MISMATCH_ERR: number; - static readonly QUOTA_EXCEEDED_ERR: number; - static readonly TIMEOUT_ERR: number; - static readonly INVALID_NODE_TYPE_ERR: number; - static readonly DATA_CLONE_ERR: number; - get stack(): any; - set stack(value: any); -} -type WorkerGlobalScopeEventMap = { - fetch: FetchEvent; - scheduled: ScheduledEvent; - queue: QueueEvent; - unhandledrejection: PromiseRejectionEvent; - rejectionhandled: PromiseRejectionEvent; -}; -declare abstract class WorkerGlobalScope extends EventTarget { - EventTarget: typeof EventTarget; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console) */ -interface Console { - "assert"(condition?: boolean, ...data: any[]): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/clear_static) */ - clear(): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/count_static) */ - count(label?: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/countreset_static) */ - countReset(label?: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/debug_static) */ - debug(...data: any[]): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/dir_static) */ - dir(item?: any, options?: any): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/dirxml_static) */ - dirxml(...data: any[]): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/error_static) */ - error(...data: any[]): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/group_static) */ - group(...data: any[]): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/groupcollapsed_static) */ - 
groupCollapsed(...data: any[]): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/groupend_static) */ - groupEnd(): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/info_static) */ - info(...data: any[]): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/log_static) */ - log(...data: any[]): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/table_static) */ - table(tabularData?: any, properties?: string[]): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/time_static) */ - time(label?: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/timeend_static) */ - timeEnd(label?: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/timelog_static) */ - timeLog(label?: string, ...data: any[]): void; - timeStamp(label?: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/trace_static) */ - trace(...data: any[]): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/console/warn_static) */ - warn(...data: any[]): void; -} -declare const console: Console; -type BufferSource = ArrayBufferView | ArrayBuffer; -type TypedArray = Int8Array | Uint8Array | Uint8ClampedArray | Int16Array | Uint16Array | Int32Array | Uint32Array | Float32Array | Float64Array | BigInt64Array | BigUint64Array; -declare namespace WebAssembly { - class CompileError extends Error { - constructor(message?: string); - } - class RuntimeError extends Error { - constructor(message?: string); - } - type ValueType = "anyfunc" | "externref" | "f32" | "f64" | "i32" | "i64" | "v128"; - interface GlobalDescriptor { - value: ValueType; - mutable?: boolean; - } - class Global { - constructor(descriptor: GlobalDescriptor, value?: any); - value: any; - valueOf(): any; - } - type ImportValue = ExportValue | number; - type ModuleImports = Record; - type Imports = 
Record; - type ExportValue = Function | Global | Memory | Table; - type Exports = Record; - class Instance { - constructor(module: Module, imports?: Imports); - readonly exports: Exports; - } - interface MemoryDescriptor { - initial: number; - maximum?: number; - shared?: boolean; - } - class Memory { - constructor(descriptor: MemoryDescriptor); - readonly buffer: ArrayBuffer; - grow(delta: number): number; - } - type ImportExportKind = "function" | "global" | "memory" | "table"; - interface ModuleExportDescriptor { - kind: ImportExportKind; - name: string; - } - interface ModuleImportDescriptor { - kind: ImportExportKind; - module: string; - name: string; - } - abstract class Module { - static customSections(module: Module, sectionName: string): ArrayBuffer[]; - static exports(module: Module): ModuleExportDescriptor[]; - static imports(module: Module): ModuleImportDescriptor[]; - } - type TableKind = "anyfunc" | "externref"; - interface TableDescriptor { - element: TableKind; - initial: number; - maximum?: number; - } - class Table { - constructor(descriptor: TableDescriptor, value?: any); - readonly length: number; - get(index: number): any; - grow(delta: number, value?: any): number; - set(index: number, value?: any): void; - } - function instantiate(module: Module, imports?: Imports): Promise; - function validate(bytes: BufferSource): boolean; -} -/** - * This ServiceWorker API interface represents the global execution context of a service worker. - * Available only in secure contexts. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ServiceWorkerGlobalScope) - */ -interface ServiceWorkerGlobalScope extends WorkerGlobalScope { - DOMException: typeof DOMException; - WorkerGlobalScope: typeof WorkerGlobalScope; - btoa(data: string): string; - atob(data: string): string; - setTimeout(callback: (...args: any[]) => void, msDelay?: number): number; - setTimeout(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; - clearTimeout(timeoutId: number | null): void; - setInterval(callback: (...args: any[]) => void, msDelay?: number): number; - setInterval(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; - clearInterval(timeoutId: number | null): void; - queueMicrotask(task: Function): void; - structuredClone(value: T, options?: StructuredSerializeOptions): T; - reportError(error: any): void; - fetch(input: RequestInfo | URL, init?: RequestInit): Promise; - self: ServiceWorkerGlobalScope; - crypto: Crypto; - caches: CacheStorage; - scheduler: Scheduler; - performance: Performance; - Cloudflare: Cloudflare; - readonly origin: string; - Event: typeof Event; - ExtendableEvent: typeof ExtendableEvent; - CustomEvent: typeof CustomEvent; - PromiseRejectionEvent: typeof PromiseRejectionEvent; - FetchEvent: typeof FetchEvent; - TailEvent: typeof TailEvent; - TraceEvent: typeof TailEvent; - ScheduledEvent: typeof ScheduledEvent; - MessageEvent: typeof MessageEvent; - CloseEvent: typeof CloseEvent; - ReadableStreamDefaultReader: typeof ReadableStreamDefaultReader; - ReadableStreamBYOBReader: typeof ReadableStreamBYOBReader; - ReadableStream: typeof ReadableStream; - WritableStream: typeof WritableStream; - WritableStreamDefaultWriter: typeof WritableStreamDefaultWriter; - TransformStream: typeof TransformStream; - ByteLengthQueuingStrategy: typeof ByteLengthQueuingStrategy; - CountQueuingStrategy: typeof CountQueuingStrategy; - ErrorEvent: typeof ErrorEvent; - EventSource: typeof EventSource; - 
ReadableStreamBYOBRequest: typeof ReadableStreamBYOBRequest; - ReadableStreamDefaultController: typeof ReadableStreamDefaultController; - ReadableByteStreamController: typeof ReadableByteStreamController; - WritableStreamDefaultController: typeof WritableStreamDefaultController; - TransformStreamDefaultController: typeof TransformStreamDefaultController; - CompressionStream: typeof CompressionStream; - DecompressionStream: typeof DecompressionStream; - TextEncoderStream: typeof TextEncoderStream; - TextDecoderStream: typeof TextDecoderStream; - Headers: typeof Headers; - Body: typeof Body; - Request: typeof Request; - Response: typeof Response; - WebSocket: typeof WebSocket; - WebSocketPair: typeof WebSocketPair; - WebSocketRequestResponsePair: typeof WebSocketRequestResponsePair; - AbortController: typeof AbortController; - AbortSignal: typeof AbortSignal; - TextDecoder: typeof TextDecoder; - TextEncoder: typeof TextEncoder; - navigator: Navigator; - Navigator: typeof Navigator; - URL: typeof URL; - URLSearchParams: typeof URLSearchParams; - URLPattern: typeof URLPattern; - Blob: typeof Blob; - File: typeof File; - FormData: typeof FormData; - Crypto: typeof Crypto; - SubtleCrypto: typeof SubtleCrypto; - CryptoKey: typeof CryptoKey; - CacheStorage: typeof CacheStorage; - Cache: typeof Cache; - FixedLengthStream: typeof FixedLengthStream; - IdentityTransformStream: typeof IdentityTransformStream; - HTMLRewriter: typeof HTMLRewriter; -} -declare function addEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetAddEventListenerOptions | boolean): void; -declare function removeEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetEventListenerOptions | boolean): void; -/** - * Dispatches a synthetic event event to target and returns true if either event's cancelable attribute value is false or its preventDefault() method was not invoked, and false otherwise. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/dispatchEvent) - */ -declare function dispatchEvent(event: WorkerGlobalScopeEventMap[keyof WorkerGlobalScopeEventMap]): boolean; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/btoa) */ -declare function btoa(data: string): string; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Window/atob) */ -declare function atob(data: string): string; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/setTimeout) */ -declare function setTimeout(callback: (...args: any[]) => void, msDelay?: number): number; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/setTimeout) */ -declare function setTimeout(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/clearTimeout) */ -declare function clearTimeout(timeoutId: number | null): void; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/setInterval) */ -declare function setInterval(callback: (...args: any[]) => void, msDelay?: number): number; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/setInterval) */ -declare function setInterval(callback: (...args: Args) => void, msDelay?: number, ...args: Args): number; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/clearInterval) */ -declare function clearInterval(timeoutId: number | null): void; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/queueMicrotask) */ -declare function queueMicrotask(task: Function): void; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/structuredClone) */ -declare function structuredClone(value: T, options?: StructuredSerializeOptions): T; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/reportError) */ -declare function reportError(error: any): void; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/fetch) */ -declare function fetch(input: 
RequestInfo | URL, init?: RequestInit): Promise; -declare const self: ServiceWorkerGlobalScope; -/** -* The Web Crypto API provides a set of low-level functions for common cryptographic tasks. -* The Workers runtime implements the full surface of this API, but with some differences in -* the [supported algorithms](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/#supported-algorithms) -* compared to those implemented in most browsers. -* -* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/) -*/ -declare const crypto: Crypto; -/** -* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. -* -* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) -*/ -declare const caches: CacheStorage; -declare const scheduler: Scheduler; -/** -* The Workers runtime supports a subset of the Performance API, used to measure timing and performance, -* as well as timing of subrequests and other operations. 
-* -* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/) -*/ -declare const performance: Performance; -declare const Cloudflare: Cloudflare; -declare const origin: string; -declare const navigator: Navigator; -interface TestController { -} -interface ExecutionContext { - waitUntil(promise: Promise): void; - passThroughOnException(): void; - props: any; -} -type ExportedHandlerFetchHandler = (request: Request>, env: Env, ctx: ExecutionContext) => Response | Promise; -type ExportedHandlerTailHandler = (events: TraceItem[], env: Env, ctx: ExecutionContext) => void | Promise; -type ExportedHandlerTraceHandler = (traces: TraceItem[], env: Env, ctx: ExecutionContext) => void | Promise; -type ExportedHandlerTailStreamHandler = (event: TailStream.TailEvent, env: Env, ctx: ExecutionContext) => TailStream.TailEventHandlerType | Promise; -type ExportedHandlerScheduledHandler = (controller: ScheduledController, env: Env, ctx: ExecutionContext) => void | Promise; -type ExportedHandlerQueueHandler = (batch: MessageBatch, env: Env, ctx: ExecutionContext) => void | Promise; -type ExportedHandlerTestHandler = (controller: TestController, env: Env, ctx: ExecutionContext) => void | Promise; -interface ExportedHandler { - fetch?: ExportedHandlerFetchHandler; - tail?: ExportedHandlerTailHandler; - trace?: ExportedHandlerTraceHandler; - tailStream?: ExportedHandlerTailStreamHandler; - scheduled?: ExportedHandlerScheduledHandler; - test?: ExportedHandlerTestHandler; - email?: EmailExportedHandler; - queue?: ExportedHandlerQueueHandler; -} -interface StructuredSerializeOptions { - transfer?: any[]; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent) */ -declare abstract class PromiseRejectionEvent extends Event { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent/promise) */ - readonly promise: Promise; - /* [MDN 
Reference](https://developer.mozilla.org/docs/Web/API/PromiseRejectionEvent/reason) */ - readonly reason: any; -} -declare abstract class Navigator { - sendBeacon(url: string, body?: (ReadableStream | string | (ArrayBuffer | ArrayBufferView) | Blob | FormData | URLSearchParams | URLSearchParams)): boolean; - readonly userAgent: string; - readonly hardwareConcurrency: number; -} -/** -* The Workers runtime supports a subset of the Performance API, used to measure timing and performance, -* as well as timing of subrequests and other operations. -* -* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/) -*/ -interface Performance { - /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/#performancetimeorigin) */ - readonly timeOrigin: number; - /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/performance/#performancenow) */ - now(): number; -} -interface AlarmInvocationInfo { - readonly isRetry: boolean; - readonly retryCount: number; -} -interface Cloudflare { - readonly compatibilityFlags: Record; -} -interface DurableObject { - fetch(request: Request): Response | Promise; - alarm?(alarmInfo?: AlarmInvocationInfo): void | Promise; - webSocketMessage?(ws: WebSocket, message: string | ArrayBuffer): void | Promise; - webSocketClose?(ws: WebSocket, code: number, reason: string, wasClean: boolean): void | Promise; - webSocketError?(ws: WebSocket, error: unknown): void | Promise; -} -type DurableObjectStub = Fetcher & { - readonly id: DurableObjectId; - readonly name?: string; -}; -interface DurableObjectId { - toString(): string; - equals(other: DurableObjectId): boolean; - readonly name?: string; -} -interface DurableObjectNamespace { - newUniqueId(options?: DurableObjectNamespaceNewUniqueIdOptions): DurableObjectId; - idFromName(name: string): DurableObjectId; - idFromString(id: string): DurableObjectId; - get(id: DurableObjectId, options?: 
DurableObjectNamespaceGetDurableObjectOptions): DurableObjectStub; - jurisdiction(jurisdiction: DurableObjectJurisdiction): DurableObjectNamespace; -} -type DurableObjectJurisdiction = "eu" | "fedramp"; -interface DurableObjectNamespaceNewUniqueIdOptions { - jurisdiction?: DurableObjectJurisdiction; -} -type DurableObjectLocationHint = "wnam" | "enam" | "sam" | "weur" | "eeur" | "apac" | "oc" | "afr" | "me"; -interface DurableObjectNamespaceGetDurableObjectOptions { - locationHint?: DurableObjectLocationHint; -} -interface DurableObjectState { - waitUntil(promise: Promise): void; - readonly id: DurableObjectId; - readonly storage: DurableObjectStorage; - container?: Container; - blockConcurrencyWhile(callback: () => Promise): Promise; - acceptWebSocket(ws: WebSocket, tags?: string[]): void; - getWebSockets(tag?: string): WebSocket[]; - setWebSocketAutoResponse(maybeReqResp?: WebSocketRequestResponsePair): void; - getWebSocketAutoResponse(): WebSocketRequestResponsePair | null; - getWebSocketAutoResponseTimestamp(ws: WebSocket): Date | null; - setHibernatableWebSocketEventTimeout(timeoutMs?: number): void; - getHibernatableWebSocketEventTimeout(): number | null; - getTags(ws: WebSocket): string[]; - abort(reason?: string): void; -} -interface DurableObjectTransaction { - get(key: string, options?: DurableObjectGetOptions): Promise; - get(keys: string[], options?: DurableObjectGetOptions): Promise>; - list(options?: DurableObjectListOptions): Promise>; - put(key: string, value: T, options?: DurableObjectPutOptions): Promise; - put(entries: Record, options?: DurableObjectPutOptions): Promise; - delete(key: string, options?: DurableObjectPutOptions): Promise; - delete(keys: string[], options?: DurableObjectPutOptions): Promise; - rollback(): void; - getAlarm(options?: DurableObjectGetAlarmOptions): Promise; - setAlarm(scheduledTime: number | Date, options?: DurableObjectSetAlarmOptions): Promise; - deleteAlarm(options?: DurableObjectSetAlarmOptions): Promise; -} 
-interface DurableObjectStorage { - get(key: string, options?: DurableObjectGetOptions): Promise; - get(keys: string[], options?: DurableObjectGetOptions): Promise>; - list(options?: DurableObjectListOptions): Promise>; - put(key: string, value: T, options?: DurableObjectPutOptions): Promise; - put(entries: Record, options?: DurableObjectPutOptions): Promise; - delete(key: string, options?: DurableObjectPutOptions): Promise; - delete(keys: string[], options?: DurableObjectPutOptions): Promise; - deleteAll(options?: DurableObjectPutOptions): Promise; - transaction(closure: (txn: DurableObjectTransaction) => Promise): Promise; - getAlarm(options?: DurableObjectGetAlarmOptions): Promise; - setAlarm(scheduledTime: number | Date, options?: DurableObjectSetAlarmOptions): Promise; - deleteAlarm(options?: DurableObjectSetAlarmOptions): Promise; - sync(): Promise; - sql: SqlStorage; - transactionSync(closure: () => T): T; - getCurrentBookmark(): Promise; - getBookmarkForTime(timestamp: number | Date): Promise; - onNextSessionRestoreBookmark(bookmark: string): Promise; -} -interface DurableObjectListOptions { - start?: string; - startAfter?: string; - end?: string; - prefix?: string; - reverse?: boolean; - limit?: number; - allowConcurrency?: boolean; - noCache?: boolean; -} -interface DurableObjectGetOptions { - allowConcurrency?: boolean; - noCache?: boolean; -} -interface DurableObjectGetAlarmOptions { - allowConcurrency?: boolean; -} -interface DurableObjectPutOptions { - allowConcurrency?: boolean; - allowUnconfirmed?: boolean; - noCache?: boolean; -} -interface DurableObjectSetAlarmOptions { - allowConcurrency?: boolean; - allowUnconfirmed?: boolean; -} -declare class WebSocketRequestResponsePair { - constructor(request: string, response: string); - get request(): string; - get response(): string; -} -interface AnalyticsEngineDataset { - writeDataPoint(event?: AnalyticsEngineDataPoint): void; -} -interface AnalyticsEngineDataPoint { - indexes?: ((ArrayBuffer | string) 
| null)[]; - doubles?: number[]; - blobs?: ((ArrayBuffer | string) | null)[]; -} -/** - * An event which takes place in the DOM. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event) - */ -declare class Event { - constructor(type: string, init?: EventInit); - /** - * Returns the type of event, e.g. "click", "hashchange", or "submit". - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/type) - */ - get type(): string; - /** - * Returns the event's phase, which is one of NONE, CAPTURING_PHASE, AT_TARGET, and BUBBLING_PHASE. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/eventPhase) - */ - get eventPhase(): number; - /** - * Returns true or false depending on how event was initialized. True if event invokes listeners past a ShadowRoot node that is the root of its target, and false otherwise. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/composed) - */ - get composed(): boolean; - /** - * Returns true or false depending on how event was initialized. True if event goes through its target's ancestors in reverse tree order, and false otherwise. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/bubbles) - */ - get bubbles(): boolean; - /** - * Returns true or false depending on how event was initialized. Its return value does not always carry meaning, but true can indicate that part of the operation during which event was dispatched, can be canceled by invoking the preventDefault() method. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelable) - */ - get cancelable(): boolean; - /** - * Returns true if preventDefault() was invoked successfully to indicate cancelation, and false otherwise. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/defaultPrevented) - */ - get defaultPrevented(): boolean; - /** - * @deprecated - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/returnValue) - */ - get returnValue(): boolean; - /** - * Returns the object whose event listener's callback is currently being invoked. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/currentTarget) - */ - get currentTarget(): EventTarget | undefined; - /** - * Returns the object to which event is dispatched (its target). - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/target) - */ - get target(): EventTarget | undefined; - /** - * @deprecated - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/srcElement) - */ - get srcElement(): EventTarget | undefined; - /** - * Returns the event's timestamp as the number of milliseconds measured relative to the time origin. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/timeStamp) - */ - get timeStamp(): number; - /** - * Returns true if event was dispatched by the user agent, and false otherwise. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/isTrusted) - */ - get isTrusted(): boolean; - /** - * @deprecated - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelBubble) - */ - get cancelBubble(): boolean; - /** - * @deprecated - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/cancelBubble) - */ - set cancelBubble(value: boolean); - /** - * Invoking this method prevents event from reaching any registered event listeners after the current one finishes running and, when dispatched in a tree, also prevents event from reaching any other objects. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/stopImmediatePropagation) - */ - stopImmediatePropagation(): void; - /** - * If invoked when the cancelable attribute value is true, and while executing a listener for the event with passive set to false, signals to the operation that caused event to be dispatched that it needs to be canceled. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/preventDefault) - */ - preventDefault(): void; - /** - * When dispatched in a tree, invoking this method prevents event from reaching any objects other than the current object. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/stopPropagation) - */ - stopPropagation(): void; - /** - * Returns the invocation target objects of event's path (objects on which listeners will be invoked), except for any nodes in shadow trees of which the shadow root's mode is "closed" that are not reachable from event's currentTarget. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Event/composedPath) - */ - composedPath(): EventTarget[]; - static readonly NONE: number; - static readonly CAPTURING_PHASE: number; - static readonly AT_TARGET: number; - static readonly BUBBLING_PHASE: number; -} -interface EventInit { - bubbles?: boolean; - cancelable?: boolean; - composed?: boolean; -} -type EventListener = (event: EventType) => void; -interface EventListenerObject { - handleEvent(event: EventType): void; -} -type EventListenerOrEventListenerObject = EventListener | EventListenerObject; -/** - * EventTarget is a DOM interface implemented by objects that can receive events and may have listeners for them. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget) - */ -declare class EventTarget = Record> { - constructor(); - /** - * Appends an event listener for events whose type attribute value is type. The callback argument sets the callback that will be invoked when the event is dispatched. 
- * - * The options argument sets listener-specific options. For compatibility this can be a boolean, in which case the method behaves exactly as if the value was specified as options's capture. - * - * When set to true, options's capture prevents callback from being invoked when the event's eventPhase attribute value is BUBBLING_PHASE. When false (or not present), callback will not be invoked when event's eventPhase attribute value is CAPTURING_PHASE. Either way, callback will be invoked if event's eventPhase attribute value is AT_TARGET. - * - * When set to true, options's passive indicates that the callback will not cancel the event by invoking preventDefault(). This is used to enable performance optimizations described in § 2.8 Observing event listeners. - * - * When set to true, options's once indicates that the callback will only be invoked once after which the event listener will be removed. - * - * If an AbortSignal is passed for options's signal, then the event listener will be removed when signal is aborted. - * - * The event listener is appended to target's event listener list and is not appended if it has the same type, callback, and capture. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/addEventListener) - */ - addEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetAddEventListenerOptions | boolean): void; - /** - * Removes the event listener in target's event listener list with the same type, callback, and options. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/removeEventListener) - */ - removeEventListener(type: Type, handler: EventListenerOrEventListenerObject, options?: EventTargetEventListenerOptions | boolean): void; - /** - * Dispatches a synthetic event event to target and returns true if either event's cancelable attribute value is false or its preventDefault() method was not invoked, and false otherwise. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventTarget/dispatchEvent) - */ - dispatchEvent(event: EventMap[keyof EventMap]): boolean; -} -interface EventTargetEventListenerOptions { - capture?: boolean; -} -interface EventTargetAddEventListenerOptions { - capture?: boolean; - passive?: boolean; - once?: boolean; - signal?: AbortSignal; -} -interface EventTargetHandlerObject { - handleEvent: (event: Event) => any | undefined; -} -/** - * A controller object that allows you to abort one or more DOM requests as and when desired. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController) - */ -declare class AbortController { - constructor(); - /** - * Returns the AbortSignal object associated with this object. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController/signal) - */ - get signal(): AbortSignal; - /** - * Invoking this method will set this object's AbortSignal's aborted flag and signal to any observers that the associated activity is to be aborted. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController/abort) - */ - abort(reason?: any): void; -} -/** - * A signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal) - */ -declare abstract class AbortSignal extends EventTarget { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_static) */ - static abort(reason?: any): AbortSignal; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/timeout_static) */ - static timeout(delay: number): AbortSignal; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/any_static) */ - static any(signals: AbortSignal[]): AbortSignal; - /** - * Returns true if this AbortSignal's AbortController has signaled to abort, and false otherwise. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/aborted) - */ - get aborted(): boolean; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/reason) */ - get reason(): any; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_event) */ - get onabort(): any | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/abort_event) */ - set onabort(value: any | null); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortSignal/throwIfAborted) */ - throwIfAborted(): void; -} -interface Scheduler { - wait(delay: number, maybeOptions?: SchedulerWaitOptions): Promise; -} -interface SchedulerWaitOptions { - signal?: AbortSignal; -} -/** - * Extends the lifetime of the install and activate events dispatched on the global scope as part of the service worker lifecycle. This ensures that any functional events (like FetchEvent) are not dispatched until it upgrades database schemas and deletes the outdated cache entries. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ExtendableEvent) - */ -declare abstract class ExtendableEvent extends Event { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ExtendableEvent/waitUntil) */ - waitUntil(promise: Promise): void; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CustomEvent) */ -declare class CustomEvent extends Event { - constructor(type: string, init?: CustomEventCustomEventInit); - /** - * Returns any custom data event was created with. Typically used for synthetic events. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CustomEvent/detail) - */ - get detail(): T; -} -interface CustomEventCustomEventInit { - bubbles?: boolean; - cancelable?: boolean; - composed?: boolean; - detail?: any; -} -/** - * A file-like object of immutable, raw data. Blobs represent data that isn't necessarily in a JavaScript-native format. 
The File interface is based on Blob, inheriting blob functionality and expanding it to support files on the user's system. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob) - */ -declare class Blob { - constructor(type?: ((ArrayBuffer | ArrayBufferView) | string | Blob)[], options?: BlobOptions); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/size) */ - get size(): number; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/type) */ - get type(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/slice) */ - slice(start?: number, end?: number, type?: string): Blob; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/arrayBuffer) */ - arrayBuffer(): Promise; - bytes(): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/text) */ - text(): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Blob/stream) */ - stream(): ReadableStream; -} -interface BlobOptions { - type?: string; -} -/** - * Provides information about files and allows JavaScript in a web page to access their content. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/File) - */ -declare class File extends Blob { - constructor(bits: ((ArrayBuffer | ArrayBufferView) | string | Blob)[] | undefined, name: string, options?: FileOptions); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/name) */ - get name(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/File/lastModified) */ - get lastModified(): number; -} -interface FileOptions { - type?: string; - lastModified?: number; -} -/** -* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. 
-* -* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) -*/ -declare abstract class CacheStorage { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CacheStorage/open) */ - open(cacheName: string): Promise; - readonly default: Cache; -} -/** -* The Cache API allows fine grained control of reading and writing from the Cloudflare global network cache. -* -* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/) -*/ -declare abstract class Cache { - /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#delete) */ - delete(request: RequestInfo | URL, options?: CacheQueryOptions): Promise; - /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#match) */ - match(request: RequestInfo | URL, options?: CacheQueryOptions): Promise; - /* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/cache/#put) */ - put(request: RequestInfo | URL, response: Response): Promise; -} -interface CacheQueryOptions { - ignoreMethod?: boolean; -} -/** -* The Web Crypto API provides a set of low-level functions for common cryptographic tasks. -* The Workers runtime implements the full surface of this API, but with some differences in -* the [supported algorithms](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/#supported-algorithms) -* compared to those implemented in most browsers. -* -* [Cloudflare Docs Reference](https://developers.cloudflare.com/workers/runtime-apis/web-crypto/) -*/ -declare abstract class Crypto { - /** - * Available only in secure contexts. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/subtle) - */ - get subtle(): SubtleCrypto; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/getRandomValues) */ - getRandomValues(buffer: T): T; - /** - * Available only in secure contexts. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Crypto/randomUUID) - */ - randomUUID(): string; - DigestStream: typeof DigestStream; -} -/** - * This Web Crypto API interface provides a number of low-level cryptographic functions. It is accessed via the Crypto.subtle properties available in a window context (via Window.crypto). - * Available only in secure contexts. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto) - */ -declare abstract class SubtleCrypto { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/encrypt) */ - encrypt(algorithm: string | SubtleCryptoEncryptAlgorithm, key: CryptoKey, plainText: ArrayBuffer | ArrayBufferView): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/decrypt) */ - decrypt(algorithm: string | SubtleCryptoEncryptAlgorithm, key: CryptoKey, cipherText: ArrayBuffer | ArrayBufferView): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/sign) */ - sign(algorithm: string | SubtleCryptoSignAlgorithm, key: CryptoKey, data: ArrayBuffer | ArrayBufferView): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/verify) */ - verify(algorithm: string | SubtleCryptoSignAlgorithm, key: CryptoKey, signature: ArrayBuffer | ArrayBufferView, data: ArrayBuffer | ArrayBufferView): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/digest) */ - digest(algorithm: string | SubtleCryptoHashAlgorithm, data: ArrayBuffer | ArrayBufferView): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/generateKey) */ - generateKey(algorithm: string | SubtleCryptoGenerateKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveKey) */ - deriveKey(algorithm: string | SubtleCryptoDeriveKeyAlgorithm, baseKey: CryptoKey, 
derivedKeyAlgorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveBits) */ - deriveBits(algorithm: string | SubtleCryptoDeriveKeyAlgorithm, baseKey: CryptoKey, length?: number | null): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/importKey) */ - importKey(format: string, keyData: (ArrayBuffer | ArrayBufferView) | JsonWebKey, algorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/exportKey) */ - exportKey(format: string, key: CryptoKey): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/wrapKey) */ - wrapKey(format: string, key: CryptoKey, wrappingKey: CryptoKey, wrapAlgorithm: string | SubtleCryptoEncryptAlgorithm): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/unwrapKey) */ - unwrapKey(format: string, wrappedKey: ArrayBuffer | ArrayBufferView, unwrappingKey: CryptoKey, unwrapAlgorithm: string | SubtleCryptoEncryptAlgorithm, unwrappedKeyAlgorithm: string | SubtleCryptoImportKeyAlgorithm, extractable: boolean, keyUsages: string[]): Promise; - timingSafeEqual(a: ArrayBuffer | ArrayBufferView, b: ArrayBuffer | ArrayBufferView): boolean; -} -/** - * The CryptoKey dictionary of the Web Crypto API represents a cryptographic key. - * Available only in secure contexts. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey) - */ -declare abstract class CryptoKey { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/type) */ - readonly type: string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/extractable) */ - readonly extractable: boolean; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/algorithm) */ - readonly algorithm: CryptoKeyKeyAlgorithm | CryptoKeyAesKeyAlgorithm | CryptoKeyHmacKeyAlgorithm | CryptoKeyRsaKeyAlgorithm | CryptoKeyEllipticKeyAlgorithm | CryptoKeyArbitraryKeyAlgorithm; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CryptoKey/usages) */ - readonly usages: string[]; -} -interface CryptoKeyPair { - publicKey: CryptoKey; - privateKey: CryptoKey; -} -interface JsonWebKey { - kty: string; - use?: string; - key_ops?: string[]; - alg?: string; - ext?: boolean; - crv?: string; - x?: string; - y?: string; - d?: string; - n?: string; - e?: string; - p?: string; - q?: string; - dp?: string; - dq?: string; - qi?: string; - oth?: RsaOtherPrimesInfo[]; - k?: string; -} -interface RsaOtherPrimesInfo { - r?: string; - d?: string; - t?: string; -} -interface SubtleCryptoDeriveKeyAlgorithm { - name: string; - salt?: (ArrayBuffer | ArrayBufferView); - iterations?: number; - hash?: (string | SubtleCryptoHashAlgorithm); - $public?: CryptoKey; - info?: (ArrayBuffer | ArrayBufferView); -} -interface SubtleCryptoEncryptAlgorithm { - name: string; - iv?: (ArrayBuffer | ArrayBufferView); - additionalData?: (ArrayBuffer | ArrayBufferView); - tagLength?: number; - counter?: (ArrayBuffer | ArrayBufferView); - length?: number; - label?: (ArrayBuffer | ArrayBufferView); -} -interface SubtleCryptoGenerateKeyAlgorithm { - name: string; - hash?: (string | SubtleCryptoHashAlgorithm); - modulusLength?: number; - publicExponent?: (ArrayBuffer | ArrayBufferView); - length?: number; - namedCurve?: string; -} -interface 
SubtleCryptoHashAlgorithm { - name: string; -} -interface SubtleCryptoImportKeyAlgorithm { - name: string; - hash?: (string | SubtleCryptoHashAlgorithm); - length?: number; - namedCurve?: string; - compressed?: boolean; -} -interface SubtleCryptoSignAlgorithm { - name: string; - hash?: (string | SubtleCryptoHashAlgorithm); - dataLength?: number; - saltLength?: number; -} -interface CryptoKeyKeyAlgorithm { - name: string; -} -interface CryptoKeyAesKeyAlgorithm { - name: string; - length: number; -} -interface CryptoKeyHmacKeyAlgorithm { - name: string; - hash: CryptoKeyKeyAlgorithm; - length: number; -} -interface CryptoKeyRsaKeyAlgorithm { - name: string; - modulusLength: number; - publicExponent: ArrayBuffer | ArrayBufferView; - hash?: CryptoKeyKeyAlgorithm; -} -interface CryptoKeyEllipticKeyAlgorithm { - name: string; - namedCurve: string; -} -interface CryptoKeyArbitraryKeyAlgorithm { - name: string; - hash?: CryptoKeyKeyAlgorithm; - namedCurve?: string; - length?: number; -} -declare class DigestStream extends WritableStream { - constructor(algorithm: string | SubtleCryptoHashAlgorithm); - readonly digest: Promise; - get bytesWritten(): number | bigint; -} -/** - * A decoder for a specific method, that is a specific character encoding, like utf-8, iso-8859-2, koi8, cp1261, gbk, etc. A decoder takes a stream of bytes as input and emits a stream of code points. For a more scalable, non-native library, see StringView – a C-like representation of strings based on typed arrays. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoder) - */ -declare class TextDecoder { - constructor(label?: string, options?: TextDecoderConstructorOptions); - /** - * Returns the result of running encoding's decoder. The method can be invoked zero or more times with options's stream set to true, and then once without options's stream (or set to false), to process a fragmented input. 
If the invocation without options's stream (or set to false) has no input, it's clearest to omit both arguments. - * - * ``` - * var string = "", decoder = new TextDecoder(encoding), buffer; - * while(buffer = next_chunk()) { - * string += decoder.decode(buffer, {stream:true}); - * } - * string += decoder.decode(); // end-of-queue - * ``` - * - * If the error mode is "fatal" and encoding's decoder returns error, throws a TypeError. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoder/decode) - */ - decode(input?: (ArrayBuffer | ArrayBufferView), options?: TextDecoderDecodeOptions): string; - get encoding(): string; - get fatal(): boolean; - get ignoreBOM(): boolean; -} -/** - * TextEncoder takes a stream of code points as input and emits a stream of bytes. For a more scalable, non-native library, see StringView – a C-like representation of strings based on typed arrays. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder) - */ -declare class TextEncoder { - constructor(); - /** - * Returns the result of running UTF-8's encoder. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder/encode) - */ - encode(input?: string): Uint8Array; - /** - * Runs the UTF-8 encoder on source, stores the result of that operation into destination, and returns the progress made as an object wherein read is the number of converted code units of source and written is the number of bytes modified in destination. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoder/encodeInto) - */ - encodeInto(input: string, buffer: ArrayBuffer | ArrayBufferView): TextEncoderEncodeIntoResult; - get encoding(): string; -} -interface TextDecoderConstructorOptions { - fatal: boolean; - ignoreBOM: boolean; -} -interface TextDecoderDecodeOptions { - stream: boolean; -} -interface TextEncoderEncodeIntoResult { - read: number; - written: number; -} -/** - * Events providing information related to errors in scripts or in files. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ErrorEvent) - */ -declare class ErrorEvent extends Event { - constructor(type: string, init?: ErrorEventErrorEventInit); - get filename(): string; - get message(): string; - get lineno(): number; - get colno(): number; - get error(): any; -} -interface ErrorEventErrorEventInit { - message?: string; - filename?: string; - lineno?: number; - colno?: number; - error?: any; -} -/** - * Provides a way to easily construct a set of key/value pairs representing form fields and their values, which can then be easily sent using the XMLHttpRequest.send() method. It uses the same format a form would use if the encoding type were set to "multipart/form-data". 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData) - */ -declare class FormData { - constructor(); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) */ - append(name: string, value: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/append) */ - append(name: string, value: Blob, filename?: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/delete) */ - delete(name: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/get) */ - get(name: string): (File | string) | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/getAll) */ - getAll(name: string): (File | string)[]; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/has) */ - has(name: string): boolean; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/set) */ - set(name: string, value: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FormData/set) */ - set(name: string, value: Blob, filename?: string): void; - /* Returns an array of key, value pairs for every entry in the list. */ - entries(): IterableIterator<[ - key: string, - value: File | string - ]>; - /* Returns a list of keys in the list. */ - keys(): IterableIterator; - /* Returns a list of values in the list. 
*/ - values(): IterableIterator<(File | string)>; - forEach(callback: (this: This, value: File | string, key: string, parent: FormData) => void, thisArg?: This): void; - [Symbol.iterator](): IterableIterator<[ - key: string, - value: File | string - ]>; -} -interface ContentOptions { - html?: boolean; -} -declare class HTMLRewriter { - constructor(); - on(selector: string, handlers: HTMLRewriterElementContentHandlers): HTMLRewriter; - onDocument(handlers: HTMLRewriterDocumentContentHandlers): HTMLRewriter; - transform(response: Response): Response; -} -interface HTMLRewriterElementContentHandlers { - element?(element: Element): void | Promise; - comments?(comment: Comment): void | Promise; - text?(element: Text): void | Promise; -} -interface HTMLRewriterDocumentContentHandlers { - doctype?(doctype: Doctype): void | Promise; - comments?(comment: Comment): void | Promise; - text?(text: Text): void | Promise; - end?(end: DocumentEnd): void | Promise; -} -interface Doctype { - readonly name: string | null; - readonly publicId: string | null; - readonly systemId: string | null; -} -interface Element { - tagName: string; - readonly attributes: IterableIterator; - readonly removed: boolean; - readonly namespaceURI: string; - getAttribute(name: string): string | null; - hasAttribute(name: string): boolean; - setAttribute(name: string, value: string): Element; - removeAttribute(name: string): Element; - before(content: string | ReadableStream | Response, options?: ContentOptions): Element; - after(content: string | ReadableStream | Response, options?: ContentOptions): Element; - prepend(content: string | ReadableStream | Response, options?: ContentOptions): Element; - append(content: string | ReadableStream | Response, options?: ContentOptions): Element; - replace(content: string | ReadableStream | Response, options?: ContentOptions): Element; - remove(): Element; - removeAndKeepContent(): Element; - setInnerContent(content: string | ReadableStream | Response, options?: 
ContentOptions): Element; - onEndTag(handler: (tag: EndTag) => void | Promise): void; -} -interface EndTag { - name: string; - before(content: string | ReadableStream | Response, options?: ContentOptions): EndTag; - after(content: string | ReadableStream | Response, options?: ContentOptions): EndTag; - remove(): EndTag; -} -interface Comment { - text: string; - readonly removed: boolean; - before(content: string, options?: ContentOptions): Comment; - after(content: string, options?: ContentOptions): Comment; - replace(content: string, options?: ContentOptions): Comment; - remove(): Comment; -} -interface Text { - readonly text: string; - readonly lastInTextNode: boolean; - readonly removed: boolean; - before(content: string | ReadableStream | Response, options?: ContentOptions): Text; - after(content: string | ReadableStream | Response, options?: ContentOptions): Text; - replace(content: string | ReadableStream | Response, options?: ContentOptions): Text; - remove(): Text; -} -interface DocumentEnd { - append(content: string, options?: ContentOptions): DocumentEnd; -} -/** - * This is the event type for fetch events dispatched on the service worker global scope. It contains information about the fetch, including the request and how the receiver will treat the response. It provides the event.respondWith() method, which allows us to provide a response to this fetch. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent) - */ -declare abstract class FetchEvent extends ExtendableEvent { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent/request) */ - readonly request: Request; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/FetchEvent/respondWith) */ - respondWith(promise: Response | Promise): void; - passThroughOnException(): void; -} -type HeadersInit = Headers | Iterable> | Record; -/** - * This Fetch API interface allows you to perform various actions on HTTP request and response headers. 
These actions include retrieving, setting, adding to, and removing. A Headers object has an associated header list, which is initially empty and consists of zero or more name and value pairs.  You can add to this using methods like append() (see Examples.) In all methods of this interface, header names are matched by case-insensitive byte sequence. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers) - */ -declare class Headers { - constructor(init?: HeadersInit); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/get) */ - get(name: string): string | null; - getAll(name: string): string[]; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/getSetCookie) */ - getSetCookie(): string[]; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/has) */ - has(name: string): boolean; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/set) */ - set(name: string, value: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/append) */ - append(name: string, value: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Headers/delete) */ - delete(name: string): void; - forEach(callback: (this: This, value: string, key: string, parent: Headers) => void, thisArg?: This): void; - /* Returns an iterator allowing to go through all key/value pairs contained in this object. */ - entries(): IterableIterator<[ - key: string, - value: string - ]>; - /* Returns an iterator allowing to go through all keys of the key/value pairs contained in this object. */ - keys(): IterableIterator; - /* Returns an iterator allowing to go through all values of the key/value pairs contained in this object. 
*/ - values(): IterableIterator; - [Symbol.iterator](): IterableIterator<[ - key: string, - value: string - ]>; -} -type BodyInit = ReadableStream | string | ArrayBuffer | ArrayBufferView | Blob | URLSearchParams | FormData; -declare abstract class Body { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/body) */ - get body(): ReadableStream | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/bodyUsed) */ - get bodyUsed(): boolean; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/arrayBuffer) */ - arrayBuffer(): Promise; - bytes(): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/text) */ - text(): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/json) */ - json(): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/formData) */ - formData(): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/blob) */ - blob(): Promise; -} -/** - * This Fetch API interface represents the response to a request. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response) - */ -declare var Response: { - prototype: Response; - new (body?: BodyInit | null, init?: ResponseInit): Response; - error(): Response; - redirect(url: string, status?: number): Response; - json(any: any, maybeInit?: (ResponseInit | Response)): Response; -}; -/** - * This Fetch API interface represents the response to a request. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response) - */ -interface Response extends Body { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/clone) */ - clone(): Response; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/status) */ - status: number; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/statusText) */ - statusText: string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/headers) */ - headers: Headers; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/ok) */ - ok: boolean; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/redirected) */ - redirected: boolean; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/url) */ - url: string; - webSocket: WebSocket | null; - cf: any | undefined; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Response/type) */ - type: "default" | "error"; -} -interface ResponseInit { - status?: number; - statusText?: string; - headers?: HeadersInit; - cf?: any; - webSocket?: (WebSocket | null); - encodeBody?: "automatic" | "manual"; -} -type RequestInfo> = Request | string; -/** - * This Fetch API interface represents a resource request. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request) - */ -declare var Request: { - prototype: Request; - new >(input: RequestInfo | URL, init?: RequestInit): Request; -}; -/** - * This Fetch API interface represents a resource request. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request) - */ -interface Request> extends Body { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/clone) */ - clone(): Request; - /** - * Returns request's HTTP method, which is "GET" by default. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/method) - */ - method: string; - /** - * Returns the URL of request as a string. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/url) - */ - url: string; - /** - * Returns a Headers object consisting of the headers associated with request. Note that headers added in the network layer by the user agent will not be accounted for in this object, e.g., the "Host" header. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/headers) - */ - headers: Headers; - /** - * Returns the redirect mode associated with request, which is a string indicating how redirects for the request will be handled during fetching. A request will follow redirects by default. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/redirect) - */ - redirect: string; - fetcher: Fetcher | null; - /** - * Returns the signal associated with request, which is an AbortSignal object indicating whether or not request has been aborted, and its abort event handler. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/signal) - */ - signal: AbortSignal; - cf: Cf | undefined; - /** - * Returns request's subresource integrity metadata, which is a cryptographic hash of the resource being fetched. Its value consists of multiple hashes separated by whitespace. [SRI] - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/integrity) - */ - integrity: string; - /* Returns a boolean indicating whether or not request can outlive the global in which it was created. */ - keepalive: boolean; -} -interface RequestInit { - /* A string to set request's method. */ - method?: string; - /* A Headers object, an object literal, or an array of two-item arrays to set request's headers. */ - headers?: HeadersInit; - /* A BodyInit object or null to set request's body. */ - body?: BodyInit | null; - /* A string indicating whether request follows redirects, results in an error upon encountering a redirect, or returns the redirect (in an opaque fashion). Sets request's redirect. 
*/ - redirect?: string; - fetcher?: (Fetcher | null); - cf?: Cf; - /* A cryptographic hash of the resource to be fetched by request. Sets request's integrity. */ - integrity?: string; - /* An AbortSignal to set request's signal. */ - signal?: (AbortSignal | null); - encodeResponseBody?: "automatic" | "manual"; -} -type Service = Fetcher; -type Fetcher = (T extends Rpc.EntrypointBranded ? Rpc.Provider : unknown) & { - fetch(input: RequestInfo | URL, init?: RequestInit): Promise; - connect(address: SocketAddress | string, options?: SocketOptions): Socket; -}; -interface KVNamespaceListKey { - name: Key; - expiration?: number; - metadata?: Metadata; -} -type KVNamespaceListResult = { - list_complete: false; - keys: KVNamespaceListKey[]; - cursor: string; - cacheStatus: string | null; -} | { - list_complete: true; - keys: KVNamespaceListKey[]; - cacheStatus: string | null; -}; -interface KVNamespace { - get(key: Key, options?: Partial>): Promise; - get(key: Key, type: "text"): Promise; - get(key: Key, type: "json"): Promise; - get(key: Key, type: "arrayBuffer"): Promise; - get(key: Key, type: "stream"): Promise; - get(key: Key, options?: KVNamespaceGetOptions<"text">): Promise; - get(key: Key, options?: KVNamespaceGetOptions<"json">): Promise; - get(key: Key, options?: KVNamespaceGetOptions<"arrayBuffer">): Promise; - get(key: Key, options?: KVNamespaceGetOptions<"stream">): Promise; - get(key: Array, type: "text"): Promise>; - get(key: Array, type: "json"): Promise>; - get(key: Array, options?: Partial>): Promise>; - get(key: Array, options?: KVNamespaceGetOptions<"text">): Promise>; - get(key: Array, options?: KVNamespaceGetOptions<"json">): Promise>; - list(options?: KVNamespaceListOptions): Promise>; - put(key: Key, value: string | ArrayBuffer | ArrayBufferView | ReadableStream, options?: KVNamespacePutOptions): Promise; - getWithMetadata(key: Key, options?: Partial>): Promise>; - getWithMetadata(key: Key, type: "text"): Promise>; - getWithMetadata(key: Key, type: 
"json"): Promise>; - getWithMetadata(key: Key, type: "arrayBuffer"): Promise>; - getWithMetadata(key: Key, type: "stream"): Promise>; - getWithMetadata(key: Key, options: KVNamespaceGetOptions<"text">): Promise>; - getWithMetadata(key: Key, options: KVNamespaceGetOptions<"json">): Promise>; - getWithMetadata(key: Key, options: KVNamespaceGetOptions<"arrayBuffer">): Promise>; - getWithMetadata(key: Key, options: KVNamespaceGetOptions<"stream">): Promise>; - getWithMetadata(key: Array, type: "text"): Promise>>; - getWithMetadata(key: Array, type: "json"): Promise>>; - getWithMetadata(key: Array, options?: Partial>): Promise>>; - getWithMetadata(key: Array, options?: KVNamespaceGetOptions<"text">): Promise>>; - getWithMetadata(key: Array, options?: KVNamespaceGetOptions<"json">): Promise>>; - delete(key: Key): Promise; -} -interface KVNamespaceListOptions { - limit?: number; - prefix?: (string | null); - cursor?: (string | null); -} -interface KVNamespaceGetOptions { - type: Type; - cacheTtl?: number; -} -interface KVNamespacePutOptions { - expiration?: number; - expirationTtl?: number; - metadata?: (any | null); -} -interface KVNamespaceGetWithMetadataResult { - value: Value | null; - metadata: Metadata | null; - cacheStatus: string | null; -} -type QueueContentType = "text" | "bytes" | "json" | "v8"; -interface Queue { - send(message: Body, options?: QueueSendOptions): Promise; - sendBatch(messages: Iterable>, options?: QueueSendBatchOptions): Promise; -} -interface QueueSendOptions { - contentType?: QueueContentType; - delaySeconds?: number; -} -interface QueueSendBatchOptions { - delaySeconds?: number; -} -interface MessageSendRequest { - body: Body; - contentType?: QueueContentType; - delaySeconds?: number; -} -interface QueueRetryOptions { - delaySeconds?: number; -} -interface Message { - readonly id: string; - readonly timestamp: Date; - readonly body: Body; - readonly attempts: number; - retry(options?: QueueRetryOptions): void; - ack(): void; -} -interface 
QueueEvent extends ExtendableEvent { - readonly messages: readonly Message[]; - readonly queue: string; - retryAll(options?: QueueRetryOptions): void; - ackAll(): void; -} -interface MessageBatch { - readonly messages: readonly Message[]; - readonly queue: string; - retryAll(options?: QueueRetryOptions): void; - ackAll(): void; -} -interface R2Error extends Error { - readonly name: string; - readonly code: number; - readonly message: string; - readonly action: string; - readonly stack: any; -} -interface R2ListOptions { - limit?: number; - prefix?: string; - cursor?: string; - delimiter?: string; - startAfter?: string; - include?: ("httpMetadata" | "customMetadata")[]; -} -declare abstract class R2Bucket { - head(key: string): Promise; - get(key: string, options: R2GetOptions & { - onlyIf: R2Conditional | Headers; - }): Promise; - get(key: string, options?: R2GetOptions): Promise; - put(key: string, value: ReadableStream | ArrayBuffer | ArrayBufferView | string | null | Blob, options?: R2PutOptions & { - onlyIf: R2Conditional | Headers; - }): Promise; - put(key: string, value: ReadableStream | ArrayBuffer | ArrayBufferView | string | null | Blob, options?: R2PutOptions): Promise; - createMultipartUpload(key: string, options?: R2MultipartOptions): Promise; - resumeMultipartUpload(key: string, uploadId: string): R2MultipartUpload; - delete(keys: string | string[]): Promise; - list(options?: R2ListOptions): Promise; -} -interface R2MultipartUpload { - readonly key: string; - readonly uploadId: string; - uploadPart(partNumber: number, value: ReadableStream | (ArrayBuffer | ArrayBufferView) | string | Blob, options?: R2UploadPartOptions): Promise; - abort(): Promise; - complete(uploadedParts: R2UploadedPart[]): Promise; -} -interface R2UploadedPart { - partNumber: number; - etag: string; -} -declare abstract class R2Object { - readonly key: string; - readonly version: string; - readonly size: number; - readonly etag: string; - readonly httpEtag: string; - readonly 
checksums: R2Checksums; - readonly uploaded: Date; - readonly httpMetadata?: R2HTTPMetadata; - readonly customMetadata?: Record; - readonly range?: R2Range; - readonly storageClass: string; - readonly ssecKeyMd5?: string; - writeHttpMetadata(headers: Headers): void; -} -interface R2ObjectBody extends R2Object { - get body(): ReadableStream; - get bodyUsed(): boolean; - arrayBuffer(): Promise; - text(): Promise; - json(): Promise; - blob(): Promise; -} -type R2Range = { - offset: number; - length?: number; -} | { - offset?: number; - length: number; -} | { - suffix: number; -}; -interface R2Conditional { - etagMatches?: string; - etagDoesNotMatch?: string; - uploadedBefore?: Date; - uploadedAfter?: Date; - secondsGranularity?: boolean; -} -interface R2GetOptions { - onlyIf?: (R2Conditional | Headers); - range?: (R2Range | Headers); - ssecKey?: (ArrayBuffer | string); -} -interface R2PutOptions { - onlyIf?: (R2Conditional | Headers); - httpMetadata?: (R2HTTPMetadata | Headers); - customMetadata?: Record; - md5?: (ArrayBuffer | string); - sha1?: (ArrayBuffer | string); - sha256?: (ArrayBuffer | string); - sha384?: (ArrayBuffer | string); - sha512?: (ArrayBuffer | string); - storageClass?: string; - ssecKey?: (ArrayBuffer | string); -} -interface R2MultipartOptions { - httpMetadata?: (R2HTTPMetadata | Headers); - customMetadata?: Record; - storageClass?: string; - ssecKey?: (ArrayBuffer | string); -} -interface R2Checksums { - readonly md5?: ArrayBuffer; - readonly sha1?: ArrayBuffer; - readonly sha256?: ArrayBuffer; - readonly sha384?: ArrayBuffer; - readonly sha512?: ArrayBuffer; - toJSON(): R2StringChecksums; -} -interface R2StringChecksums { - md5?: string; - sha1?: string; - sha256?: string; - sha384?: string; - sha512?: string; -} -interface R2HTTPMetadata { - contentType?: string; - contentLanguage?: string; - contentDisposition?: string; - contentEncoding?: string; - cacheControl?: string; - cacheExpiry?: Date; -} -type R2Objects = { - objects: R2Object[]; - 
delimitedPrefixes: string[]; -} & ({ - truncated: true; - cursor: string; -} | { - truncated: false; -}); -interface R2UploadPartOptions { - ssecKey?: (ArrayBuffer | string); -} -declare abstract class ScheduledEvent extends ExtendableEvent { - readonly scheduledTime: number; - readonly cron: string; - noRetry(): void; -} -interface ScheduledController { - readonly scheduledTime: number; - readonly cron: string; - noRetry(): void; -} -interface QueuingStrategy { - highWaterMark?: (number | bigint); - size?: (chunk: T) => number | bigint; -} -interface UnderlyingSink { - type?: string; - start?: (controller: WritableStreamDefaultController) => void | Promise; - write?: (chunk: W, controller: WritableStreamDefaultController) => void | Promise; - abort?: (reason: any) => void | Promise; - close?: () => void | Promise; -} -interface UnderlyingByteSource { - type: "bytes"; - autoAllocateChunkSize?: number; - start?: (controller: ReadableByteStreamController) => void | Promise; - pull?: (controller: ReadableByteStreamController) => void | Promise; - cancel?: (reason: any) => void | Promise; -} -interface UnderlyingSource { - type?: "" | undefined; - start?: (controller: ReadableStreamDefaultController) => void | Promise; - pull?: (controller: ReadableStreamDefaultController) => void | Promise; - cancel?: (reason: any) => void | Promise; - expectedLength?: (number | bigint); -} -interface Transformer { - readableType?: string; - writableType?: string; - start?: (controller: TransformStreamDefaultController) => void | Promise; - transform?: (chunk: I, controller: TransformStreamDefaultController) => void | Promise; - flush?: (controller: TransformStreamDefaultController) => void | Promise; - cancel?: (reason: any) => void | Promise; - expectedLength?: number; -} -interface StreamPipeOptions { - /** - * Pipes this readable stream to a given writable stream destination. 
The way in which the piping process behaves under various error conditions can be customized with a number of passed options. It returns a promise that fulfills when the piping process completes successfully, or rejects if any errors were encountered. - * - * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader. - * - * Errors and closures of the source and destination streams propagate as follows: - * - * An error in this source readable stream will abort destination, unless preventAbort is truthy. The returned promise will be rejected with the source's error, or with any error that occurs during aborting the destination. - * - * An error in destination will cancel this source readable stream, unless preventCancel is truthy. The returned promise will be rejected with the destination's error, or with any error that occurs during canceling the source. - * - * When this source readable stream closes, destination will be closed, unless preventClose is truthy. The returned promise will be fulfilled once this process completes, unless an error is encountered while closing the destination, in which case it will be rejected with that error. - * - * If destination starts out closed or closing, this source readable stream will be canceled, unless preventCancel is true. The returned promise will be rejected with an error indicating piping to a closed stream failed, or with any error that occurs during canceling the source. - * - * The signal option can be set to an AbortSignal to allow aborting an ongoing pipe operation via the corresponding AbortController. In this case, this source readable stream will be canceled, and destination aborted, unless the respective options preventCancel or preventAbort are set. 
- */ - preventClose?: boolean; - preventAbort?: boolean; - preventCancel?: boolean; - signal?: AbortSignal; -} -type ReadableStreamReadResult = { - done: false; - value: R; -} | { - done: true; - value?: undefined; -}; -/** - * This Streams API interface represents a readable stream of byte data. The Fetch API offers a concrete instance of a ReadableStream through the body property of a Response object. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream) - */ -interface ReadableStream { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/locked) */ - get locked(): boolean; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/cancel) */ - cancel(reason?: any): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/getReader) */ - getReader(): ReadableStreamDefaultReader; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/getReader) */ - getReader(options: ReadableStreamGetReaderOptions): ReadableStreamBYOBReader; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/pipeThrough) */ - pipeThrough(transform: ReadableWritablePair, options?: StreamPipeOptions): ReadableStream; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/pipeTo) */ - pipeTo(destination: WritableStream, options?: StreamPipeOptions): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream/tee) */ - tee(): [ - ReadableStream, - ReadableStream - ]; - values(options?: ReadableStreamValuesOptions): AsyncIterableIterator; - [Symbol.asyncIterator](options?: ReadableStreamValuesOptions): AsyncIterableIterator; -} -/** - * This Streams API interface represents a readable stream of byte data. The Fetch API offers a concrete instance of a ReadableStream through the body property of a Response object. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStream) - */ -declare const ReadableStream: { - prototype: ReadableStream; - new (underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy): ReadableStream; - new (underlyingSource?: UnderlyingSource, strategy?: QueuingStrategy): ReadableStream; -}; -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader) */ -declare class ReadableStreamDefaultReader { - constructor(stream: ReadableStream); - get closed(): Promise; - cancel(reason?: any): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader/read) */ - read(): Promise>; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultReader/releaseLock) */ - releaseLock(): void; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader) */ -declare class ReadableStreamBYOBReader { - constructor(stream: ReadableStream); - get closed(): Promise; - cancel(reason?: any): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader/read) */ - read(view: T): Promise>; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBReader/releaseLock) */ - releaseLock(): void; - readAtLeast(minElements: number, view: T): Promise>; -} -interface ReadableStreamBYOBReaderReadableStreamBYOBReaderReadOptions { - min?: number; -} -interface ReadableStreamGetReaderOptions { - /** - * Creates a ReadableStreamBYOBReader and locks the stream to the new reader. - * - * This call behaves the same way as the no-argument variant, except that it only works on readable byte streams, i.e. streams which were constructed specifically with the ability to handle "bring your own buffer" reading. 
The returned BYOB reader provides the ability to directly read individual chunks from the stream via its read() method, into developer-supplied buffers, allowing more precise control over allocation. - */ - mode: "byob"; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest) */ -declare abstract class ReadableStreamBYOBRequest { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/view) */ - get view(): Uint8Array | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/respond) */ - respond(bytesWritten: number): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamBYOBRequest/respondWithNewView) */ - respondWithNewView(view: ArrayBuffer | ArrayBufferView): void; - get atLeast(): number | null; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController) */ -declare abstract class ReadableStreamDefaultController { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/desiredSize) */ - get desiredSize(): number | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/close) */ - close(): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/enqueue) */ - enqueue(chunk?: R): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableStreamDefaultController/error) */ - error(reason: any): void; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController) */ -declare abstract class ReadableByteStreamController { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/byobRequest) */ - get byobRequest(): ReadableStreamBYOBRequest | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/desiredSize) */ - get 
desiredSize(): number | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/close) */ - close(): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/enqueue) */ - enqueue(chunk: ArrayBuffer | ArrayBufferView): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ReadableByteStreamController/error) */ - error(reason: any): void; -} -/** - * This Streams API interface represents a controller allowing control of a WritableStream's state. When constructing a WritableStream, the underlying sink is given a corresponding WritableStreamDefaultController instance to manipulate. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController) - */ -declare abstract class WritableStreamDefaultController { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController/signal) */ - get signal(): AbortSignal; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultController/error) */ - error(reason?: any): void; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController) */ -declare abstract class TransformStreamDefaultController { - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/desiredSize) */ - get desiredSize(): number | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/enqueue) */ - enqueue(chunk?: O): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/error) */ - error(reason: any): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStreamDefaultController/terminate) */ - terminate(): void; -} -interface ReadableWritablePair { - /** - * Provides a convenient, chainable way of piping this readable stream through a transform stream (or any other { 
writable, readable } pair). It simply pipes the stream into the writable side of the supplied pair, and returns the readable side for further use. - * - * Piping a stream will lock it for the duration of the pipe, preventing any other consumer from acquiring a reader. - */ - writable: WritableStream; - readable: ReadableStream; -} -/** - * This Streams API interface provides a standard abstraction for writing streaming data to a destination, known as a sink. This object comes with built-in backpressure and queuing. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream) - */ -declare class WritableStream { - constructor(underlyingSink?: UnderlyingSink, queuingStrategy?: QueuingStrategy); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/locked) */ - get locked(): boolean; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/abort) */ - abort(reason?: any): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/close) */ - close(): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStream/getWriter) */ - getWriter(): WritableStreamDefaultWriter; -} -/** - * This Streams API interface is the object returned by WritableStream.getWriter() and once created locks the < writer to the WritableStream ensuring that no other streams can write to the underlying sink. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter) - */ -declare class WritableStreamDefaultWriter { - constructor(stream: WritableStream); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/closed) */ - get closed(): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/ready) */ - get ready(): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/desiredSize) */ - get desiredSize(): number | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/abort) */ - abort(reason?: any): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/close) */ - close(): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/write) */ - write(chunk?: W): Promise; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WritableStreamDefaultWriter/releaseLock) */ - releaseLock(): void; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream) */ -declare class TransformStream { - constructor(transformer?: Transformer, writableStrategy?: QueuingStrategy, readableStrategy?: QueuingStrategy); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream/readable) */ - get readable(): ReadableStream; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TransformStream/writable) */ - get writable(): WritableStream; -} -declare class FixedLengthStream extends IdentityTransformStream { - constructor(expectedLength: number | bigint, queuingStrategy?: IdentityTransformStreamQueuingStrategy); -} -declare class IdentityTransformStream extends TransformStream { - constructor(queuingStrategy?: IdentityTransformStreamQueuingStrategy); -} -interface IdentityTransformStreamQueuingStrategy { - highWaterMark?: (number | bigint); -} 
-interface ReadableStreamValuesOptions { - preventCancel?: boolean; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CompressionStream) */ -declare class CompressionStream extends TransformStream { - constructor(format: "gzip" | "deflate" | "deflate-raw"); -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/DecompressionStream) */ -declare class DecompressionStream extends TransformStream { - constructor(format: "gzip" | "deflate" | "deflate-raw"); -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextEncoderStream) */ -declare class TextEncoderStream extends TransformStream { - constructor(); - get encoding(): string; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoderStream) */ -declare class TextDecoderStream extends TransformStream { - constructor(label?: string, options?: TextDecoderStreamTextDecoderStreamInit); - get encoding(): string; - get fatal(): boolean; - get ignoreBOM(): boolean; -} -interface TextDecoderStreamTextDecoderStreamInit { - fatal?: boolean; - ignoreBOM?: boolean; -} -/** - * This Streams API interface provides a built-in byte length queuing strategy that can be used when constructing streams. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy) - */ -declare class ByteLengthQueuingStrategy implements QueuingStrategy { - constructor(init: QueuingStrategyInit); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy/highWaterMark) */ - get highWaterMark(): number; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/ByteLengthQueuingStrategy/size) */ - get size(): (chunk?: any) => number; -} -/** - * This Streams API interface provides a built-in byte length queuing strategy that can be used when constructing streams. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy) - */ -declare class CountQueuingStrategy implements QueuingStrategy { - constructor(init: QueuingStrategyInit); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy/highWaterMark) */ - get highWaterMark(): number; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CountQueuingStrategy/size) */ - get size(): (chunk?: any) => number; -} -interface QueuingStrategyInit { - /** - * Creates a new ByteLengthQueuingStrategy with the provided high water mark. - * - * Note that the provided high water mark will not be validated ahead of time. Instead, if it is negative, NaN, or not a number, the resulting ByteLengthQueuingStrategy will cause the corresponding stream constructor to throw. - */ - highWaterMark: number; -} -interface ScriptVersion { - id?: string; - tag?: string; - message?: string; -} -declare abstract class TailEvent extends ExtendableEvent { - readonly events: TraceItem[]; - readonly traces: TraceItem[]; -} -interface TraceItem { - readonly event: (TraceItemFetchEventInfo | TraceItemJsRpcEventInfo | TraceItemScheduledEventInfo | TraceItemAlarmEventInfo | TraceItemQueueEventInfo | TraceItemEmailEventInfo | TraceItemTailEventInfo | TraceItemCustomEventInfo | TraceItemHibernatableWebSocketEventInfo) | null; - readonly eventTimestamp: number | null; - readonly logs: TraceLog[]; - readonly exceptions: TraceException[]; - readonly diagnosticsChannelEvents: TraceDiagnosticChannelEvent[]; - readonly scriptName: string | null; - readonly entrypoint?: string; - readonly scriptVersion?: ScriptVersion; - readonly dispatchNamespace?: string; - readonly scriptTags?: string[]; - readonly outcome: string; - readonly executionModel: string; - readonly truncated: boolean; - readonly cpuTime: number; - readonly wallTime: number; -} -interface TraceItemAlarmEventInfo { - readonly scheduledTime: Date; -} -interface TraceItemCustomEventInfo { -} 
-interface TraceItemScheduledEventInfo { - readonly scheduledTime: number; - readonly cron: string; -} -interface TraceItemQueueEventInfo { - readonly queue: string; - readonly batchSize: number; -} -interface TraceItemEmailEventInfo { - readonly mailFrom: string; - readonly rcptTo: string; - readonly rawSize: number; -} -interface TraceItemTailEventInfo { - readonly consumedEvents: TraceItemTailEventInfoTailItem[]; -} -interface TraceItemTailEventInfoTailItem { - readonly scriptName: string | null; -} -interface TraceItemFetchEventInfo { - readonly response?: TraceItemFetchEventInfoResponse; - readonly request: TraceItemFetchEventInfoRequest; -} -interface TraceItemFetchEventInfoRequest { - readonly cf?: any; - readonly headers: Record; - readonly method: string; - readonly url: string; - getUnredacted(): TraceItemFetchEventInfoRequest; -} -interface TraceItemFetchEventInfoResponse { - readonly status: number; -} -interface TraceItemJsRpcEventInfo { - readonly rpcMethod: string; -} -interface TraceItemHibernatableWebSocketEventInfo { - readonly getWebSocketEvent: TraceItemHibernatableWebSocketEventInfoMessage | TraceItemHibernatableWebSocketEventInfoClose | TraceItemHibernatableWebSocketEventInfoError; -} -interface TraceItemHibernatableWebSocketEventInfoMessage { - readonly webSocketEventType: string; -} -interface TraceItemHibernatableWebSocketEventInfoClose { - readonly webSocketEventType: string; - readonly code: number; - readonly wasClean: boolean; -} -interface TraceItemHibernatableWebSocketEventInfoError { - readonly webSocketEventType: string; -} -interface TraceLog { - readonly timestamp: number; - readonly level: string; - readonly message: any; -} -interface TraceException { - readonly timestamp: number; - readonly message: string; - readonly name: string; - readonly stack?: string; -} -interface TraceDiagnosticChannelEvent { - readonly timestamp: number; - readonly channel: string; - readonly message: any; -} -interface TraceMetrics { - readonly 
cpuTime: number; - readonly wallTime: number; -} -interface UnsafeTraceMetrics { - fromTrace(item: TraceItem): TraceMetrics; -} -/** - * The URL interface represents an object providing static methods used for creating object URLs. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL) - */ -declare class URL { - constructor(url: string | URL, base?: string | URL); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/origin) */ - get origin(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/href) */ - get href(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/href) */ - set href(value: string); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/protocol) */ - get protocol(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/protocol) */ - set protocol(value: string); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/username) */ - get username(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/username) */ - set username(value: string); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/password) */ - get password(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/password) */ - set password(value: string); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/host) */ - get host(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/host) */ - set host(value: string); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hostname) */ - get hostname(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hostname) */ - set hostname(value: string); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/port) */ - get port(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/port) */ - set port(value: string); - /* [MDN 
Reference](https://developer.mozilla.org/docs/Web/API/URL/pathname) */ - get pathname(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/pathname) */ - set pathname(value: string); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/search) */ - get search(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/search) */ - set search(value: string); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hash) */ - get hash(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/hash) */ - set hash(value: string); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/searchParams) */ - get searchParams(): URLSearchParams; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/toJSON) */ - toJSON(): string; - /*function toString() { [native code] }*/ - toString(): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/canParse_static) */ - static canParse(url: string, base?: string): boolean; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/parse_static) */ - static parse(url: string, base?: string): URL | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/createObjectURL_static) */ - static createObjectURL(object: File | Blob): string; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URL/revokeObjectURL_static) */ - static revokeObjectURL(object_url: string): void; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams) */ -declare class URLSearchParams { - constructor(init?: (Iterable> | Record | string)); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/size) */ - get size(): number; - /** - * Appends a specified key/value pair as a new search parameter. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/append) - */ - append(name: string, value: string): void; - /** - * Deletes the given search parameter, and its associated value, from the list of all search parameters. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/delete) - */ - delete(name: string, value?: string): void; - /** - * Returns the first value associated to the given search parameter. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/get) - */ - get(name: string): string | null; - /** - * Returns all the values association with a given search parameter. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/getAll) - */ - getAll(name: string): string[]; - /** - * Returns a Boolean indicating if such a search parameter exists. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/has) - */ - has(name: string, value?: string): boolean; - /** - * Sets the value associated to a given search parameter to the given value. If there were several values, delete the others. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/set) - */ - set(name: string, value: string): void; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/URLSearchParams/sort) */ - sort(): void; - /* Returns an array of key, value pairs for every entry in the search params. */ - entries(): IterableIterator<[ - key: string, - value: string - ]>; - /* Returns a list of keys in the search params. */ - keys(): IterableIterator; - /* Returns a list of values in the search params. */ - values(): IterableIterator; - forEach(callback: (this: This, value: string, key: string, parent: URLSearchParams) => void, thisArg?: This): void; - /*function toString() { [native code] } Returns a string containing a query string suitable for use in a URL. Does not include the question mark. 
*/ - toString(): string; - [Symbol.iterator](): IterableIterator<[ - key: string, - value: string - ]>; -} -declare class URLPattern { - constructor(input?: (string | URLPatternURLPatternInit), baseURL?: (string | URLPatternURLPatternOptions), patternOptions?: URLPatternURLPatternOptions); - get protocol(): string; - get username(): string; - get password(): string; - get hostname(): string; - get port(): string; - get pathname(): string; - get search(): string; - get hash(): string; - test(input?: (string | URLPatternURLPatternInit), baseURL?: string): boolean; - exec(input?: (string | URLPatternURLPatternInit), baseURL?: string): URLPatternURLPatternResult | null; -} -interface URLPatternURLPatternInit { - protocol?: string; - username?: string; - password?: string; - hostname?: string; - port?: string; - pathname?: string; - search?: string; - hash?: string; - baseURL?: string; -} -interface URLPatternURLPatternComponentResult { - input: string; - groups: Record; -} -interface URLPatternURLPatternResult { - inputs: (string | URLPatternURLPatternInit)[]; - protocol: URLPatternURLPatternComponentResult; - username: URLPatternURLPatternComponentResult; - password: URLPatternURLPatternComponentResult; - hostname: URLPatternURLPatternComponentResult; - port: URLPatternURLPatternComponentResult; - pathname: URLPatternURLPatternComponentResult; - search: URLPatternURLPatternComponentResult; - hash: URLPatternURLPatternComponentResult; -} -interface URLPatternURLPatternOptions { - ignoreCase?: boolean; -} -/** - * A CloseEvent is sent to clients using WebSockets when the connection is closed. This is delivered to the listener indicated by the WebSocket object's onclose attribute. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent) - */ -declare class CloseEvent extends Event { - constructor(type: string, initializer?: CloseEventInit); - /** - * Returns the WebSocket connection close code provided by the server. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/code) - */ - readonly code: number; - /** - * Returns the WebSocket connection close reason provided by the server. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/reason) - */ - readonly reason: string; - /** - * Returns true if the connection closed cleanly; false otherwise. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/CloseEvent/wasClean) - */ - readonly wasClean: boolean; -} -interface CloseEventInit { - code?: number; - reason?: string; - wasClean?: boolean; -} -/** - * A message received by a target object. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent) - */ -declare class MessageEvent extends Event { - constructor(type: string, initializer: MessageEventInit); - /** - * Returns the data of the message. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/MessageEvent/data) - */ - readonly data: ArrayBuffer | string; -} -interface MessageEventInit { - data: ArrayBuffer | string; -} -type WebSocketEventMap = { - close: CloseEvent; - message: MessageEvent; - open: Event; - error: ErrorEvent; -}; -/** - * Provides the API for creating and managing a WebSocket connection to a server, as well as for sending and receiving data on the connection. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket) - */ -declare var WebSocket: { - prototype: WebSocket; - new (url: string, protocols?: (string[] | string)): WebSocket; - readonly READY_STATE_CONNECTING: number; - readonly CONNECTING: number; - readonly READY_STATE_OPEN: number; - readonly OPEN: number; - readonly READY_STATE_CLOSING: number; - readonly CLOSING: number; - readonly READY_STATE_CLOSED: number; - readonly CLOSED: number; -}; -/** - * Provides the API for creating and managing a WebSocket connection to a server, as well as for sending and receiving data on the connection. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket) - */ -interface WebSocket extends EventTarget { - accept(): void; - /** - * Transmits data using the WebSocket connection. data can be a string, a Blob, an ArrayBuffer, or an ArrayBufferView. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/send) - */ - send(message: (ArrayBuffer | ArrayBufferView) | string): void; - /** - * Closes the WebSocket connection, optionally using code as the the WebSocket connection close code and reason as the the WebSocket connection close reason. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/close) - */ - close(code?: number, reason?: string): void; - serializeAttachment(attachment: any): void; - deserializeAttachment(): any | null; - /** - * Returns the state of the WebSocket object's connection. It can have the values described below. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/readyState) - */ - readyState: number; - /** - * Returns the URL that was used to establish the WebSocket connection. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/url) - */ - url: string | null; - /** - * Returns the subprotocol selected by the server, if any. It can be used in conjunction with the array form of the constructor's second argument to perform subprotocol negotiation. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/protocol) - */ - protocol: string | null; - /** - * Returns the extensions selected by the server, if any. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebSocket/extensions) - */ - extensions: string | null; -} -declare const WebSocketPair: { - new (): { - 0: WebSocket; - 1: WebSocket; - }; -}; -interface SqlStorage { - exec>(query: string, ...bindings: any[]): SqlStorageCursor; - get databaseSize(): number; - Cursor: typeof SqlStorageCursor; - Statement: typeof SqlStorageStatement; -} -declare abstract class SqlStorageStatement { -} -type SqlStorageValue = ArrayBuffer | string | number | null; -declare abstract class SqlStorageCursor> { - next(): { - done?: false; - value: T; - } | { - done: true; - value?: never; - }; - toArray(): T[]; - one(): T; - raw(): IterableIterator; - columnNames: string[]; - get rowsRead(): number; - get rowsWritten(): number; - [Symbol.iterator](): IterableIterator; -} -interface Socket { - get readable(): ReadableStream; - get writable(): WritableStream; - get closed(): Promise; - get opened(): Promise; - close(): Promise; - startTls(options?: TlsOptions): Socket; -} -interface SocketOptions { - secureTransport?: string; - allowHalfOpen: boolean; - highWaterMark?: (number | bigint); -} -interface SocketAddress { - hostname: string; - port: number; -} -interface TlsOptions { - expectedServerHostname?: string; -} -interface SocketInfo { - remoteAddress?: string; - localAddress?: string; -} -/* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource) */ -declare class EventSource extends EventTarget { - constructor(url: string, init?: EventSourceEventSourceInit); - /** - * Aborts any instances of the fetch algorithm started for this EventSource object, and sets the readyState attribute to CLOSED. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/close) - */ - close(): void; - /** - * Returns the URL providing the event stream. 
- * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/url) - */ - get url(): string; - /** - * Returns true if the credentials mode for connection requests to the URL providing the event stream is set to "include", and false otherwise. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/withCredentials) - */ - get withCredentials(): boolean; - /** - * Returns the state of this EventSource object's connection. It can have the values described below. - * - * [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/readyState) - */ - get readyState(): number; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/open_event) */ - get onopen(): any | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/open_event) */ - set onopen(value: any | null); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/message_event) */ - get onmessage(): any | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/message_event) */ - set onmessage(value: any | null); - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/error_event) */ - get onerror(): any | null; - /* [MDN Reference](https://developer.mozilla.org/docs/Web/API/EventSource/error_event) */ - set onerror(value: any | null); - static readonly CONNECTING: number; - static readonly OPEN: number; - static readonly CLOSED: number; - static from(stream: ReadableStream): EventSource; -} -interface EventSourceEventSourceInit { - withCredentials?: boolean; - fetcher?: Fetcher; -} -interface Container { - get running(): boolean; - start(options?: ContainerStartupOptions): void; - monitor(): Promise; - destroy(error?: any): Promise; - signal(signo: number): void; - getTcpPort(port: number): Fetcher; -} -interface ContainerStartupOptions { - entrypoint?: string[]; - enableInternet: boolean; - env?: Record; -} -type AiImageClassificationInput = { - 
image: number[]; -}; -type AiImageClassificationOutput = { - score?: number; - label?: string; -}[]; -declare abstract class BaseAiImageClassification { - inputs: AiImageClassificationInput; - postProcessedOutputs: AiImageClassificationOutput; -} -type AiImageToTextInput = { - image: number[]; - prompt?: string; - max_tokens?: number; - temperature?: number; - top_p?: number; - top_k?: number; - seed?: number; - repetition_penalty?: number; - frequency_penalty?: number; - presence_penalty?: number; - raw?: boolean; - messages?: RoleScopedChatInput[]; -}; -type AiImageToTextOutput = { - description: string; -}; -declare abstract class BaseAiImageToText { - inputs: AiImageToTextInput; - postProcessedOutputs: AiImageToTextOutput; -} -type AiImageTextToTextInput = { - image: string; - prompt?: string; - max_tokens?: number; - temperature?: number; - ignore_eos?: boolean; - top_p?: number; - top_k?: number; - seed?: number; - repetition_penalty?: number; - frequency_penalty?: number; - presence_penalty?: number; - raw?: boolean; - messages?: RoleScopedChatInput[]; -}; -type AiImageTextToTextOutput = { - description: string; -}; -declare abstract class BaseAiImageTextToText { - inputs: AiImageTextToTextInput; - postProcessedOutputs: AiImageTextToTextOutput; -} -type AiObjectDetectionInput = { - image: number[]; -}; -type AiObjectDetectionOutput = { - score?: number; - label?: string; -}[]; -declare abstract class BaseAiObjectDetection { - inputs: AiObjectDetectionInput; - postProcessedOutputs: AiObjectDetectionOutput; -} -type AiSentenceSimilarityInput = { - source: string; - sentences: string[]; -}; -type AiSentenceSimilarityOutput = number[]; -declare abstract class BaseAiSentenceSimilarity { - inputs: AiSentenceSimilarityInput; - postProcessedOutputs: AiSentenceSimilarityOutput; -} -type AiAutomaticSpeechRecognitionInput = { - audio: number[]; -}; -type AiAutomaticSpeechRecognitionOutput = { - text?: string; - words?: { - word: string; - start: number; - end: number; 
- }[]; - vtt?: string; -}; -declare abstract class BaseAiAutomaticSpeechRecognition { - inputs: AiAutomaticSpeechRecognitionInput; - postProcessedOutputs: AiAutomaticSpeechRecognitionOutput; -} -type AiSummarizationInput = { - input_text: string; - max_length?: number; -}; -type AiSummarizationOutput = { - summary: string; -}; -declare abstract class BaseAiSummarization { - inputs: AiSummarizationInput; - postProcessedOutputs: AiSummarizationOutput; -} -type AiTextClassificationInput = { - text: string; -}; -type AiTextClassificationOutput = { - score?: number; - label?: string; -}[]; -declare abstract class BaseAiTextClassification { - inputs: AiTextClassificationInput; - postProcessedOutputs: AiTextClassificationOutput; -} -type AiTextEmbeddingsInput = { - text: string | string[]; -}; -type AiTextEmbeddingsOutput = { - shape: number[]; - data: number[][]; -}; -declare abstract class BaseAiTextEmbeddings { - inputs: AiTextEmbeddingsInput; - postProcessedOutputs: AiTextEmbeddingsOutput; -} -type RoleScopedChatInput = { - role: "user" | "assistant" | "system" | "tool" | (string & NonNullable); - content: string; - name?: string; -}; -type AiTextGenerationToolLegacyInput = { - name: string; - description: string; - parameters?: { - type: "object" | (string & NonNullable); - properties: { - [key: string]: { - type: string; - description?: string; - }; - }; - required: string[]; - }; -}; -type AiTextGenerationToolInput = { - type: "function" | (string & NonNullable); - function: { - name: string; - description: string; - parameters?: { - type: "object" | (string & NonNullable); - properties: { - [key: string]: { - type: string; - description?: string; - }; - }; - required: string[]; - }; - }; -}; -type AiTextGenerationFunctionsInput = { - name: string; - code: string; -}; -type AiTextGenerationResponseFormat = { - type: string; - json_schema?: any; -}; -type AiTextGenerationInput = { - prompt?: string; - raw?: boolean; - stream?: boolean; - max_tokens?: number; - 
temperature?: number; - top_p?: number; - top_k?: number; - seed?: number; - repetition_penalty?: number; - frequency_penalty?: number; - presence_penalty?: number; - messages?: RoleScopedChatInput[]; - response_format?: AiTextGenerationResponseFormat; - tools?: AiTextGenerationToolInput[] | AiTextGenerationToolLegacyInput[] | (object & NonNullable); - functions?: AiTextGenerationFunctionsInput[]; -}; -type AiTextGenerationOutput = { - response?: string; - tool_calls?: { - name: string; - arguments: unknown; - }[]; -} | ReadableStream; -declare abstract class BaseAiTextGeneration { - inputs: AiTextGenerationInput; - postProcessedOutputs: AiTextGenerationOutput; -} -type AiTextToSpeechInput = { - prompt: string; - lang?: string; -}; -type AiTextToSpeechOutput = Uint8Array | { - audio: string; -}; -declare abstract class BaseAiTextToSpeech { - inputs: AiTextToSpeechInput; - postProcessedOutputs: AiTextToSpeechOutput; -} -type AiTextToImageInput = { - prompt: string; - negative_prompt?: string; - height?: number; - width?: number; - image?: number[]; - image_b64?: string; - mask?: number[]; - num_steps?: number; - strength?: number; - guidance?: number; - seed?: number; -}; -type AiTextToImageOutput = ReadableStream; -declare abstract class BaseAiTextToImage { - inputs: AiTextToImageInput; - postProcessedOutputs: AiTextToImageOutput; -} -type AiTranslationInput = { - text: string; - target_lang: string; - source_lang?: string; -}; -type AiTranslationOutput = { - translated_text?: string; -}; -declare abstract class BaseAiTranslation { - inputs: AiTranslationInput; - postProcessedOutputs: AiTranslationOutput; -} -type Ai_Cf_Openai_Whisper_Input = string | { - /** - * An array of integers that represent the audio data constrained to 8-bit unsigned integer values - */ - audio: number[]; -}; -interface Ai_Cf_Openai_Whisper_Output { - /** - * The transcription - */ - text: string; - word_count?: number; - words?: { - word?: string; - /** - * The second this word begins in 
the recording - */ - start?: number; - /** - * The ending second when the word completes - */ - end?: number; - }[]; - vtt?: string; -} -declare abstract class Base_Ai_Cf_Openai_Whisper { - inputs: Ai_Cf_Openai_Whisper_Input; - postProcessedOutputs: Ai_Cf_Openai_Whisper_Output; -} -type Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Input = string | { - /** - * The input text prompt for the model to generate a response. - */ - prompt?: string; - /** - * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. - */ - raw?: boolean; - /** - * Controls the creativity of the AI's responses by adjusting how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. - */ - top_p?: number; - /** - * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. - */ - top_k?: number; - /** - * Random seed for reproducibility of the generation. - */ - seed?: number; - /** - * Penalty for repeated tokens; higher values discourage repetition. - */ - repetition_penalty?: number; - /** - * Decreases the likelihood of the model repeating the same lines verbatim. - */ - frequency_penalty?: number; - /** - * Increases the likelihood of the model introducing new topics. - */ - presence_penalty?: number; - image: number[] | (string & NonNullable); - /** - * The maximum number of tokens to generate in the response. 
- */ - max_tokens?: number; -}; -interface Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Output { - description?: string; -} -declare abstract class Base_Ai_Cf_Unum_Uform_Gen2_Qwen_500M { - inputs: Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Input; - postProcessedOutputs: Ai_Cf_Unum_Uform_Gen2_Qwen_500M_Output; -} -type Ai_Cf_Openai_Whisper_Tiny_En_Input = string | { - /** - * An array of integers that represent the audio data constrained to 8-bit unsigned integer values - */ - audio: number[]; -}; -interface Ai_Cf_Openai_Whisper_Tiny_En_Output { - /** - * The transcription - */ - text: string; - word_count?: number; - words?: { - word?: string; - /** - * The second this word begins in the recording - */ - start?: number; - /** - * The ending second when the word completes - */ - end?: number; - }[]; - vtt?: string; -} -declare abstract class Base_Ai_Cf_Openai_Whisper_Tiny_En { - inputs: Ai_Cf_Openai_Whisper_Tiny_En_Input; - postProcessedOutputs: Ai_Cf_Openai_Whisper_Tiny_En_Output; -} -interface Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input { - /** - * Base64 encoded value of the audio data. - */ - audio: string; - /** - * Supported tasks are 'translate' or 'transcribe'. - */ - task?: string; - /** - * The language of the audio being transcribed or translated. - */ - language?: string; - /** - * Preprocess the audio with a voice activity detection model. - */ - vad_filter?: string; - /** - * A text prompt to help provide context to the model on the contents of the audio. - */ - initial_prompt?: string; - /** - * The prefix it appended the the beginning of the output of the transcription and can guide the transcription result. - */ - prefix?: string; -} -interface Ai_Cf_Openai_Whisper_Large_V3_Turbo_Output { - transcription_info?: { - /** - * The language of the audio being transcribed or translated. - */ - language?: string; - /** - * The confidence level or probability of the detected language being accurate, represented as a decimal between 0 and 1. 
- */ - language_probability?: number; - /** - * The total duration of the original audio file, in seconds. - */ - duration?: number; - /** - * The duration of the audio after applying Voice Activity Detection (VAD) to remove silent or irrelevant sections, in seconds. - */ - duration_after_vad?: number; - }; - /** - * The complete transcription of the audio. - */ - text: string; - /** - * The total number of words in the transcription. - */ - word_count?: number; - segments?: { - /** - * The starting time of the segment within the audio, in seconds. - */ - start?: number; - /** - * The ending time of the segment within the audio, in seconds. - */ - end?: number; - /** - * The transcription of the segment. - */ - text?: string; - /** - * The temperature used in the decoding process, controlling randomness in predictions. Lower values result in more deterministic outputs. - */ - temperature?: number; - /** - * The average log probability of the predictions for the words in this segment, indicating overall confidence. - */ - avg_logprob?: number; - /** - * The compression ratio of the input to the output, measuring how much the text was compressed during the transcription process. - */ - compression_ratio?: number; - /** - * The probability that the segment contains no speech, represented as a decimal between 0 and 1. - */ - no_speech_prob?: number; - words?: { - /** - * The individual word transcribed from the audio. - */ - word?: string; - /** - * The starting time of the word within the audio, in seconds. - */ - start?: number; - /** - * The ending time of the word within the audio, in seconds. - */ - end?: number; - }[]; - }[]; - /** - * The transcription in WebVTT format, which includes timing and text information for use in subtitles. 
- */ - vtt?: string; -} -declare abstract class Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo { - inputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input; - postProcessedOutputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Output; -} -type Ai_Cf_Baai_Bge_M3_Input = BGEM3InputQueryAndContexts | BGEM3InputEmbedding; -interface BGEM3InputQueryAndContexts { - /** - * A query you wish to perform against the provided contexts. If no query is provided the model with respond with embeddings for contexts - */ - query?: string; - /** - * List of provided contexts. Note that the index in this array is important, as the response will refer to it. - */ - contexts: { - /** - * One of the provided context content - */ - text?: string; - }[]; - /** - * When provided with too long context should the model error out or truncate the context to fit? - */ - truncate_inputs?: boolean; -} -interface BGEM3InputEmbedding { - text: string | string[]; - /** - * When provided with too long context should the model error out or truncate the context to fit? - */ - truncate_inputs?: boolean; -} -type Ai_Cf_Baai_Bge_M3_Output = BGEM3OuputQuery | BGEM3OutputEmbeddingForContexts | BGEM3OuputEmbedding; -interface BGEM3OuputQuery { - response?: { - /** - * Index of the context in the request - */ - id?: number; - /** - * Score of the context under the index. - */ - score?: number; - }[]; -} -interface BGEM3OutputEmbeddingForContexts { - response?: number[][]; - shape?: number[]; - /** - * The pooling method used in the embedding process. - */ - pooling?: "mean" | "cls"; -} -interface BGEM3OuputEmbedding { - shape?: number[]; - /** - * Embeddings of the requested text values - */ - data?: number[][]; - /** - * The pooling method used in the embedding process. 
- */ - pooling?: "mean" | "cls"; -} -declare abstract class Base_Ai_Cf_Baai_Bge_M3 { - inputs: Ai_Cf_Baai_Bge_M3_Input; - postProcessedOutputs: Ai_Cf_Baai_Bge_M3_Output; -} -interface Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Input { - /** - * A text description of the image you want to generate. - */ - prompt: string; - /** - * The number of diffusion steps; higher values can improve quality but take longer. - */ - steps?: number; -} -interface Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Output { - /** - * The generated image in Base64 format. - */ - image?: string; -} -declare abstract class Base_Ai_Cf_Black_Forest_Labs_Flux_1_Schnell { - inputs: Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Input; - postProcessedOutputs: Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Output; -} -type Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Input = Prompt | Messages; -interface Prompt { - /** - * The input text prompt for the model to generate a response. - */ - prompt: string; - image?: number[] | (string & NonNullable); - /** - * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. - */ - raw?: boolean; - /** - * If true, the response will be streamed back incrementally using SSE, Server Sent Events. - */ - stream?: boolean; - /** - * The maximum number of tokens to generate in the response. - */ - max_tokens?: number; - /** - * Controls the randomness of the output; higher values produce more random results. - */ - temperature?: number; - /** - * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. - */ - top_p?: number; - /** - * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. - */ - top_k?: number; - /** - * Random seed for reproducibility of the generation. 
- */ - seed?: number; - /** - * Penalty for repeated tokens; higher values discourage repetition. - */ - repetition_penalty?: number; - /** - * Decreases the likelihood of the model repeating the same lines verbatim. - */ - frequency_penalty?: number; - /** - * Increases the likelihood of the model introducing new topics. - */ - presence_penalty?: number; - /** - * Name of the LoRA (Low-Rank Adaptation) model to fine-tune the base model. - */ - lora?: string; -} -interface Messages { - /** - * An array of message objects representing the conversation history. - */ - messages: { - /** - * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). - */ - role: string; - /** - * The content of the message as a string. - */ - content: string; - }[]; - image?: number[] | string; - functions?: { - name: string; - code: string; - }[]; - /** - * A list of tools available for the assistant to use. - */ - tools?: ({ - /** - * The name of the tool. More descriptive the better. - */ - name: string; - /** - * A brief description of what the tool does. - */ - description: string; - /** - * Schema defining the parameters accepted by the tool. - */ - parameters: { - /** - * The type of the parameters object (usually 'object'). - */ - type: string; - /** - * List of required parameter names. - */ - required?: string[]; - /** - * Definitions of each parameter. - */ - properties: { - [k: string]: { - /** - * The data type of the parameter. - */ - type: string; - /** - * A description of the expected parameter. - */ - description: string; - }; - }; - }; - } | { - /** - * Specifies the type of tool (e.g., 'function'). - */ - type: string; - /** - * Details of the function tool. - */ - function: { - /** - * The name of the function. - */ - name: string; - /** - * A brief description of what the function does. - */ - description: string; - /** - * Schema defining the parameters accepted by the function. 
- */ - parameters: { - /** - * The type of the parameters object (usually 'object'). - */ - type: string; - /** - * List of required parameter names. - */ - required?: string[]; - /** - * Definitions of each parameter. - */ - properties: { - [k: string]: { - /** - * The data type of the parameter. - */ - type: string; - /** - * A description of the expected parameter. - */ - description: string; - }; - }; - }; - }; - })[]; - /** - * If true, the response will be streamed back incrementally. - */ - stream?: boolean; - /** - * The maximum number of tokens to generate in the response. - */ - max_tokens?: number; - /** - * Controls the randomness of the output; higher values produce more random results. - */ - temperature?: number; - /** - * Controls the creativity of the AI's responses by adjusting how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. - */ - top_p?: number; - /** - * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. - */ - top_k?: number; - /** - * Random seed for reproducibility of the generation. - */ - seed?: number; - /** - * Penalty for repeated tokens; higher values discourage repetition. - */ - repetition_penalty?: number; - /** - * Decreases the likelihood of the model repeating the same lines verbatim. - */ - frequency_penalty?: number; - /** - * Increases the likelihood of the model introducing new topics. 
- */ - presence_penalty?: number; -} -type Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Output = { - /** - * The generated text response from the model - */ - response?: string; - /** - * An array of tool calls requests made during the response generation - */ - tool_calls?: { - /** - * The arguments passed to be passed to the tool call request - */ - arguments?: object; - /** - * The name of the tool to be called - */ - name?: string; - }[]; -} | ReadableStream; -declare abstract class Base_Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct { - inputs: Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Input; - postProcessedOutputs: Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct_Output; -} -interface Ai_Cf_Meta_Llama_Guard_3_8B_Input { - /** - * An array of message objects representing the conversation history. - */ - messages: { - /** - * The role of the message sender must alternate between 'user' and 'assistant'. - */ - role: "user" | "assistant"; - /** - * The content of the message as a string. - */ - content: string; - }[]; - /** - * The maximum number of tokens to generate in the response. - */ - max_tokens?: number; - /** - * Controls the randomness of the output; higher values produce more random results. - */ - temperature?: number; - /** - * Dictate the output format of the generated response. - */ - response_format?: { - /** - * Set to json_object to process and output generated text as JSON. - */ - type?: string; - }; -} -interface Ai_Cf_Meta_Llama_Guard_3_8B_Output { - response?: string | { - /** - * Whether the conversation is safe or not. - */ - safe?: boolean; - /** - * A list of what hazard categories predicted for the conversation, if the conversation is deemed unsafe. 
- */ - categories?: string[]; - }; - /** - * Usage statistics for the inference request - */ - usage?: { - /** - * Total number of tokens in input - */ - prompt_tokens?: number; - /** - * Total number of tokens in output - */ - completion_tokens?: number; - /** - * Total number of input and output tokens - */ - total_tokens?: number; - }; -} -declare abstract class Base_Ai_Cf_Meta_Llama_Guard_3_8B { - inputs: Ai_Cf_Meta_Llama_Guard_3_8B_Input; - postProcessedOutputs: Ai_Cf_Meta_Llama_Guard_3_8B_Output; -} -interface Ai_Cf_Baai_Bge_Reranker_Base_Input { - /** - * A query you wish to perform against the provided contexts. - */ - /** - * Number of returned results starting with the best score. - */ - top_k?: number; - /** - * List of provided contexts. Note that the index in this array is important, as the response will refer to it. - */ - contexts: { - /** - * One of the provided context content - */ - text?: string; - }[]; -} -interface Ai_Cf_Baai_Bge_Reranker_Base_Output { - response?: { - /** - * Index of the context in the request - */ - id?: number; - /** - * Score of the context under the index. - */ - score?: number; - }[]; -} -declare abstract class Base_Ai_Cf_Baai_Bge_Reranker_Base { - inputs: Ai_Cf_Baai_Bge_Reranker_Base_Input; - postProcessedOutputs: Ai_Cf_Baai_Bge_Reranker_Base_Output; -} -type Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Input = Ai_Cf_Meta_Llama_4_Prompt | Ai_Cf_Meta_Llama_4_Messages; -interface Ai_Cf_Meta_Llama_4_Prompt { - /** - * The input text prompt for the model to generate a response. - */ - prompt: string; - /** - * JSON schema that should be fulfilled for the response. - */ - guided_json?: object; - /** - * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. - */ - raw?: boolean; - /** - * If true, the response will be streamed back incrementally using SSE, Server Sent Events. - */ - stream?: boolean; - /** - * The maximum number of tokens to generate in the response. 
- */ - max_tokens?: number; - /** - * Controls the randomness of the output; higher values produce more random results. - */ - temperature?: number; - /** - * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. - */ - top_p?: number; - /** - * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. - */ - top_k?: number; - /** - * Random seed for reproducibility of the generation. - */ - seed?: number; - /** - * Penalty for repeated tokens; higher values discourage repetition. - */ - repetition_penalty?: number; - /** - * Decreases the likelihood of the model repeating the same lines verbatim. - */ - frequency_penalty?: number; - /** - * Increases the likelihood of the model introducing new topics. - */ - presence_penalty?: number; -} -interface Ai_Cf_Meta_Llama_4_Messages { - /** - * An array of message objects representing the conversation history. - */ - messages: { - /** - * The role of the message sender (e.g., 'user', 'assistant', 'system', 'tool'). - */ - role?: string; - /** - * The tool call id. Must be supplied for tool calls for Mistral-3. If you don't know what to put here you can fall back to 000000001 - */ - tool_call_id?: string; - content?: string | { - /** - * Type of the content provided - */ - type?: string; - text?: string; - image_url?: { - /** - * image uri with data (e.g. data:image/jpeg;base64,/9j/...). HTTP URL will not be accepted - */ - url?: string; - }; - }[] | { - /** - * Type of the content provided - */ - type?: string; - text?: string; - image_url?: { - /** - * image uri with data (e.g. data:image/jpeg;base64,/9j/...). 
HTTP URL will not be accepted - */ - url?: string; - }; - }; - }[]; - functions?: { - name: string; - code: string; - }[]; - /** - * A list of tools available for the assistant to use. - */ - tools?: ({ - /** - * The name of the tool. More descriptive the better. - */ - name: string; - /** - * A brief description of what the tool does. - */ - description: string; - /** - * Schema defining the parameters accepted by the tool. - */ - parameters: { - /** - * The type of the parameters object (usually 'object'). - */ - type: string; - /** - * List of required parameter names. - */ - required?: string[]; - /** - * Definitions of each parameter. - */ - properties: { - [k: string]: { - /** - * The data type of the parameter. - */ - type: string; - /** - * A description of the expected parameter. - */ - description: string; - }; - }; - }; - } | { - /** - * Specifies the type of tool (e.g., 'function'). - */ - type: string; - /** - * Details of the function tool. - */ - function: { - /** - * The name of the function. - */ - name: string; - /** - * A brief description of what the function does. - */ - description: string; - /** - * Schema defining the parameters accepted by the function. - */ - parameters: { - /** - * The type of the parameters object (usually 'object'). - */ - type: string; - /** - * List of required parameter names. - */ - required?: string[]; - /** - * Definitions of each parameter. - */ - properties: { - [k: string]: { - /** - * The data type of the parameter. - */ - type: string; - /** - * A description of the expected parameter. - */ - description: string; - }; - }; - }; - }; - })[]; - /** - * JSON schema that should be fufilled for the response. - */ - guided_json?: object; - /** - * If true, a chat template is not applied and you must adhere to the specific model's expected formatting. - */ - raw?: boolean; - /** - * If true, the response will be streamed back incrementally using SSE, Server Sent Events. 
- */ - stream?: boolean; - /** - * The maximum number of tokens to generate in the response. - */ - max_tokens?: number; - /** - * Controls the randomness of the output; higher values produce more random results. - */ - temperature?: number; - /** - * Adjusts the creativity of the AI's responses by controlling how many possible words it considers. Lower values make outputs more predictable; higher values allow for more varied and creative responses. - */ - top_p?: number; - /** - * Limits the AI to choose from the top 'k' most probable words. Lower values make responses more focused; higher values introduce more variety and potential surprises. - */ - top_k?: number; - /** - * Random seed for reproducibility of the generation. - */ - seed?: number; - /** - * Penalty for repeated tokens; higher values discourage repetition. - */ - repetition_penalty?: number; - /** - * Decreases the likelihood of the model repeating the same lines verbatim. - */ - frequency_penalty?: number; - /** - * Increases the likelihood of the model introducing new topics. 
- */ - presence_penalty?: number; -} -type Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Output = { - /** - * The generated text response from the model - */ - response: string; - /** - * Usage statistics for the inference request - */ - usage?: { - /** - * Total number of tokens in input - */ - prompt_tokens?: number; - /** - * Total number of tokens in output - */ - completion_tokens?: number; - /** - * Total number of input and output tokens - */ - total_tokens?: number; - }; - /** - * An array of tool calls requests made during the response generation - */ - tool_calls?: { - /** - * The arguments passed to be passed to the tool call request - */ - arguments?: object; - /** - * The name of the tool to be called - */ - name?: string; - }[]; -} | string; -declare abstract class Base_Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct { - inputs: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Input; - postProcessedOutputs: Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct_Output; -} -interface AiModels { - "@cf/huggingface/distilbert-sst-2-int8": BaseAiTextClassification; - "@cf/stabilityai/stable-diffusion-xl-base-1.0": BaseAiTextToImage; - "@cf/runwayml/stable-diffusion-v1-5-inpainting": BaseAiTextToImage; - "@cf/runwayml/stable-diffusion-v1-5-img2img": BaseAiTextToImage; - "@cf/lykon/dreamshaper-8-lcm": BaseAiTextToImage; - "@cf/bytedance/stable-diffusion-xl-lightning": BaseAiTextToImage; - "@cf/myshell-ai/melotts": BaseAiTextToSpeech; - "@cf/baai/bge-base-en-v1.5": BaseAiTextEmbeddings; - "@cf/baai/bge-small-en-v1.5": BaseAiTextEmbeddings; - "@cf/baai/bge-large-en-v1.5": BaseAiTextEmbeddings; - "@cf/microsoft/resnet-50": BaseAiImageClassification; - "@cf/facebook/detr-resnet-50": BaseAiObjectDetection; - "@cf/meta/llama-2-7b-chat-int8": BaseAiTextGeneration; - "@cf/mistral/mistral-7b-instruct-v0.1": BaseAiTextGeneration; - "@cf/meta/llama-2-7b-chat-fp16": BaseAiTextGeneration; - "@hf/thebloke/llama-2-13b-chat-awq": BaseAiTextGeneration; - "@hf/thebloke/mistral-7b-instruct-v0.1-awq": 
BaseAiTextGeneration; - "@hf/thebloke/zephyr-7b-beta-awq": BaseAiTextGeneration; - "@hf/thebloke/openhermes-2.5-mistral-7b-awq": BaseAiTextGeneration; - "@hf/thebloke/neural-chat-7b-v3-1-awq": BaseAiTextGeneration; - "@hf/thebloke/llamaguard-7b-awq": BaseAiTextGeneration; - "@hf/thebloke/deepseek-coder-6.7b-base-awq": BaseAiTextGeneration; - "@hf/thebloke/deepseek-coder-6.7b-instruct-awq": BaseAiTextGeneration; - "@cf/deepseek-ai/deepseek-math-7b-instruct": BaseAiTextGeneration; - "@cf/defog/sqlcoder-7b-2": BaseAiTextGeneration; - "@cf/openchat/openchat-3.5-0106": BaseAiTextGeneration; - "@cf/tiiuae/falcon-7b-instruct": BaseAiTextGeneration; - "@cf/thebloke/discolm-german-7b-v1-awq": BaseAiTextGeneration; - "@cf/qwen/qwen1.5-0.5b-chat": BaseAiTextGeneration; - "@cf/qwen/qwen1.5-7b-chat-awq": BaseAiTextGeneration; - "@cf/qwen/qwen1.5-14b-chat-awq": BaseAiTextGeneration; - "@cf/tinyllama/tinyllama-1.1b-chat-v1.0": BaseAiTextGeneration; - "@cf/microsoft/phi-2": BaseAiTextGeneration; - "@cf/qwen/qwen1.5-1.8b-chat": BaseAiTextGeneration; - "@cf/mistral/mistral-7b-instruct-v0.2-lora": BaseAiTextGeneration; - "@hf/nousresearch/hermes-2-pro-mistral-7b": BaseAiTextGeneration; - "@hf/nexusflow/starling-lm-7b-beta": BaseAiTextGeneration; - "@hf/google/gemma-7b-it": BaseAiTextGeneration; - "@cf/meta-llama/llama-2-7b-chat-hf-lora": BaseAiTextGeneration; - "@cf/google/gemma-2b-it-lora": BaseAiTextGeneration; - "@cf/google/gemma-7b-it-lora": BaseAiTextGeneration; - "@hf/mistral/mistral-7b-instruct-v0.2": BaseAiTextGeneration; - "@cf/meta/llama-3-8b-instruct": BaseAiTextGeneration; - "@cf/fblgit/una-cybertron-7b-v2-bf16": BaseAiTextGeneration; - "@cf/meta/llama-3-8b-instruct-awq": BaseAiTextGeneration; - "@hf/meta-llama/meta-llama-3-8b-instruct": BaseAiTextGeneration; - "@cf/meta/llama-3.1-8b-instruct": BaseAiTextGeneration; - "@cf/meta/llama-3.1-8b-instruct-fp8": BaseAiTextGeneration; - "@cf/meta/llama-3.1-8b-instruct-awq": BaseAiTextGeneration; - 
"@cf/meta/llama-3.2-3b-instruct": BaseAiTextGeneration; - "@cf/meta/llama-3.2-1b-instruct": BaseAiTextGeneration; - "@cf/meta/llama-3.3-70b-instruct-fp8-fast": BaseAiTextGeneration; - "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b": BaseAiTextGeneration; - "@cf/meta/m2m100-1.2b": BaseAiTranslation; - "@cf/facebook/bart-large-cnn": BaseAiSummarization; - "@cf/llava-hf/llava-1.5-7b-hf": BaseAiImageToText; - "@cf/openai/whisper": Base_Ai_Cf_Openai_Whisper; - "@cf/unum/uform-gen2-qwen-500m": Base_Ai_Cf_Unum_Uform_Gen2_Qwen_500M; - "@cf/openai/whisper-tiny-en": Base_Ai_Cf_Openai_Whisper_Tiny_En; - "@cf/openai/whisper-large-v3-turbo": Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo; - "@cf/baai/bge-m3": Base_Ai_Cf_Baai_Bge_M3; - "@cf/black-forest-labs/flux-1-schnell": Base_Ai_Cf_Black_Forest_Labs_Flux_1_Schnell; - "@cf/meta/llama-3.2-11b-vision-instruct": Base_Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct; - "@cf/meta/llama-guard-3-8b": Base_Ai_Cf_Meta_Llama_Guard_3_8B; - "@cf/baai/bge-reranker-base": Base_Ai_Cf_Baai_Bge_Reranker_Base; - "@cf/meta/llama-4-scout-17b-16e-instruct": Base_Ai_Cf_Meta_Llama_4_Scout_17B_16E_Instruct; -} -type AiOptions = { - gateway?: GatewayOptions; - returnRawResponse?: boolean; - prefix?: string; - extraHeaders?: object; -}; -type ConversionResponse = { - name: string; - mimeType: string; - format: "markdown"; - tokens: number; - data: string; -}; -type AiModelsSearchParams = { - author?: string; - hide_experimental?: boolean; - page?: number; - per_page?: number; - search?: string; - source?: number; - task?: string; -}; -type AiModelsSearchObject = { - id: string; - source: number; - name: string; - description: string; - task: { - id: string; - name: string; - description: string; - }; - tags: string[]; - properties: { - property_id: string; - value: string; - }[]; -}; -interface InferenceUpstreamError extends Error { -} -interface AiInternalError extends Error { -} -type AiModelListType = Record; -declare abstract class Ai { - aiGatewayLogId: string 
| null; - gateway(gatewayId: string): AiGateway; - autorag(autoragId: string): AutoRAG; - run(model: Name, inputs: AiModelList[Name]["inputs"], options?: Options): Promise; - models(params?: AiModelsSearchParams): Promise; - toMarkdown(files: { - name: string; - blob: Blob; - }[], options?: { - gateway?: GatewayOptions; - extraHeaders?: object; - }): Promise; - toMarkdown(files: { - name: string; - blob: Blob; - }, options?: { - gateway?: GatewayOptions; - extraHeaders?: object; - }): Promise; -} -type GatewayOptions = { - id: string; - cacheKey?: string; - cacheTtl?: number; - skipCache?: boolean; - metadata?: Record; - collectLog?: boolean; -}; -type AiGatewayPatchLog = { - score?: number | null; - feedback?: -1 | 1 | null; - metadata?: Record | null; -}; -type AiGatewayLog = { - id: string; - provider: string; - model: string; - model_type?: string; - path: string; - duration: number; - request_type?: string; - request_content_type?: string; - status_code: number; - response_content_type?: string; - success: boolean; - cached: boolean; - tokens_in?: number; - tokens_out?: number; - metadata?: Record; - step?: number; - cost?: number; - custom_cost?: boolean; - request_size: number; - request_head?: string; - request_head_complete: boolean; - response_size: number; - response_head?: string; - response_head_complete: boolean; - created_at: Date; -}; -type AIGatewayProviders = "workers-ai" | "anthropic" | "aws-bedrock" | "azure-openai" | "google-vertex-ai" | "huggingface" | "openai" | "perplexity-ai" | "replicate" | "groq" | "cohere" | "google-ai-studio" | "mistral" | "grok" | "openrouter" | "deepseek" | "cerebras" | "cartesia" | "elevenlabs" | "adobe-firefly"; -type AIGatewayHeaders = { - "cf-aig-metadata": Record | string; - "cf-aig-custom-cost": { - per_token_in?: number; - per_token_out?: number; - } | { - total_cost?: number; - } | string; - "cf-aig-cache-ttl": number | string; - "cf-aig-skip-cache": boolean | string; - "cf-aig-cache-key": string; - 
"cf-aig-collect-log": boolean | string; - Authorization: string; - "Content-Type": string; - [key: string]: string | number | boolean | object; -}; -type AIGatewayUniversalRequest = { - provider: AIGatewayProviders | string; // eslint-disable-line - endpoint: string; - headers: Partial; - query: unknown; -}; -interface AiGatewayInternalError extends Error { -} -interface AiGatewayLogNotFound extends Error { -} -declare abstract class AiGateway { - patchLog(logId: string, data: AiGatewayPatchLog): Promise; - getLog(logId: string): Promise; - run(data: AIGatewayUniversalRequest | AIGatewayUniversalRequest[]): Promise; - getUrl(provider?: AIGatewayProviders | string): Promise; // eslint-disable-line -} -interface AutoRAGInternalError extends Error { -} -interface AutoRAGNotFoundError extends Error { -} -interface AutoRAGUnauthorizedError extends Error { -} -type AutoRagSearchRequest = { - query: string; - max_num_results?: number; - ranking_options?: { - ranker?: string; - score_threshold?: number; - }; - rewrite_query?: boolean; -}; -type AutoRagSearchResponse = { - object: "vector_store.search_results.page"; - search_query: string; - data: { - file_id: string; - filename: string; - score: number; - attributes: Record; - content: { - type: "text"; - text: string; - }[]; - }[]; - has_more: boolean; - next_page: string | null; -}; -type AutoRagAiSearchResponse = AutoRagSearchResponse & { - response: string; -}; -declare abstract class AutoRAG { - search(params: AutoRagSearchRequest): Promise; - aiSearch(params: AutoRagSearchRequest): Promise; -} -interface BasicImageTransformations { - /** - * Maximum width in image pixels. The value must be an integer. - */ - width?: number; - /** - * Maximum height in image pixels. The value must be an integer. - */ - height?: number; - /** - * Resizing mode as a string. It affects interpretation of width and height - * options: - * - scale-down: Similar to contain, but the image is never enlarged. 
If - * the image is larger than given width or height, it will be resized. - * Otherwise its original size will be kept. - * - contain: Resizes to maximum size that fits within the given width and - * height. If only a single dimension is given (e.g. only width), the - * image will be shrunk or enlarged to exactly match that dimension. - * Aspect ratio is always preserved. - * - cover: Resizes (shrinks or enlarges) to fill the entire area of width - * and height. If the image has an aspect ratio different from the ratio - * of width and height, it will be cropped to fit. - * - crop: The image will be shrunk and cropped to fit within the area - * specified by width and height. The image will not be enlarged. For images - * smaller than the given dimensions it's the same as scale-down. For - * images larger than the given dimensions, it's the same as cover. - * See also trim. - * - pad: Resizes to the maximum size that fits within the given width and - * height, and then fills the remaining area with a background color - * (white by default). Use of this mode is not recommended, as the same - * effect can be more efficiently achieved with the contain mode and the - * CSS object-fit: contain property. - * - squeeze: Stretches and deforms to the width and height given, even if it - * breaks aspect ratio - */ - fit?: "scale-down" | "contain" | "cover" | "crop" | "pad" | "squeeze"; - /** - * When cropping with fit: "cover", this defines the side or point that should - * be left uncropped. The value is either a string - * "left", "right", "top", "bottom", "auto", or "center" (the default), - * or an object {x, y} containing focal point coordinates in the original - * image expressed as fractions ranging from 0.0 (top or left) to 1.0 - * (bottom or right), 0.5 being the center. {fit: "cover", gravity: "top"} will - * crop bottom or left and right sides as necessary, but won’t crop anything - * from the top. 
{fit: "cover", gravity: {x:0.5, y:0.2}} will crop each side to - * preserve as much as possible around a point at 20% of the height of the - * source image. - */ - gravity?: 'left' | 'right' | 'top' | 'bottom' | 'center' | 'auto' | 'entropy' | BasicImageTransformationsGravityCoordinates; - /** - * Background color to add underneath the image. Applies only to images with - * transparency (such as PNG). Accepts any CSS color (#RRGGBB, rgba(…), - * hsl(…), etc.) - */ - background?: string; - /** - * Number of degrees (90, 180, 270) to rotate the image by. width and height - * options refer to axes after rotation. - */ - rotate?: 0 | 90 | 180 | 270 | 360; -} -interface BasicImageTransformationsGravityCoordinates { - x?: number; - y?: number; - mode?: 'remainder' | 'box-center'; -} -/** - * In addition to the properties you can set in the RequestInit dict - * that you pass as an argument to the Request constructor, you can - * set certain properties of a `cf` object to control how Cloudflare - * features are applied to that new Request. - * - * Note: Currently, these properties cannot be tested in the - * playground. - */ -interface RequestInitCfProperties extends Record { - cacheEverything?: boolean; - /** - * A request's cache key is what determines if two requests are - * "the same" for caching purposes. If a request has the same cache key - * as some previous request, then we can serve the same cached response for - * both. (e.g. 'some-key') - * - * Only available for Enterprise customers. - */ - cacheKey?: string; - /** - * This allows you to append additional Cache-Tag response headers - * to the origin response without modifications to the origin server. - * This will allow for greater control over the Purge by Cache Tag feature - * utilizing changes only in the Workers process. - * - * Only available for Enterprise customers. - */ - cacheTags?: string[]; - /** - * Force response to be cached for a given number of seconds. (e.g. 
300) - */ - cacheTtl?: number; - /** - * Force response to be cached for a given number of seconds based on the Origin status code. - * (e.g. { '200-299': 86400, '404': 1, '500-599': 0 }) - */ - cacheTtlByStatus?: Record; - scrapeShield?: boolean; - apps?: boolean; - image?: RequestInitCfPropertiesImage; - minify?: RequestInitCfPropertiesImageMinify; - mirage?: boolean; - polish?: "lossy" | "lossless" | "off"; - r2?: RequestInitCfPropertiesR2; - /** - * Redirects the request to an alternate origin server. You can use this, - * for example, to implement load balancing across several origins. - * (e.g.us-east.example.com) - * - * Note - For security reasons, the hostname set in resolveOverride must - * be proxied on the same Cloudflare zone of the incoming request. - * Otherwise, the setting is ignored. CNAME hosts are allowed, so to - * resolve to a host under a different domain or a DNS only domain first - * declare a CNAME record within your own zone’s DNS mapping to the - * external hostname, set proxy on Cloudflare, then set resolveOverride - * to point to that CNAME record. - */ - resolveOverride?: string; -} -interface RequestInitCfPropertiesImageDraw extends BasicImageTransformations { - /** - * Absolute URL of the image file to use for the drawing. It can be any of - * the supported file formats. For drawing of watermarks or non-rectangular - * overlays we recommend using PNG or WebP images. - */ - url: string; - /** - * Floating-point number between 0 (transparent) and 1 (opaque). - * For example, opacity: 0.5 makes overlay semitransparent. - */ - opacity?: number; - /** - * - If set to true, the overlay image will be tiled to cover the entire - * area. This is useful for stock-photo-like watermarks. - * - If set to "x", the overlay image will be tiled horizontally only - * (form a line). - * - If set to "y", the overlay image will be tiled vertically only - * (form a line). 
- */ - repeat?: true | "x" | "y"; - /** - * Position of the overlay image relative to a given edge. Each property is - * an offset in pixels. 0 aligns exactly to the edge. For example, left: 10 - * positions left side of the overlay 10 pixels from the left edge of the - * image it's drawn over. bottom: 0 aligns bottom of the overlay with bottom - * of the background image. - * - * Setting both left & right, or both top & bottom is an error. - * - * If no position is specified, the image will be centered. - */ - top?: number; - left?: number; - bottom?: number; - right?: number; -} -interface RequestInitCfPropertiesImage extends BasicImageTransformations { - /** - * Device Pixel Ratio. Default 1. Multiplier for width/height that makes it - * easier to specify higher-DPI sizes in . - */ - dpr?: number; - /** - * Allows you to trim your image. Takes dpr into account and is performed before - * resizing or rotation. - * - * It can be used as: - * - left, top, right, bottom - it will specify the number of pixels to cut - * off each side - * - width, height - the width/height you'd like to end up with - can be used - * in combination with the properties above - * - border - this will automatically trim the surroundings of an image based on - * it's color. It consists of three properties: - * - color: rgb or hex representation of the color you wish to trim (todo: verify the rgba bit) - * - tolerance: difference from color to treat as color - * - keep: the number of pixels of border to keep - */ - trim?: "border" | { - top?: number; - bottom?: number; - left?: number; - right?: number; - width?: number; - height?: number; - border?: boolean | { - color?: string; - tolerance?: number; - keep?: number; - }; - }; - /** - * Quality setting from 1-100 (useful values are in 60-90 range). Lower values - * make images look worse, but load faster. The default is 85. It applies only - * to JPEG and WebP images. It doesn’t have any effect on PNG. 
- */ - quality?: number | "low" | "medium-low" | "medium-high" | "high"; - /** - * Output format to generate. It can be: - * - avif: generate images in AVIF format. - * - webp: generate images in Google WebP format. Set quality to 100 to get - * the WebP-lossless format. - * - json: instead of generating an image, outputs information about the - * image, in JSON format. The JSON object will contain image size - * (before and after resizing), source image’s MIME type, file size, etc. - * - jpeg: generate images in JPEG format. - * - png: generate images in PNG format. - */ - format?: "avif" | "webp" | "json" | "jpeg" | "png" | "baseline-jpeg" | "png-force" | "svg"; - /** - * Whether to preserve animation frames from input files. Default is true. - * Setting it to false reduces animations to still images. This setting is - * recommended when enlarging images or processing arbitrary user content, - * because large GIF animations can weigh tens or even hundreds of megabytes. - * It is also useful to set anim:false when using format:"json" to get the - * response quicker without the number of frames. - */ - anim?: boolean; - /** - * What EXIF data should be preserved in the output image. Note that EXIF - * rotation and embedded color profiles are always applied ("baked in" into - * the image), and aren't affected by this option. Note that if the Polish - * feature is enabled, all metadata may have been removed already and this - * option may have no effect. - * - keep: Preserve most of EXIF metadata, including GPS location if there's - * any. - * - copyright: Only keep the copyright tag, and discard everything else. - * This is the default behavior for JPEG files. - * - none: Discard all invisible EXIF metadata. Currently WebP and PNG - * output formats always discard metadata. - */ - metadata?: "keep" | "copyright" | "none"; - /** - * Strength of sharpening filter to apply to the image. Floating-point - * number between 0 (no sharpening, default) and 10 (maximum). 
1.0 is a - * recommended value for downscaled images. - */ - sharpen?: number; - /** - * Radius of a blur filter (approximate gaussian). Maximum supported radius - * is 250. - */ - blur?: number; - /** - * Overlays are drawn in the order they appear in the array (last array - * entry is the topmost layer). - */ - draw?: RequestInitCfPropertiesImageDraw[]; - /** - * Fetching image from authenticated origin. Setting this property will - * pass authentication headers (Authorization, Cookie, etc.) through to - * the origin. - */ - "origin-auth"?: "share-publicly"; - /** - * Adds a border around the image. The border is added after resizing. Border - * width takes dpr into account, and can be specified either using a single - * width property, or individually for each side. - */ - border?: { - color: string; - width: number; - } | { - color: string; - top: number; - right: number; - bottom: number; - left: number; - }; - /** - * Increase brightness by a factor. A value of 1.0 equals no change, a value - * of 0.5 equals half brightness, and a value of 2.0 equals twice as bright. - * 0 is ignored. - */ - brightness?: number; - /** - * Increase contrast by a factor. A value of 1.0 equals no change, a value of - * 0.5 equals low contrast, and a value of 2.0 equals high contrast. 0 is - * ignored. - */ - contrast?: number; - /** - * Increase exposure by a factor. A value of 1.0 equals no change, a value of - * 0.5 darkens the image, and a value of 2.0 lightens the image. 0 is ignored. - */ - gamma?: number; - /** - * Increase contrast by a factor. A value of 1.0 equals no change, a value of - * 0.5 equals low contrast, and a value of 2.0 equals high contrast. 0 is - * ignored. - */ - saturation?: number; - /** - * Flips the images horizontally, vertically, or both. Flipping is applied before - * rotation, so if you apply flip=h,rotate=90 then the image will be flipped - * horizontally, then rotated by 90 degrees. 
- */ - flip?: 'h' | 'v' | 'hv'; - /** - * Slightly reduces latency on a cache miss by selecting a - * quickest-to-compress file format, at a cost of increased file size and - * lower image quality. It will usually override the format option and choose - * JPEG over WebP or AVIF. We do not recommend using this option, except in - * unusual circumstances like resizing uncacheable dynamically-generated - * images. - */ - compression?: "fast"; -} -interface RequestInitCfPropertiesImageMinify { - javascript?: boolean; - css?: boolean; - html?: boolean; -} -interface RequestInitCfPropertiesR2 { - /** - * Colo id of bucket that an object is stored in - */ - bucketColoId?: number; -} -/** - * Request metadata provided by Cloudflare's edge. - */ -type IncomingRequestCfProperties = IncomingRequestCfPropertiesBase & IncomingRequestCfPropertiesBotManagementEnterprise & IncomingRequestCfPropertiesCloudflareForSaaSEnterprise & IncomingRequestCfPropertiesGeographicInformation & IncomingRequestCfPropertiesCloudflareAccessOrApiShield; -interface IncomingRequestCfPropertiesBase extends Record { - /** - * [ASN](https://www.iana.org/assignments/as-numbers/as-numbers.xhtml) of the incoming request. - * - * @example 395747 - */ - asn: number; - /** - * The organization which owns the ASN of the incoming request. - * - * @example "Google Cloud" - */ - asOrganization: string; - /** - * The original value of the `Accept-Encoding` header if Cloudflare modified it. - * - * @example "gzip, deflate, br" - */ - clientAcceptEncoding?: string; - /** - * The number of milliseconds it took for the request to reach your worker. - * - * @example 22 - */ - clientTcpRtt?: number; - /** - * The three-letter [IATA](https://en.wikipedia.org/wiki/IATA_airport_code) - * airport code of the data center that the request hit. 
- * - * @example "DFW" - */ - colo: string; - /** - * Represents the upstream's response to a - * [TCP `keepalive` message](https://tldp.org/HOWTO/TCP-Keepalive-HOWTO/overview.html) - * from cloudflare. - * - * For workers with no upstream, this will always be `1`. - * - * @example 3 - */ - edgeRequestKeepAliveStatus: IncomingRequestCfPropertiesEdgeRequestKeepAliveStatus; - /** - * The HTTP Protocol the request used. - * - * @example "HTTP/2" - */ - httpProtocol: string; - /** - * The browser-requested prioritization information in the request object. - * - * If no information was set, defaults to the empty string `""` - * - * @example "weight=192;exclusive=0;group=3;group-weight=127" - * @default "" - */ - requestPriority: string; - /** - * The TLS version of the connection to Cloudflare. - * In requests served over plaintext (without TLS), this property is the empty string `""`. - * - * @example "TLSv1.3" - */ - tlsVersion: string; - /** - * The cipher for the connection to Cloudflare. - * In requests served over plaintext (without TLS), this property is the empty string `""`. - * - * @example "AEAD-AES128-GCM-SHA256" - */ - tlsCipher: string; - /** - * Metadata containing the [`HELLO`](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2) and [`FINISHED`](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9) messages from this request's TLS handshake. - * - * If the incoming request was served over plaintext (without TLS) this field is undefined. - */ - tlsExportedAuthenticator?: IncomingRequestCfPropertiesExportedAuthenticatorMetadata; -} -interface IncomingRequestCfPropertiesBotManagementBase { - /** - * Cloudflare’s [level of certainty](https://developers.cloudflare.com/bots/concepts/bot-score/) that a request comes from a bot, - * represented as an integer percentage between `1` (almost certainly a bot) and `99` (almost certainly human). 
- * - * @example 54 - */ - score: number; - /** - * A boolean value that is true if the request comes from a good bot, like Google or Bing. - * Most customers choose to allow this traffic. For more details, see [Traffic from known bots](https://developers.cloudflare.com/firewall/known-issues-and-faq/#how-does-firewall-rules-handle-traffic-from-known-bots). - */ - verifiedBot: boolean; - /** - * A boolean value that is true if the request originates from a - * Cloudflare-verified proxy service. - */ - corporateProxy: boolean; - /** - * A boolean value that's true if the request matches [file extensions](https://developers.cloudflare.com/bots/reference/static-resources/) for many types of static resources. - */ - staticResource: boolean; - /** - * List of IDs that correlate to the Bot Management heuristic detections made on a request (you can have multiple heuristic detections on the same request). - */ - detectionIds: number[]; -} -interface IncomingRequestCfPropertiesBotManagement { - /** - * Results of Cloudflare's Bot Management analysis - */ - botManagement: IncomingRequestCfPropertiesBotManagementBase; - /** - * Duplicate of `botManagement.score`. - * - * @deprecated - */ - clientTrustScore: number; -} -interface IncomingRequestCfPropertiesBotManagementEnterprise extends IncomingRequestCfPropertiesBotManagement { - /** - * Results of Cloudflare's Bot Management analysis - */ - botManagement: IncomingRequestCfPropertiesBotManagementBase & { - /** - * A [JA3 Fingerprint](https://developers.cloudflare.com/bots/concepts/ja3-fingerprint/) to help profile specific SSL/TLS clients - * across different destination IPs, Ports, and X509 certificates. - */ - ja3Hash: string; - }; -} -interface IncomingRequestCfPropertiesCloudflareForSaaSEnterprise { - /** - * Custom metadata set per-host in [Cloudflare for SaaS](https://developers.cloudflare.com/cloudflare-for-platforms/cloudflare-for-saas/). 
- * - * This field is only present if you have Cloudflare for SaaS enabled on your account - * and you have followed the [required steps to enable it]((https://developers.cloudflare.com/cloudflare-for-platforms/cloudflare-for-saas/domain-support/custom-metadata/)). - */ - hostMetadata: HostMetadata; -} -interface IncomingRequestCfPropertiesCloudflareAccessOrApiShield { - /** - * Information about the client certificate presented to Cloudflare. - * - * This is populated when the incoming request is served over TLS using - * either Cloudflare Access or API Shield (mTLS) - * and the presented SSL certificate has a valid - * [Certificate Serial Number](https://ldapwiki.com/wiki/Certificate%20Serial%20Number) - * (i.e., not `null` or `""`). - * - * Otherwise, a set of placeholder values are used. - * - * The property `certPresented` will be set to `"1"` when - * the object is populated (i.e. the above conditions were met). - */ - tlsClientAuth: IncomingRequestCfPropertiesTLSClientAuth | IncomingRequestCfPropertiesTLSClientAuthPlaceholder; -} -/** - * Metadata about the request's TLS handshake - */ -interface IncomingRequestCfPropertiesExportedAuthenticatorMetadata { - /** - * The client's [`HELLO` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2), encoded in hexadecimal - * - * @example "44372ba35fa1270921d318f34c12f155dc87b682cf36a790cfaa3ba8737a1b5d" - */ - clientHandshake: string; - /** - * The server's [`HELLO` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.2), encoded in hexadecimal - * - * @example "44372ba35fa1270921d318f34c12f155dc87b682cf36a790cfaa3ba8737a1b5d" - */ - serverHandshake: string; - /** - * The client's [`FINISHED` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9), encoded in hexadecimal - * - * @example "084ee802fe1348f688220e2a6040a05b2199a761f33cf753abb1b006792d3f8b" - */ - clientFinished: string; - /** - * The server's [`FINISHED` message](https://www.rfc-editor.org/rfc/rfc5246#section-7.4.9), encoded 
in hexadecimal - * - * @example "084ee802fe1348f688220e2a6040a05b2199a761f33cf753abb1b006792d3f8b" - */ - serverFinished: string; -} -/** - * Geographic data about the request's origin. - */ -interface IncomingRequestCfPropertiesGeographicInformation { - /** - * The [ISO 3166-1 Alpha 2](https://www.iso.org/iso-3166-country-codes.html) country code the request originated from. - * - * If your worker is [configured to accept TOR connections](https://support.cloudflare.com/hc/en-us/articles/203306930-Understanding-Cloudflare-Tor-support-and-Onion-Routing), this may also be `"T1"`, indicating a request that originated over TOR. - * - * If Cloudflare is unable to determine where the request originated this property is omitted. - * - * The country code `"T1"` is used for requests originating on TOR. - * - * @example "GB" - */ - country?: Iso3166Alpha2Code | "T1"; - /** - * If present, this property indicates that the request originated in the EU - * - * @example "1" - */ - isEUCountry?: "1"; - /** - * A two-letter code indicating the continent the request originated from. 
- * - * @example "AN" - */ - continent?: ContinentCode; - /** - * The city the request originated from - * - * @example "Austin" - */ - city?: string; - /** - * Postal code of the incoming request - * - * @example "78701" - */ - postalCode?: string; - /** - * Latitude of the incoming request - * - * @example "30.27130" - */ - latitude?: string; - /** - * Longitude of the incoming request - * - * @example "-97.74260" - */ - longitude?: string; - /** - * Timezone of the incoming request - * - * @example "America/Chicago" - */ - timezone?: string; - /** - * If known, the ISO 3166-2 name for the first level region associated with - * the IP address of the incoming request - * - * @example "Texas" - */ - region?: string; - /** - * If known, the ISO 3166-2 code for the first-level region associated with - * the IP address of the incoming request - * - * @example "TX" - */ - regionCode?: string; - /** - * Metro code (DMA) of the incoming request - * - * @example "635" - */ - metroCode?: string; -} -/** Data about the incoming request's TLS certificate */ -interface IncomingRequestCfPropertiesTLSClientAuth { - /** Always `"1"`, indicating that the certificate was presented */ - certPresented: "1"; - /** - * Result of certificate verification. - * - * @example "FAILED:self signed certificate" - */ - certVerified: Exclude; - /** The presented certificate's revokation status. 
- * - * - A value of `"1"` indicates the certificate has been revoked - * - A value of `"0"` indicates the certificate has not been revoked - */ - certRevoked: "1" | "0"; - /** - * The certificate issuer's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) - * - * @example "CN=cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" - */ - certIssuerDN: string; - /** - * The certificate subject's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) - * - * @example "CN=*.cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" - */ - certSubjectDN: string; - /** - * The certificate issuer's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) ([RFC 2253](https://www.rfc-editor.org/rfc/rfc2253.html) formatted) - * - * @example "CN=cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" - */ - certIssuerDNRFC2253: string; - /** - * The certificate subject's [distinguished name](https://knowledge.digicert.com/generalinformation/INFO1745.html) ([RFC 2253](https://www.rfc-editor.org/rfc/rfc2253.html) formatted) - * - * @example "CN=*.cloudflareaccess.com, C=US, ST=Texas, L=Austin, O=Cloudflare" - */ - certSubjectDNRFC2253: string; - /** The certificate issuer's distinguished name (legacy policies) */ - certIssuerDNLegacy: string; - /** The certificate subject's distinguished name (legacy policies) */ - certSubjectDNLegacy: string; - /** - * The certificate's serial number - * - * @example "00936EACBE07F201DF" - */ - certSerial: string; - /** - * The certificate issuer's serial number - * - * @example "2489002934BDFEA34" - */ - certIssuerSerial: string; - /** - * The certificate's Subject Key Identifier - * - * @example "BB:AF:7E:02:3D:FA:A6:F1:3C:84:8E:AD:EE:38:98:EC:D9:32:32:D4" - */ - certSKI: string; - /** - * The certificate issuer's Subject Key Identifier - * - * @example "BB:AF:7E:02:3D:FA:A6:F1:3C:84:8E:AD:EE:38:98:EC:D9:32:32:D4" - */ - 
certIssuerSKI: string; - /** - * The certificate's SHA-1 fingerprint - * - * @example "6b9109f323999e52259cda7373ff0b4d26bd232e" - */ - certFingerprintSHA1: string; - /** - * The certificate's SHA-256 fingerprint - * - * @example "acf77cf37b4156a2708e34c4eb755f9b5dbbe5ebb55adfec8f11493438d19e6ad3f157f81fa3b98278453d5652b0c1fd1d71e5695ae4d709803a4d3f39de9dea" - */ - certFingerprintSHA256: string; - /** - * The effective starting date of the certificate - * - * @example "Dec 22 19:39:00 2018 GMT" - */ - certNotBefore: string; - /** - * The effective expiration date of the certificate - * - * @example "Dec 22 19:39:00 2018 GMT" - */ - certNotAfter: string; -} -/** Placeholder values for TLS Client Authorization */ -interface IncomingRequestCfPropertiesTLSClientAuthPlaceholder { - certPresented: "0"; - certVerified: "NONE"; - certRevoked: "0"; - certIssuerDN: ""; - certSubjectDN: ""; - certIssuerDNRFC2253: ""; - certSubjectDNRFC2253: ""; - certIssuerDNLegacy: ""; - certSubjectDNLegacy: ""; - certSerial: ""; - certIssuerSerial: ""; - certSKI: ""; - certIssuerSKI: ""; - certFingerprintSHA1: ""; - certFingerprintSHA256: ""; - certNotBefore: ""; - certNotAfter: ""; -} -/** Possible outcomes of TLS verification */ -declare type CertVerificationStatus = -/** Authentication succeeded */ -"SUCCESS" -/** No certificate was presented */ - | "NONE" -/** Failed because the certificate was self-signed */ - | "FAILED:self signed certificate" -/** Failed because the certificate failed a trust chain check */ - | "FAILED:unable to verify the first certificate" -/** Failed because the certificate not yet valid */ - | "FAILED:certificate is not yet valid" -/** Failed because the certificate is expired */ - | "FAILED:certificate has expired" -/** Failed for another unspecified reason */ - | "FAILED"; -/** - * An upstream endpoint's response to a TCP `keepalive` message from Cloudflare. 
- */ -declare type IncomingRequestCfPropertiesEdgeRequestKeepAliveStatus = 0 /** Unknown */ | 1 /** no keepalives (not found) */ | 2 /** no connection re-use, opening keepalive connection failed */ | 3 /** no connection re-use, keepalive accepted and saved */ | 4 /** connection re-use, refused by the origin server (`TCP FIN`) */ | 5; /** connection re-use, accepted by the origin server */ -/** ISO 3166-1 Alpha-2 codes */ -declare type Iso3166Alpha2Code = "AD" | "AE" | "AF" | "AG" | "AI" | "AL" | "AM" | "AO" | "AQ" | "AR" | "AS" | "AT" | "AU" | "AW" | "AX" | "AZ" | "BA" | "BB" | "BD" | "BE" | "BF" | "BG" | "BH" | "BI" | "BJ" | "BL" | "BM" | "BN" | "BO" | "BQ" | "BR" | "BS" | "BT" | "BV" | "BW" | "BY" | "BZ" | "CA" | "CC" | "CD" | "CF" | "CG" | "CH" | "CI" | "CK" | "CL" | "CM" | "CN" | "CO" | "CR" | "CU" | "CV" | "CW" | "CX" | "CY" | "CZ" | "DE" | "DJ" | "DK" | "DM" | "DO" | "DZ" | "EC" | "EE" | "EG" | "EH" | "ER" | "ES" | "ET" | "FI" | "FJ" | "FK" | "FM" | "FO" | "FR" | "GA" | "GB" | "GD" | "GE" | "GF" | "GG" | "GH" | "GI" | "GL" | "GM" | "GN" | "GP" | "GQ" | "GR" | "GS" | "GT" | "GU" | "GW" | "GY" | "HK" | "HM" | "HN" | "HR" | "HT" | "HU" | "ID" | "IE" | "IL" | "IM" | "IN" | "IO" | "IQ" | "IR" | "IS" | "IT" | "JE" | "JM" | "JO" | "JP" | "KE" | "KG" | "KH" | "KI" | "KM" | "KN" | "KP" | "KR" | "KW" | "KY" | "KZ" | "LA" | "LB" | "LC" | "LI" | "LK" | "LR" | "LS" | "LT" | "LU" | "LV" | "LY" | "MA" | "MC" | "MD" | "ME" | "MF" | "MG" | "MH" | "MK" | "ML" | "MM" | "MN" | "MO" | "MP" | "MQ" | "MR" | "MS" | "MT" | "MU" | "MV" | "MW" | "MX" | "MY" | "MZ" | "NA" | "NC" | "NE" | "NF" | "NG" | "NI" | "NL" | "NO" | "NP" | "NR" | "NU" | "NZ" | "OM" | "PA" | "PE" | "PF" | "PG" | "PH" | "PK" | "PL" | "PM" | "PN" | "PR" | "PS" | "PT" | "PW" | "PY" | "QA" | "RE" | "RO" | "RS" | "RU" | "RW" | "SA" | "SB" | "SC" | "SD" | "SE" | "SG" | "SH" | "SI" | "SJ" | "SK" | "SL" | "SM" | "SN" | "SO" | "SR" | "SS" | "ST" | "SV" | "SX" | "SY" | "SZ" | "TC" | "TD" | "TF" | "TG" | "TH" | "TJ" | "TK" | 
"TL" | "TM" | "TN" | "TO" | "TR" | "TT" | "TV" | "TW" | "TZ" | "UA" | "UG" | "UM" | "US" | "UY" | "UZ" | "VA" | "VC" | "VE" | "VG" | "VI" | "VN" | "VU" | "WF" | "WS" | "YE" | "YT" | "ZA" | "ZM" | "ZW"; -/** The 2-letter continent codes Cloudflare uses */ -declare type ContinentCode = "AF" | "AN" | "AS" | "EU" | "NA" | "OC" | "SA"; -type CfProperties = IncomingRequestCfProperties | RequestInitCfProperties; -interface D1Meta { - duration: number; - size_after: number; - rows_read: number; - rows_written: number; - last_row_id: number; - changed_db: boolean; - changes: number; - /** - * The region of the database instance that executed the query. - */ - served_by_region?: string; - /** - * True if-and-only-if the database instance that executed the query was the primary. - */ - served_by_primary?: boolean; - timings?: { - /** - * The duration of the SQL query execution by the database instance. It doesn't include any network time. - */ - sql_duration_ms: number; - }; -} -interface D1Response { - success: true; - meta: D1Meta & Record; - error?: never; -} -type D1Result = D1Response & { - results: T[]; -}; -interface D1ExecResult { - count: number; - duration: number; -} -type D1SessionConstraint = -// Indicates that the first query should go to the primary, and the rest queries -// using the same D1DatabaseSession will go to any replica that is consistent with -// the bookmark maintained by the session (returned by the first query). -"first-primary" -// Indicates that the first query can go anywhere (primary or replica), and the rest queries -// using the same D1DatabaseSession will go to any replica that is consistent with -// the bookmark maintained by the session (returned by the first query). 
- | "first-unconstrained"; -type D1SessionBookmark = string; -declare abstract class D1Database { - prepare(query: string): D1PreparedStatement; - batch(statements: D1PreparedStatement[]): Promise[]>; - exec(query: string): Promise; - /** - * Creates a new D1 Session anchored at the given constraint or the bookmark. - * All queries executed using the created session will have sequential consistency, - * meaning that all writes done through the session will be visible in subsequent reads. - * - * @param constraintOrBookmark Either the session constraint or the explicit bookmark to anchor the created session. - */ - withSession(constraintOrBookmark?: D1SessionBookmark | D1SessionConstraint): D1DatabaseSession; - /** - * @deprecated dump() will be removed soon, only applies to deprecated alpha v1 databases. - */ - dump(): Promise; -} -declare abstract class D1DatabaseSession { - prepare(query: string): D1PreparedStatement; - batch(statements: D1PreparedStatement[]): Promise[]>; - /** - * @returns The latest session bookmark across all executed queries on the session. - * If no query has been executed yet, `null` is returned. - */ - getBookmark(): D1SessionBookmark | null; -} -declare abstract class D1PreparedStatement { - bind(...values: unknown[]): D1PreparedStatement; - first(colName: string): Promise; - first>(): Promise; - run>(): Promise>; - all>(): Promise>; - raw(options: { - columnNames: true; - }): Promise<[ - string[], - ...T[] - ]>; - raw(options?: { - columnNames?: false; - }): Promise; -} -// `Disposable` was added to TypeScript's standard lib types in version 5.2. -// To support older TypeScript versions, define an empty `Disposable` interface. -// Users won't be able to use `using`/`Symbol.dispose` without upgrading to 5.2, -// but this will ensure type checking on older versions still passes. -// TypeScript's interface merging will ensure our empty interface is effectively -// ignored when `Disposable` is included in the standard lib. 
-interface Disposable { -} -/** - * An email message that can be sent from a Worker. - */ -interface EmailMessage { - /** - * Envelope From attribute of the email message. - */ - readonly from: string; - /** - * Envelope To attribute of the email message. - */ - readonly to: string; -} -/** - * An email message that is sent to a consumer Worker and can be rejected/forwarded. - */ -interface ForwardableEmailMessage extends EmailMessage { - /** - * Stream of the email message content. - */ - readonly raw: ReadableStream; - /** - * An [Headers object](https://developer.mozilla.org/en-US/docs/Web/API/Headers). - */ - readonly headers: Headers; - /** - * Size of the email message content. - */ - readonly rawSize: number; - /** - * Reject this email message by returning a permanent SMTP error back to the connecting client including the given reason. - * @param reason The reject reason. - * @returns void - */ - setReject(reason: string): void; - /** - * Forward this email message to a verified destination address of the account. - * @param rcptTo Verified destination address. - * @param headers A [Headers object](https://developer.mozilla.org/en-US/docs/Web/API/Headers). - * @returns A promise that resolves when the email message is forwarded. - */ - forward(rcptTo: string, headers?: Headers): Promise; - /** - * Reply to the sender of this email message with a new EmailMessage object. - * @param message The reply message. - * @returns A promise that resolves when the email message is replied. - */ - reply(message: EmailMessage): Promise; -} -/** - * A binding that allows a Worker to send email messages. 
- */ -interface SendEmail { - send(message: EmailMessage): Promise; -} -declare abstract class EmailEvent extends ExtendableEvent { - readonly message: ForwardableEmailMessage; -} -declare type EmailExportedHandler = (message: ForwardableEmailMessage, env: Env, ctx: ExecutionContext) => void | Promise; -declare module "cloudflare:email" { - let _EmailMessage: { - prototype: EmailMessage; - new (from: string, to: string, raw: ReadableStream | string): EmailMessage; - }; - export { _EmailMessage as EmailMessage }; -} -interface Hyperdrive { - /** - * Connect directly to Hyperdrive as if it's your database, returning a TCP socket. - * - * Calling this method returns an idential socket to if you call - * `connect("host:port")` using the `host` and `port` fields from this object. - * Pick whichever approach works better with your preferred DB client library. - * - * Note that this socket is not yet authenticated -- it's expected that your - * code (or preferably, the client library of your choice) will authenticate - * using the information in this class's readonly fields. - */ - connect(): Socket; - /** - * A valid DB connection string that can be passed straight into the typical - * client library/driver/ORM. This will typically be the easiest way to use - * Hyperdrive. - */ - readonly connectionString: string; - /* - * A randomly generated hostname that is only valid within the context of the - * currently running Worker which, when passed into `connect()` function from - * the "cloudflare:sockets" module, will connect to the Hyperdrive instance - * for your database. - */ - readonly host: string; - /* - * The port that must be paired the the host field when connecting. - */ - readonly port: number; - /* - * The username to use when authenticating to your database via Hyperdrive. 
- * Unlike the host and password, this will be the same every time - */ - readonly user: string; - /* - * The randomly generated password to use when authenticating to your - * database via Hyperdrive. Like the host field, this password is only valid - * within the context of the currently running Worker instance from which - * it's read. - */ - readonly password: string; - /* - * The name of the database to connect to. - */ - readonly database: string; -} -// Copyright (c) 2024 Cloudflare, Inc. -// Licensed under the Apache 2.0 license found in the LICENSE file or at: -// https://opensource.org/licenses/Apache-2.0 -type ImageInfoResponse = { - format: 'image/svg+xml'; -} | { - format: string; - fileSize: number; - width: number; - height: number; -}; -type ImageTransform = { - width?: number; - height?: number; - background?: string; - blur?: number; - border?: { - color?: string; - width?: number; - } | { - top?: number; - bottom?: number; - left?: number; - right?: number; - }; - brightness?: number; - contrast?: number; - fit?: 'scale-down' | 'contain' | 'pad' | 'squeeze' | 'cover' | 'crop'; - flip?: 'h' | 'v' | 'hv'; - gamma?: number; - gravity?: 'left' | 'right' | 'top' | 'bottom' | 'center' | 'auto' | 'entropy' | { - x?: number; - y?: number; - mode: 'remainder' | 'box-center'; - }; - rotate?: 0 | 90 | 180 | 270; - saturation?: number; - sharpen?: number; - trim?: "border" | { - top?: number; - bottom?: number; - left?: number; - right?: number; - width?: number; - height?: number; - border?: boolean | { - color?: string; - tolerance?: number; - keep?: number; - }; - }; -}; -type ImageDrawOptions = { - opacity?: number; - repeat?: boolean | string; - top?: number; - left?: number; - bottom?: number; - right?: number; -}; -type ImageOutputOptions = { - format: 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp' | 'image/avif' | 'rgb' | 'rgba'; - quality?: number; - background?: string; -}; -interface ImagesBinding { - /** - * Get image metadata (type, 
width and height) - * @throws {@link ImagesError} with code 9412 if input is not an image - * @param stream The image bytes - */ - info(stream: ReadableStream): Promise; - /** - * Begin applying a series of transformations to an image - * @param stream The image bytes - * @returns A transform handle - */ - input(stream: ReadableStream): ImageTransformer; -} -interface ImageTransformer { - /** - * Apply transform next, returning a transform handle. - * You can then apply more transformations, draw, or retrieve the output. - * @param transform - */ - transform(transform: ImageTransform): ImageTransformer; - /** - * Draw an image on this transformer, returning a transform handle. - * You can then apply more transformations, draw, or retrieve the output. - * @param image The image (or transformer that will give the image) to draw - * @param options The options configuring how to draw the image - */ - draw(image: ReadableStream | ImageTransformer, options?: ImageDrawOptions): ImageTransformer; - /** - * Retrieve the image that results from applying the transforms to the - * provided input - * @param options Options that apply to the output e.g. output format - */ - output(options: ImageOutputOptions): Promise; -} -interface ImageTransformationResult { - /** - * The image as a response, ready to store in cache or return to users - */ - response(): Response; - /** - * The content type of the returned image - */ - contentType(): string; - /** - * The bytes of the response - */ - image(): ReadableStream; -} -interface ImagesError extends Error { - readonly code: number; - readonly message: string; - readonly stack?: string; -} -type Params

= Record; -type EventContext = { - request: Request>; - functionPath: string; - waitUntil: (promise: Promise) => void; - passThroughOnException: () => void; - next: (input?: Request | string, init?: RequestInit) => Promise; - env: Env & { - ASSETS: { - fetch: typeof fetch; - }; - }; - params: Params

; - data: Data; -}; -type PagesFunction = Record> = (context: EventContext) => Response | Promise; -type EventPluginContext = { - request: Request>; - functionPath: string; - waitUntil: (promise: Promise) => void; - passThroughOnException: () => void; - next: (input?: Request | string, init?: RequestInit) => Promise; - env: Env & { - ASSETS: { - fetch: typeof fetch; - }; - }; - params: Params

; - data: Data; - pluginArgs: PluginArgs; -}; -type PagesPluginFunction = Record, PluginArgs = unknown> = (context: EventPluginContext) => Response | Promise; -declare module "assets:*" { - export const onRequest: PagesFunction; -} -// Copyright (c) 2022-2023 Cloudflare, Inc. -// Licensed under the Apache 2.0 license found in the LICENSE file or at: -// https://opensource.org/licenses/Apache-2.0 -declare module "cloudflare:pipelines" { - export abstract class PipelineTransformationEntrypoint { - protected env: Env; - protected ctx: ExecutionContext; - constructor(ctx: ExecutionContext, env: Env); - /** - * run recieves an array of PipelineRecord which can be - * transformed and returned to the pipeline - * @param records Incoming records from the pipeline to be transformed - * @param metadata Information about the specific pipeline calling the transformation entrypoint - * @returns A promise containing the transformed PipelineRecord array - */ - public run(records: I[], metadata: PipelineBatchMetadata): Promise; - } - export type PipelineRecord = Record; - export type PipelineBatchMetadata = { - pipelineId: string; - pipelineName: string; - }; - export interface Pipeline { - /** - * The Pipeline interface represents the type of a binding to a Pipeline - * - * @param records The records to send to the pipeline - */ - send(records: T[]): Promise; - } -} -// PubSubMessage represents an incoming PubSub message. -// The message includes metadata about the broker, the client, and the payload -// itself. -// https://developers.cloudflare.com/pub-sub/ -interface PubSubMessage { - // Message ID - readonly mid: number; - // MQTT broker FQDN in the form mqtts://BROKER.NAMESPACE.cloudflarepubsub.com:PORT - readonly broker: string; - // The MQTT topic the message was sent on. - readonly topic: string; - // The client ID of the client that published this message. 
- readonly clientId: string; - // The unique identifier (JWT ID) used by the client to authenticate, if token - // auth was used. - readonly jti?: string; - // A Unix timestamp (seconds from Jan 1, 1970), set when the Pub/Sub Broker - // received the message from the client. - readonly receivedAt: number; - // An (optional) string with the MIME type of the payload, if set by the - // client. - readonly contentType: string; - // Set to 1 when the payload is a UTF-8 string - // https://docs.oasis-open.org/mqtt/mqtt/v5.0/os/mqtt-v5.0-os.html#_Toc3901063 - readonly payloadFormatIndicator: number; - // Pub/Sub (MQTT) payloads can be UTF-8 strings, or byte arrays. - // You can use payloadFormatIndicator to inspect this before decoding. - payload: string | Uint8Array; -} -// JsonWebKey extended by kid parameter -interface JsonWebKeyWithKid extends JsonWebKey { - // Key Identifier of the JWK - readonly kid: string; -} -interface RateLimitOptions { - key: string; -} -interface RateLimitOutcome { - success: boolean; -} -interface RateLimit { - /** - * Rate limit a request based on the provided options. - * @see https://developers.cloudflare.com/workers/runtime-apis/bindings/rate-limit/ - * @returns A promise that resolves with the outcome of the rate limit. - */ - limit(options: RateLimitOptions): Promise; -} -// Namespace for RPC utility types. Unfortunately, we can't use a `module` here as these types need -// to referenced by `Fetcher`. This is included in the "importable" version of the types which -// strips all `module` blocks. -declare namespace Rpc { - // Branded types for identifying `WorkerEntrypoint`/`DurableObject`/`Target`s. - // TypeScript uses *structural* typing meaning anything with the same shape as type `T` is a `T`. - // For the classes exported by `cloudflare:workers` we want *nominal* typing (i.e. 
we only want to - // accept `WorkerEntrypoint` from `cloudflare:workers`, not any other class with the same shape) - export const __RPC_STUB_BRAND: '__RPC_STUB_BRAND'; - export const __RPC_TARGET_BRAND: '__RPC_TARGET_BRAND'; - export const __WORKER_ENTRYPOINT_BRAND: '__WORKER_ENTRYPOINT_BRAND'; - export const __DURABLE_OBJECT_BRAND: '__DURABLE_OBJECT_BRAND'; - export const __WORKFLOW_ENTRYPOINT_BRAND: '__WORKFLOW_ENTRYPOINT_BRAND'; - export interface RpcTargetBranded { - [__RPC_TARGET_BRAND]: never; - } - export interface WorkerEntrypointBranded { - [__WORKER_ENTRYPOINT_BRAND]: never; - } - export interface DurableObjectBranded { - [__DURABLE_OBJECT_BRAND]: never; - } - export interface WorkflowEntrypointBranded { - [__WORKFLOW_ENTRYPOINT_BRAND]: never; - } - export type EntrypointBranded = WorkerEntrypointBranded | DurableObjectBranded | WorkflowEntrypointBranded; - // Types that can be used through `Stub`s - export type Stubable = RpcTargetBranded | ((...args: any[]) => any); - // Types that can be passed over RPC - // The reason for using a generic type here is to build a serializable subset of structured - // cloneable composite types. This allows types defined with the "interface" keyword to pass the - // serializable check as well. Otherwise, only types defined with the "type" keyword would pass. - type Serializable = - // Structured cloneables - BaseType - // Structured cloneable composites - | Map ? Serializable : never, T extends Map ? Serializable : never> | Set ? Serializable : never> | ReadonlyArray ? Serializable : never> | { - [K in keyof T]: K extends number | string ? Serializable : never; - } - // Special types - | Stub - // Serialized as stubs, see `Stubify` - | Stubable; - // Base type for all RPC stubs, including common memory management methods. - // `T` is used as a marker type for unwrapping `Stub`s later. 
- interface StubBase extends Disposable { - [__RPC_STUB_BRAND]: T; - dup(): this; - } - export type Stub = Provider & StubBase; - // This represents all the types that can be sent as-is over an RPC boundary - type BaseType = void | undefined | null | boolean | number | bigint | string | TypedArray | ArrayBuffer | DataView | Date | Error | RegExp | ReadableStream | WritableStream | Request | Response | Headers; - // Recursively rewrite all `Stubable` types with `Stub`s - // prettier-ignore - type Stubify = T extends Stubable ? Stub : T extends Map ? Map, Stubify> : T extends Set ? Set> : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends BaseType ? T : T extends { - [key: string | number]: any; - } ? { - [K in keyof T]: Stubify; - } : T; - // Recursively rewrite all `Stub`s with the corresponding `T`s. - // Note we use `StubBase` instead of `Stub` here to avoid circular dependencies: - // `Stub` depends on `Provider`, which depends on `Unstubify`, which would depend on `Stub`. - // prettier-ignore - type Unstubify = T extends StubBase ? V : T extends Map ? Map, Unstubify> : T extends Set ? Set> : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends BaseType ? T : T extends { - [key: string | number]: unknown; - } ? { - [K in keyof T]: Unstubify; - } : T; - type UnstubifyAll = { - [I in keyof A]: Unstubify; - }; - // Utility type for adding `Provider`/`Disposable`s to `object` types only. - // Note `unknown & T` is equivalent to `T`. - type MaybeProvider = T extends object ? Provider : unknown; - type MaybeDisposable = T extends object ? Disposable : unknown; - // Type for method return or property on an RPC interface. - // - Stubable types are replaced by stubs. - // - Serializable types are passed by value, with stubable types replaced by stubs - // and a top-level `Disposer`. - // Everything else can't be passed over PRC. - // Technically, we use custom thenables here, but they quack like `Promise`s. 
- // Intersecting with `(Maybe)Provider` allows pipelining. - // prettier-ignore - type Result = R extends Stubable ? Promise> & Provider : R extends Serializable ? Promise & MaybeDisposable> & MaybeProvider : never; - // Type for method or property on an RPC interface. - // For methods, unwrap `Stub`s in parameters, and rewrite returns to be `Result`s. - // Unwrapping `Stub`s allows calling with `Stubable` arguments. - // For properties, rewrite types to be `Result`s. - // In each case, unwrap `Promise`s. - type MethodOrProperty = V extends (...args: infer P) => infer R ? (...args: UnstubifyAll

) => Result> : Result>; - // Type for the callable part of an `Provider` if `T` is callable. - // This is intersected with methods/properties. - type MaybeCallableProvider = T extends (...args: any[]) => any ? MethodOrProperty : unknown; - // Base type for all other types providing RPC-like interfaces. - // Rewrites all methods/properties to be `MethodOrProperty`s, while preserving callable types. - // `Reserved` names (e.g. stub method names like `dup()`) and symbols can't be accessed over RPC. - export type Provider = MaybeCallableProvider & { - [K in Exclude>]: MethodOrProperty; - }; -} -declare namespace Cloudflare { - interface Env { - } -} -declare module 'cloudflare:workers' { - export type RpcStub = Rpc.Stub; - export const RpcStub: { - new (value: T): Rpc.Stub; - }; - export abstract class RpcTarget implements Rpc.RpcTargetBranded { - [Rpc.__RPC_TARGET_BRAND]: never; - } - // `protected` fields don't appear in `keyof`s, so can't be accessed over RPC - export abstract class WorkerEntrypoint implements Rpc.WorkerEntrypointBranded { - [Rpc.__WORKER_ENTRYPOINT_BRAND]: never; - protected ctx: ExecutionContext; - protected env: Env; - constructor(ctx: ExecutionContext, env: Env); - fetch?(request: Request): Response | Promise; - tail?(events: TraceItem[]): void | Promise; - trace?(traces: TraceItem[]): void | Promise; - scheduled?(controller: ScheduledController): void | Promise; - queue?(batch: MessageBatch): void | Promise; - test?(controller: TestController): void | Promise; - } - export abstract class DurableObject implements Rpc.DurableObjectBranded { - [Rpc.__DURABLE_OBJECT_BRAND]: never; - protected ctx: DurableObjectState; - protected env: Env; - constructor(ctx: DurableObjectState, env: Env); - fetch?(request: Request): Response | Promise; - alarm?(alarmInfo?: AlarmInvocationInfo): void | Promise; - webSocketMessage?(ws: WebSocket, message: string | ArrayBuffer): void | Promise; - webSocketClose?(ws: WebSocket, code: number, reason: string, wasClean: 
boolean): void | Promise; - webSocketError?(ws: WebSocket, error: unknown): void | Promise; - } - export type WorkflowDurationLabel = 'second' | 'minute' | 'hour' | 'day' | 'week' | 'month' | 'year'; - export type WorkflowSleepDuration = `${number} ${WorkflowDurationLabel}${'s' | ''}` | number; - export type WorkflowDelayDuration = WorkflowSleepDuration; - export type WorkflowTimeoutDuration = WorkflowSleepDuration; - export type WorkflowBackoff = 'constant' | 'linear' | 'exponential'; - export type WorkflowStepConfig = { - retries?: { - limit: number; - delay: WorkflowDelayDuration | number; - backoff?: WorkflowBackoff; - }; - timeout?: WorkflowTimeoutDuration | number; - }; - export type WorkflowEvent = { - payload: Readonly; - timestamp: Date; - instanceId: string; - }; - export type WorkflowStepEvent = { - payload: Readonly; - timestamp: Date; - type: string; - }; - export abstract class WorkflowStep { - do>(name: string, callback: () => Promise): Promise; - do>(name: string, config: WorkflowStepConfig, callback: () => Promise): Promise; - sleep: (name: string, duration: WorkflowSleepDuration) => Promise; - sleepUntil: (name: string, timestamp: Date | number) => Promise; - waitForEvent>(name: string, options: { - type: string; - timeout?: WorkflowTimeoutDuration | number; - }): Promise>; - } - export abstract class WorkflowEntrypoint | unknown = unknown> implements Rpc.WorkflowEntrypointBranded { - [Rpc.__WORKFLOW_ENTRYPOINT_BRAND]: never; - protected ctx: ExecutionContext; - protected env: Env; - constructor(ctx: ExecutionContext, env: Env); - run(event: Readonly>, step: WorkflowStep): Promise; - } - export const env: Cloudflare.Env; -} -interface SecretsStoreSecret { - /** - * Get a secret from the Secrets Store, returning a string of the secret value - * if it exists, or throws an error if it does not exist - */ - get(): Promise; -} -declare module "cloudflare:sockets" { - function _connect(address: string | SocketAddress, options?: SocketOptions): Socket; - 
export { _connect as connect }; -} -declare namespace TailStream { - interface Header { - readonly name: string; - readonly value: string; - } - interface FetchEventInfo { - readonly type: "fetch"; - readonly method: string; - readonly url: string; - readonly cfJson: string; - readonly headers: Header[]; - } - interface JsRpcEventInfo { - readonly type: "jsrpc"; - readonly methodName: string; - } - interface ScheduledEventInfo { - readonly type: "scheduled"; - readonly scheduledTime: Date; - readonly cron: string; - } - interface AlarmEventInfo { - readonly type: "alarm"; - readonly scheduledTime: Date; - } - interface QueueEventInfo { - readonly type: "queue"; - readonly queueName: string; - readonly batchSize: number; - } - interface EmailEventInfo { - readonly type: "email"; - readonly mailFrom: string; - readonly rcptTo: string; - readonly rawSize: number; - } - interface TraceEventInfo { - readonly type: "trace"; - readonly traces: (string | null)[]; - } - interface HibernatableWebSocketEventInfoMessage { - readonly type: "message"; - } - interface HibernatableWebSocketEventInfoError { - readonly type: "error"; - } - interface HibernatableWebSocketEventInfoClose { - readonly type: "close"; - readonly code: number; - readonly wasClean: boolean; - } - interface HibernatableWebSocketEventInfo { - readonly type: "hibernatableWebSocket"; - readonly info: HibernatableWebSocketEventInfoClose | HibernatableWebSocketEventInfoError | HibernatableWebSocketEventInfoMessage; - } - interface Resume { - readonly type: "resume"; - readonly attachment?: any; - } - interface CustomEventInfo { - readonly type: "custom"; - } - interface FetchResponseInfo { - readonly type: "fetch"; - readonly statusCode: number; - } - type EventOutcome = "ok" | "canceled" | "exception" | "unknown" | "killSwitch" | "daemonDown" | "exceededCpu" | "exceededMemory" | "loadShed" | "responseStreamDisconnected" | "scriptNotFound"; - interface ScriptVersion { - readonly id: string; - readonly tag?: 
string; - readonly message?: string; - } - interface Trigger { - readonly traceId: string; - readonly invocationId: string; - readonly spanId: string; - } - interface Onset { - readonly type: "onset"; - readonly dispatchNamespace?: string; - readonly entrypoint?: string; - readonly scriptName?: string; - readonly scriptTags?: string[]; - readonly scriptVersion?: ScriptVersion; - readonly trigger?: Trigger; - readonly info: FetchEventInfo | JsRpcEventInfo | ScheduledEventInfo | AlarmEventInfo | QueueEventInfo | EmailEventInfo | TraceEventInfo | HibernatableWebSocketEventInfo | Resume | CustomEventInfo; - } - interface Outcome { - readonly type: "outcome"; - readonly outcome: EventOutcome; - readonly cpuTime: number; - readonly wallTime: number; - } - interface Hibernate { - readonly type: "hibernate"; - } - interface SpanOpen { - readonly type: "spanOpen"; - readonly op?: string; - readonly info?: FetchEventInfo | JsRpcEventInfo | Attribute[]; - } - interface SpanClose { - readonly type: "spanClose"; - readonly outcome: EventOutcome; - } - interface DiagnosticChannelEvent { - readonly type: "diagnosticChannel"; - readonly channel: string; - readonly message: any; - } - interface Exception { - readonly type: "exception"; - readonly name: string; - readonly message: string; - readonly stack?: string; - } - interface Log { - readonly type: "log"; - readonly level: "debug" | "error" | "info" | "log" | "warn"; - readonly message: string; - } - interface Return { - readonly type: "return"; - readonly info?: FetchResponseInfo | Attribute[]; - } - interface Link { - readonly type: "link"; - readonly label?: string; - readonly traceId: string; - readonly invocationId: string; - readonly spanId: string; - } - interface Attribute { - readonly type: "attribute"; - readonly name: string; - readonly value: string | string[] | boolean | boolean[] | number | number[]; - } - type Mark = DiagnosticChannelEvent | Exception | Log | Return | Link | Attribute[]; - interface TailEvent { - 
readonly traceId: string; - readonly invocationId: string; - readonly spanId: string; - readonly timestamp: Date; - readonly sequence: number; - readonly event: Onset | Outcome | Hibernate | SpanOpen | SpanClose | Mark; - } - type TailEventHandler = (event: TailEvent) => void | Promise; - type TailEventHandlerName = "onset" | "outcome" | "hibernate" | "spanOpen" | "spanClose" | "diagnosticChannel" | "exception" | "log" | "return" | "link" | "attribute"; - type TailEventHandlerObject = Record; - type TailEventHandlerType = TailEventHandler | TailEventHandlerObject; -} -// Copyright (c) 2022-2023 Cloudflare, Inc. -// Licensed under the Apache 2.0 license found in the LICENSE file or at: -// https://opensource.org/licenses/Apache-2.0 -/** - * Data types supported for holding vector metadata. - */ -type VectorizeVectorMetadataValue = string | number | boolean | string[]; -/** - * Additional information to associate with a vector. - */ -type VectorizeVectorMetadata = VectorizeVectorMetadataValue | Record; -type VectorFloatArray = Float32Array | Float64Array; -interface VectorizeError { - code?: number; - error: string; -} -/** - * Comparison logic/operation to use for metadata filtering. - * - * This list is expected to grow as support for more operations are released. - */ -type VectorizeVectorMetadataFilterOp = "$eq" | "$ne"; -/** - * Filter criteria for vector metadata used to limit the retrieved query result set. - */ -type VectorizeVectorMetadataFilter = { - [field: string]: Exclude | null | { - [Op in VectorizeVectorMetadataFilterOp]?: Exclude | null; - }; -}; -/** - * Supported distance metrics for an index. - * Distance metrics determine how other "similar" vectors are determined. - */ -type VectorizeDistanceMetric = "euclidean" | "cosine" | "dot-product"; -/** - * Metadata return levels for a Vectorize query. - * - * Default to "none". 
- * - * @property all Full metadata for the vector return set, including all fields (including those un-indexed) without truncation. This is a more expensive retrieval, as it requires additional fetching & reading of un-indexed data. - * @property indexed Return all metadata fields configured for indexing in the vector return set. This level of retrieval is "free" in that no additional overhead is incurred returning this data. However, note that indexed metadata is subject to truncation (especially for larger strings). - * @property none No indexed metadata will be returned. - */ -type VectorizeMetadataRetrievalLevel = "all" | "indexed" | "none"; -interface VectorizeQueryOptions { - topK?: number; - namespace?: string; - returnValues?: boolean; - returnMetadata?: boolean | VectorizeMetadataRetrievalLevel; - filter?: VectorizeVectorMetadataFilter; -} -/** - * Information about the configuration of an index. - */ -type VectorizeIndexConfig = { - dimensions: number; - metric: VectorizeDistanceMetric; -} | { - preset: string; // keep this generic, as we'll be adding more presets in the future and this is only in a read capacity -}; -/** - * Metadata about an existing index. - * - * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. - * See {@link VectorizeIndexInfo} for its post-beta equivalent. - */ -interface VectorizeIndexDetails { - /** The unique ID of the index */ - readonly id: string; - /** The name of the index. */ - name: string; - /** (optional) A human readable description for the index. */ - description?: string; - /** The index configuration, including the dimension size and distance metric. */ - config: VectorizeIndexConfig; - /** The number of records containing vectors within the index. */ - vectorsCount: number; -} -/** - * Metadata about an existing index. - */ -interface VectorizeIndexInfo { - /** The number of records containing vectors within the index. 
*/ - vectorCount: number; - /** Number of dimensions the index has been configured for. */ - dimensions: number; - /** ISO 8601 datetime of the last processed mutation on in the index. All changes before this mutation will be reflected in the index state. */ - processedUpToDatetime: number; - /** UUIDv4 of the last mutation processed by the index. All changes before this mutation will be reflected in the index state. */ - processedUpToMutation: number; -} -/** - * Represents a single vector value set along with its associated metadata. - */ -interface VectorizeVector { - /** The ID for the vector. This can be user-defined, and must be unique. It should uniquely identify the object, and is best set based on the ID of what the vector represents. */ - id: string; - /** The vector values */ - values: VectorFloatArray | number[]; - /** The namespace this vector belongs to. */ - namespace?: string; - /** Metadata associated with the vector. Includes the values of other fields and potentially additional details. */ - metadata?: Record; -} -/** - * Represents a matched vector for a query along with its score and (if specified) the matching vector information. - */ -type VectorizeMatch = Pick, "values"> & Omit & { - /** The score or rank for similarity, when returned as a result */ - score: number; -}; -/** - * A set of matching {@link VectorizeMatch} for a particular query. - */ -interface VectorizeMatches { - matches: VectorizeMatch[]; - count: number; -} -/** - * Results of an operation that performed a mutation on a set of vectors. - * Here, `ids` is a list of vectors that were successfully processed. - * - * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. - * See {@link VectorizeAsyncMutation} for its post-beta equivalent. - */ -interface VectorizeVectorMutation { - /* List of ids of vectors that were successfully processed. */ - ids: string[]; - /* Total count of the number of processed vectors. 
*/ - count: number; -} -/** - * Result type indicating a mutation on the Vectorize Index. - * Actual mutations are processed async where the `mutationId` is the unique identifier for the operation. - */ -interface VectorizeAsyncMutation { - /** The unique identifier for the async mutation operation containing the changeset. */ - mutationId: string; -} -/** - * A Vectorize Vector Search Index for querying vectors/embeddings. - * - * This type is exclusively for the Vectorize **beta** and will be deprecated once Vectorize RC is released. - * See {@link Vectorize} for its new implementation. - */ -declare abstract class VectorizeIndex { - /** - * Get information about the currently bound index. - * @returns A promise that resolves with information about the current index. - */ - public describe(): Promise; - /** - * Use the provided vector to perform a similarity search across the index. - * @param vector Input vector that will be used to drive the similarity search. - * @param options Configuration options to massage the returned data. - * @returns A promise that resolves with matched and scored vectors. - */ - public query(vector: VectorFloatArray | number[], options?: VectorizeQueryOptions): Promise; - /** - * Insert a list of vectors into the index dataset. If a provided id exists, an error will be thrown. - * @param vectors List of vectors that will be inserted. - * @returns A promise that resolves with the ids & count of records that were successfully processed. - */ - public insert(vectors: VectorizeVector[]): Promise; - /** - * Upsert a list of vectors into the index dataset. If a provided id exists, it will be replaced with the new values. - * @param vectors List of vectors that will be upserted. - * @returns A promise that resolves with the ids & count of records that were successfully processed. - */ - public upsert(vectors: VectorizeVector[]): Promise; - /** - * Delete a list of vectors with a matching id. 
- * @param ids List of vector ids that should be deleted. - * @returns A promise that resolves with the ids & count of records that were successfully processed (and thus deleted). - */ - public deleteByIds(ids: string[]): Promise; - /** - * Get a list of vectors with a matching id. - * @param ids List of vector ids that should be returned. - * @returns A promise that resolves with the raw unscored vectors matching the id set. - */ - public getByIds(ids: string[]): Promise; -} -/** - * A Vectorize Vector Search Index for querying vectors/embeddings. - * - * Mutations in this version are async, returning a mutation id. - */ -declare abstract class Vectorize { - /** - * Get information about the currently bound index. - * @returns A promise that resolves with information about the current index. - */ - public describe(): Promise; - /** - * Use the provided vector to perform a similarity search across the index. - * @param vector Input vector that will be used to drive the similarity search. - * @param options Configuration options to massage the returned data. - * @returns A promise that resolves with matched and scored vectors. - */ - public query(vector: VectorFloatArray | number[], options?: VectorizeQueryOptions): Promise; - /** - * Use the provided vector-id to perform a similarity search across the index. - * @param vectorId Id for a vector in the index against which the index should be queried. - * @param options Configuration options to massage the returned data. - * @returns A promise that resolves with matched and scored vectors. - */ - public queryById(vectorId: string, options?: VectorizeQueryOptions): Promise; - /** - * Insert a list of vectors into the index dataset. If a provided id exists, an error will be thrown. - * @param vectors List of vectors that will be inserted. - * @returns A promise that resolves with a unique identifier of a mutation containing the insert changeset. 
- */ - public insert(vectors: VectorizeVector[]): Promise; - /** - * Upsert a list of vectors into the index dataset. If a provided id exists, it will be replaced with the new values. - * @param vectors List of vectors that will be upserted. - * @returns A promise that resolves with a unique identifier of a mutation containing the upsert changeset. - */ - public upsert(vectors: VectorizeVector[]): Promise; - /** - * Delete a list of vectors with a matching id. - * @param ids List of vector ids that should be deleted. - * @returns A promise that resolves with a unique identifier of a mutation containing the delete changeset. - */ - public deleteByIds(ids: string[]): Promise; - /** - * Get a list of vectors with a matching id. - * @param ids List of vector ids that should be returned. - * @returns A promise that resolves with the raw unscored vectors matching the id set. - */ - public getByIds(ids: string[]): Promise; -} -/** - * The interface for "version_metadata" binding - * providing metadata about the Worker Version using this binding. - */ -type WorkerVersionMetadata = { - /** The ID of the Worker Version using this binding */ - id: string; - /** The tag of the Worker Version using this binding */ - tag: string; - /** The timestamp of when the Worker Version was uploaded */ - timestamp: string; -}; -interface DynamicDispatchLimits { - /** - * Limit CPU time in milliseconds. - */ - cpuMs?: number; - /** - * Limit number of subrequests. - */ - subRequests?: number; -} -interface DynamicDispatchOptions { - /** - * Limit resources of invoked Worker script. - */ - limits?: DynamicDispatchLimits; - /** - * Arguments for outbound Worker script, if configured. - */ - outbound?: { - [key: string]: any; - }; -} -interface DispatchNamespace { - /** - * @param name Name of the Worker script. - * @param args Arguments to Worker script. - * @param options Options for Dynamic Dispatch invocation. 
- * @returns A Fetcher object that allows you to send requests to the Worker script. - * @throws If the Worker script does not exist in this dispatch namespace, an error will be thrown. - */ - get(name: string, args?: { - [key: string]: any; - }, options?: DynamicDispatchOptions): Fetcher; -} -declare module 'cloudflare:workflows' { - /** - * NonRetryableError allows for a user to throw a fatal error - * that makes a Workflow instance fail immediately without triggering a retry - */ - export class NonRetryableError extends Error { - public constructor(message: string, name?: string); - } -} -declare abstract class Workflow { - /** - * Get a handle to an existing instance of the Workflow. - * @param id Id for the instance of this Workflow - * @returns A promise that resolves with a handle for the Instance - */ - public get(id: string): Promise; - /** - * Create a new instance and return a handle to it. If a provided id exists, an error will be thrown. - * @param options Options when creating an instance including id and params - * @returns A promise that resolves with a handle for the Instance - */ - public create(options?: WorkflowInstanceCreateOptions): Promise; - /** - * Create a batch of instances and return handle for all of them. If a provided id exists, an error will be thrown. - * `createBatch` is limited at 100 instances at a time or when the RPC limit for the batch (1MiB) is reached. - * @param batch List of Options when creating an instance including name and params - * @returns A promise that resolves with a list of handles for the created instances. - */ - public createBatch(batch: WorkflowInstanceCreateOptions[]): Promise; -} -interface WorkflowInstanceCreateOptions { - /** - * An id for your Workflow instance. Must be unique within the Workflow. 
- */ - id?: string; - /** - * The event payload the Workflow instance is triggered with - */ - params?: PARAMS; -} -type InstanceStatus = { - status: 'queued' // means that instance is waiting to be started (see concurrency limits) - | 'running' | 'paused' | 'errored' | 'terminated' // user terminated the instance while it was running - | 'complete' | 'waiting' // instance is hibernating and waiting for sleep or event to finish - | 'waitingForPause' // instance is finishing the current work to pause - | 'unknown'; - error?: string; - output?: object; -}; -interface WorkflowError { - code?: number; - message: string; -} -declare abstract class WorkflowInstance { - public id: string; - /** - * Pause the instance. - */ - public pause(): Promise; - /** - * Resume the instance. If it is already running, an error will be thrown. - */ - public resume(): Promise; - /** - * Terminate the instance. If it is errored, terminated or complete, an error will be thrown. - */ - public terminate(): Promise; - /** - * Restart the instance. - */ - public restart(): Promise; - /** - * Returns the current status of the instance. - */ - public status(): Promise; - /** - * Send an event to this instance. 
- */ - public sendEvent({ type, payload, }: { - type: string; - payload: unknown; - }): Promise; -} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 558bbe69..ea0021a9 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -120,9 +120,6 @@ importers: '@cloudflare/vitest-pool-workers': specifier: ^0.8.58 version: 0.8.71(@cloudflare/workers-types@4.20250726.0)(@vitest/runner@3.2.4)(@vitest/snapshot@3.2.4)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.15.31)(jiti@2.5.1)(tsx@4.20.3)(yaml@2.8.0)) - '@cloudflare/workers-types': - specifier: ^4.20250726.0 - version: 4.20250726.0 '@eslint/js': specifier: ^9.26.0 version: 9.29.0 From 6d19a1aa03518c1bf0516d26b932dd966f939100 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Mon, 6 Oct 2025 00:46:14 +0200 Subject: [PATCH 22/29] Update wrangler dependency to version 4.42.0 in package.json and pnpm-lock.yaml --- apps/api/package.json | 2 +- pnpm-lock.yaml | 129 +++++++++++++++++++++++++++++++++++++++++- 2 files changed, 127 insertions(+), 4 deletions(-) diff --git a/apps/api/package.json b/apps/api/package.json index d062a7fd..db99e9a7 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -37,7 +37,7 @@ "typescript": "^5.8.3", "typescript-eslint": "^8.31.1", "vitest": "^3.2.4", - "wrangler": "^4.26.1" + "wrangler": "^4.42.0" }, "dependencies": { "@anthropic-ai/sdk": "^0.62.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ea0021a9..415fa7b0 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -160,8 +160,8 @@ importers: specifier: ^3.2.4 version: 3.2.4(@types/debug@4.1.12)(@types/node@22.15.31)(jiti@2.5.1)(tsx@4.20.3)(yaml@2.8.0) wrangler: - specifier: ^4.26.1 - version: 4.26.1(@cloudflare/workers-types@4.20250726.0) + specifier: ^4.42.0 + version: 4.42.0(@cloudflare/workers-types@4.20250726.0) apps/web: dependencies: @@ -734,6 +734,15 @@ packages: workerd: optional: true + '@cloudflare/unenv-preset@2.7.6': + resolution: {integrity: 
sha512-ykG2nd3trk6jbknRCH69xL3RpGLLbKCrbTbWSOvKEq7s4jH06yLrQlRr/q9IU+dK9p1JY1EXqhFK7VG5KqhzmQ==} + peerDependencies: + unenv: 2.0.0-rc.21 + workerd: ^1.20250927.0 + peerDependenciesMeta: + workerd: + optional: true + '@cloudflare/vitest-pool-workers@0.8.71': resolution: {integrity: sha512-keu2HCLQfRNwbmLBCDXJgCFpANTaYnQpE01fBOo4CNwiWHUT7SZGN7w64RKiSWRHyYppStXBuE5Ng7F42+flpg==} peerDependencies: @@ -753,6 +762,12 @@ packages: cpu: [x64] os: [darwin] + '@cloudflare/workerd-darwin-64@1.20251001.0': + resolution: {integrity: sha512-y1ST/cCscaRewWRnsHZdWbgiLJbki5UMGd0hMo/FLqjlztwPeDgQ5CGm5jMiCDdw/IBCpWxEukftPYR34rWNog==} + engines: {node: '>=16'} + cpu: [x64] + os: [darwin] + '@cloudflare/workerd-darwin-arm64@1.20250726.0': resolution: {integrity: sha512-I+TOQ+YQahxL/K7eS2GJzv5CZzSVaZoyqfB15Q71MT/+wyzPCaFDTt+fg3uXdwpaIQEMUfqFNpTQSqbKHAYNgA==} engines: {node: '>=16'} @@ -765,6 +780,12 @@ packages: cpu: [arm64] os: [darwin] + '@cloudflare/workerd-darwin-arm64@1.20251001.0': + resolution: {integrity: sha512-+z4QHHZ/Yix82zLFYS+ZS2UV09IENFPwDCEKUWfnrM9Km2jOOW3Ua4hJNob1EgQUYs8fFZo7k5O/tpwxMsSbbQ==} + engines: {node: '>=16'} + cpu: [arm64] + os: [darwin] + '@cloudflare/workerd-linux-64@1.20250726.0': resolution: {integrity: sha512-WSCv4o2uOW6b++ROVazrEW+jjZdBqCmXmmt7uVVfvjVxlzoYVwK9IvV2IXe4gsJ99HG9I0YCa7AT743cZ7TNNg==} engines: {node: '>=16'} @@ -777,6 +798,12 @@ packages: cpu: [x64] os: [linux] + '@cloudflare/workerd-linux-64@1.20251001.0': + resolution: {integrity: sha512-hGS+O2V9Mm2XjJUaB9ZHMA5asDUaDjKko42e+accbew0PQR7zrAl1afdII6hMqCLV4tk4GAjvhv281pN4g48rg==} + engines: {node: '>=16'} + cpu: [x64] + os: [linux] + '@cloudflare/workerd-linux-arm64@1.20250726.0': resolution: {integrity: sha512-jNokAGL3EQqH+31b0dX8+tlbKdjt/0UtTLvgD1e+7bOD92lzjYMa/CixHyMIY/FVvhsN4TNqfiz4cqroABTlhg==} engines: {node: '>=16'} @@ -789,6 +816,12 @@ packages: cpu: [arm64] os: [linux] + '@cloudflare/workerd-linux-arm64@1.20251001.0': + resolution: {integrity: 
sha512-QYaMK+pRgt28N7CX1JlJ+ToegJF9LxzqdT7MjWqPgVj9D2WTyIhBVYl3wYjJRcgOlnn+DRt42+li4T64CPEeuA==} + engines: {node: '>=16'} + cpu: [arm64] + os: [linux] + '@cloudflare/workerd-windows-64@1.20250726.0': resolution: {integrity: sha512-DiPTY63TNh6/ylvfutNQzYZi688x6NJDjQoqf5uiCp7xHweWx+GpVs42sZPeeXqCNvhm4dYjHjuigXJNh7t8Uw==} engines: {node: '>=16'} @@ -801,6 +834,12 @@ packages: cpu: [x64] os: [win32] + '@cloudflare/workerd-windows-64@1.20251001.0': + resolution: {integrity: sha512-ospnDR/FlyRvrv9DSHuxDAXmzEBLDUiAHQrQHda1iUH9HqxnNQ8giz9VlPfq7NIRc7bQ1ZdIYPGLJOY4Q366Ng==} + engines: {node: '>=16'} + cpu: [x64] + os: [win32] + '@cloudflare/workers-types@4.20250726.0': resolution: {integrity: sha512-NtM1yVBKJFX4LgSoZkVU0EDhWWvSb1vt6REO+uMYZRgx1HAfQz9GDN6bBB0B+fm2ZIxzt6FzlDbmrXpGJ2M/4Q==} @@ -4691,6 +4730,11 @@ packages: engines: {node: '>=18.0.0'} hasBin: true + miniflare@4.20251001.0: + resolution: {integrity: sha512-OHd31D2LT8JH+85nVXClV0Z18jxirCohzKNAcZs/fgt4mIkUDtidX3VqR3ovAM0jWooNxrFhB9NSs3iDbiJF7Q==} + engines: {node: '>=18.0.0'} + hasBin: true + minimatch@3.1.2: resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} @@ -5738,6 +5782,11 @@ packages: engines: {node: '>=16'} hasBin: true + workerd@1.20251001.0: + resolution: {integrity: sha512-oT/K4YWNhmwpVmGeaHNmF7mLRfgjszlVr7lJtpS4jx5khmxmMzWZEEQRrJEpgzeHP6DOq9qWLPNT0bjMK7TchQ==} + engines: {node: '>=16'} + hasBin: true + wrangler@4.26.1: resolution: {integrity: sha512-zGFEtHrjTAWOngm+zwEvYCxFwMSIBrzHa3Yu6rAxYMEzsT8PPvo2rdswyUJiUkpE9s2Depr37opceaY7JxEYFw==} engines: {node: '>=18.0.0'} @@ -5758,6 +5807,16 @@ packages: '@cloudflare/workers-types': optional: true + wrangler@4.42.0: + resolution: {integrity: sha512-OZXiUSfGD66OVkncDbjZtqrsH6bWPRQMYc6RmMbkzYm/lEvJ8lvARKcqDgEyq8zDAgJAivlMQLyPtKQoVjQ/4g==} + engines: {node: '>=18.0.0'} + hasBin: true + peerDependencies: + '@cloudflare/workers-types': ^4.20251001.0 + peerDependenciesMeta: + 
'@cloudflare/workers-types': + optional: true + wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} engines: {node: '>=10'} @@ -6422,6 +6481,12 @@ snapshots: optionalDependencies: workerd: 1.20250906.0 + '@cloudflare/unenv-preset@2.7.6(unenv@2.0.0-rc.21)(workerd@1.20251001.0)': + dependencies: + unenv: 2.0.0-rc.21 + optionalDependencies: + workerd: 1.20251001.0 + '@cloudflare/vitest-pool-workers@0.8.71(@cloudflare/workers-types@4.20250726.0)(@vitest/runner@3.2.4)(@vitest/snapshot@3.2.4)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.15.31)(jiti@2.5.1)(tsx@4.20.3)(yaml@2.8.0))': dependencies: '@vitest/runner': 3.2.4 @@ -6445,30 +6510,45 @@ snapshots: '@cloudflare/workerd-darwin-64@1.20250906.0': optional: true + '@cloudflare/workerd-darwin-64@1.20251001.0': + optional: true + '@cloudflare/workerd-darwin-arm64@1.20250726.0': optional: true '@cloudflare/workerd-darwin-arm64@1.20250906.0': optional: true + '@cloudflare/workerd-darwin-arm64@1.20251001.0': + optional: true + '@cloudflare/workerd-linux-64@1.20250726.0': optional: true '@cloudflare/workerd-linux-64@1.20250906.0': optional: true + '@cloudflare/workerd-linux-64@1.20251001.0': + optional: true + '@cloudflare/workerd-linux-arm64@1.20250726.0': optional: true '@cloudflare/workerd-linux-arm64@1.20250906.0': optional: true + '@cloudflare/workerd-linux-arm64@1.20251001.0': + optional: true + '@cloudflare/workerd-windows-64@1.20250726.0': optional: true '@cloudflare/workerd-windows-64@1.20250906.0': optional: true + '@cloudflare/workerd-windows-64@1.20251001.0': + optional: true + '@cloudflare/workers-types@4.20250726.0': {} '@cspotcode/source-map-support@0.8.1': @@ -6990,7 +7070,7 @@ snapshots: '@jridgewell/trace-mapping@0.3.25': dependencies: '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/sourcemap-codec': 1.5.4 '@jridgewell/trace-mapping@0.3.9': dependencies: @@ -11215,6 +11295,24 @@ 
snapshots: - bufferutil - utf-8-validate + miniflare@4.20251001.0: + dependencies: + '@cspotcode/source-map-support': 0.8.1 + acorn: 8.14.0 + acorn-walk: 8.3.2 + exit-hook: 2.2.1 + glob-to-regexp: 0.4.1 + sharp: 0.33.5 + stoppable: 1.1.0 + undici: 7.14.0 + workerd: 1.20251001.0 + ws: 8.18.0 + youch: 4.1.0-beta.10 + zod: 3.25.76 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + minimatch@3.1.2: dependencies: brace-expansion: 1.1.12 @@ -12345,6 +12443,14 @@ snapshots: '@cloudflare/workerd-linux-arm64': 1.20250906.0 '@cloudflare/workerd-windows-64': 1.20250906.0 + workerd@1.20251001.0: + optionalDependencies: + '@cloudflare/workerd-darwin-64': 1.20251001.0 + '@cloudflare/workerd-darwin-arm64': 1.20251001.0 + '@cloudflare/workerd-linux-64': 1.20251001.0 + '@cloudflare/workerd-linux-arm64': 1.20251001.0 + '@cloudflare/workerd-windows-64': 1.20251001.0 + wrangler@4.26.1(@cloudflare/workers-types@4.20250726.0): dependencies: '@cloudflare/kv-asset-handler': 0.4.0 @@ -12379,6 +12485,23 @@ snapshots: - bufferutil - utf-8-validate + wrangler@4.42.0(@cloudflare/workers-types@4.20250726.0): + dependencies: + '@cloudflare/kv-asset-handler': 0.4.0 + '@cloudflare/unenv-preset': 2.7.6(unenv@2.0.0-rc.21)(workerd@1.20251001.0) + blake3-wasm: 2.1.5 + esbuild: 0.25.4 + miniflare: 4.20251001.0 + path-to-regexp: 6.3.0 + unenv: 2.0.0-rc.21 + workerd: 1.20251001.0 + optionalDependencies: + '@cloudflare/workers-types': 4.20250726.0 + fsevents: 2.3.3 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + wrap-ansi@7.0.0: dependencies: ansi-styles: 4.3.0 From 549e19977c3afc5de0fc75b63e45849783cdaacd Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Mon, 6 Oct 2025 00:53:42 +0200 Subject: [PATCH 23/29] Reorganize exports in index.ts to improve module structure --- apps/api/src/index.ts | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index 49ea52ab..304088e1 100644 --- a/apps/api/src/index.ts +++ 
b/apps/api/src/index.ts @@ -1,9 +1,9 @@ import { Hono } from "hono"; -export { WorkflowSession } from "./durable-objects/workflow-session"; -export { Runtime } from "./runtime/runtime"; + import auth from "./auth"; import { ApiContext } from "./context"; import { handleCronTriggers } from "./cron"; +import { WorkflowSession } from "./durable-objects/workflow-session"; import { handleIncomingEmail } from "./email"; import { corsMiddleware } from "./middleware/cors"; import { createRateLimitMiddleware } from "./middleware/rate-limit"; @@ -23,6 +23,7 @@ import typeRoutes from "./routes/types"; import usageRoutes from "./routes/usage"; import workflowRoutes from "./routes/workflows"; import wsRoutes from "./routes/ws"; +import { Runtime } from "./runtime/runtime"; // Initialize Hono app with types const app = new Hono(); @@ -69,6 +70,8 @@ app.route("/:organizationIdOrHandle/objects", objectRoutes); app.route("/:organizationIdOrHandle/usage", usageRoutes); app.route("/:organizationIdOrHandle/ws", wsRoutes); +export { Runtime, WorkflowSession }; + export default { scheduled: handleCronTriggers, email: handleIncomingEmail, From 4ebf987e9d70357e5ff5a8a46673795c8e9332ae Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Mon, 6 Oct 2025 01:00:25 +0200 Subject: [PATCH 24/29] Remove redundant script_name from WORKFLOW_SESSION bindings in wrangler configuration --- apps/api/wrangler.jsonc | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/apps/api/wrangler.jsonc b/apps/api/wrangler.jsonc index ec7f6de2..eefce78d 100644 --- a/apps/api/wrangler.jsonc +++ b/apps/api/wrangler.jsonc @@ -71,8 +71,7 @@ "bindings": [ { "name": "WORKFLOW_SESSION", - "class_name": "WorkflowSession", - "script_name": "dafthunk-api" + "class_name": "WorkflowSession" } ] }, @@ -168,8 +167,7 @@ "bindings": [ { "name": "WORKFLOW_SESSION", - "class_name": "WorkflowSession", - "script_name": "dafthunk-api" + "class_name": "WorkflowSession" } ] }, From 
8c1125508f1da4892965b3fa85b1b96ad28d95ac Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Mon, 6 Oct 2025 01:06:45 +0200 Subject: [PATCH 25/29] Add migration for WorkflowSession class in wrangler configuration --- apps/api/wrangler.jsonc | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/apps/api/wrangler.jsonc b/apps/api/wrangler.jsonc index eefce78d..2616bd50 100644 --- a/apps/api/wrangler.jsonc +++ b/apps/api/wrangler.jsonc @@ -73,6 +73,12 @@ "name": "WORKFLOW_SESSION", "class_name": "WorkflowSession" } + ], + "migrations": [ + { + "tag": "v1", + "new_classes": ["WorkflowSession"] + } ] }, "unsafe": { @@ -169,6 +175,12 @@ "name": "WORKFLOW_SESSION", "class_name": "WorkflowSession" } + ], + "migrations": [ + { + "tag": "v1", + "new_classes": ["WorkflowSession"] + } ] }, "unsafe": { From 51d5e6577a40ee1ef8885164aeb9c6eade1fe8ce Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Mon, 6 Oct 2025 01:06:45 +0200 Subject: [PATCH 26/29] Add migration for WorkflowSession class in wrangler configuration --- apps/api/wrangler.jsonc | 58 +++++++++++++++++++++++++---------------- 1 file changed, 35 insertions(+), 23 deletions(-) diff --git a/apps/api/wrangler.jsonc b/apps/api/wrangler.jsonc index 2616bd50..a5ba4485 100644 --- a/apps/api/wrangler.jsonc +++ b/apps/api/wrangler.jsonc @@ -7,9 +7,13 @@ "name": "dafthunk-api", "main": "src/index.ts", "compatibility_date": "2024-10-22", - "compatibility_flags": ["nodejs_compat"], + "compatibility_flags": [ + "nodejs_compat" + ], "triggers": { - "crons": ["* * * * *"] + "crons": [ + "* * * * *" + ] }, "observability": { "enabled": true, @@ -20,7 +24,6 @@ * Docs: https://developers.cloudflare.com/workers/configuration/smart-placement/#smart-placement */ // "placement": { "mode": "smart" }, - /** * Bindings * Bindings allow your Worker to interact with resources on the Cloudflare Developer Platform, including @@ -65,22 +68,28 @@ } ], "analytics_engine_datasets": [ - { "binding": "COMPUTE", "dataset": 
"dafthunk-compute-development" } + { + "binding": "COMPUTE", + "dataset": "dafthunk-compute-development" + } ], "durable_objects": { "bindings": [ { "name": "WORKFLOW_SESSION", - "class_name": "WorkflowSession" + "class_name": "WorkflowSession", + "script_name": "dafthunk-api" } ], - "migrations": [ - { - "tag": "v1", - "new_classes": ["WorkflowSession"] - } - ] }, + "migrations": [ + { + "tag": "v1", + "new_classes": [ + "WorkflowSession" + ] + } + ], "unsafe": { "bindings": [ { @@ -112,7 +121,6 @@ } ] }, - /** * Environment Variables * https://developers.cloudflare.com/workers/wrangler/configuration/#environment-variables @@ -167,22 +175,28 @@ } ], "analytics_engine_datasets": [ - { "binding": "COMPUTE", "dataset": "dafthunk-compute-production" } + { + "binding": "COMPUTE", + "dataset": "dafthunk-compute-production" + } ], "durable_objects": { "bindings": [ { "name": "WORKFLOW_SESSION", - "class_name": "WorkflowSession" + "class_name": "WorkflowSession", + "script_name": "dafthunk-api" } ], - "migrations": [ - { - "tag": "v1", - "new_classes": ["WorkflowSession"] - } - ] }, + "migrations": [ + { + "tag": "v1", + "new_classes": [ + "WorkflowSession" + ] + } + ], "unsafe": { "bindings": [ { @@ -223,16 +237,14 @@ * Note: Use secrets to store sensitive data. 
* https://developers.cloudflare.com/workers/configuration/secrets/ */ - /** * Static Assets * https://developers.cloudflare.com/workers/static-assets/binding/ */ // "assets": { "directory": "./public/", "binding": "ASSETS" }, - /** * Service Bindings (communicate between multiple Workers) * https://developers.cloudflare.com/workers/wrangler/configuration/#service-bindings */ // "services": [{ "binding": "MY_SERVICE", "service": "my-service" }] -} +} \ No newline at end of file From 2eeec00b7d77f87eb78ec6c2ff09be0acea74c63 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Mon, 6 Oct 2025 22:11:31 +0200 Subject: [PATCH 27/29] Refactor workflow execution handling: remove monitorProgress flag, add WebSocket execution updates, and enhance WorkflowSession class for better state management --- apps/api/src/cron.ts | 1 - .../src/durable-objects/workflow-session.ts | 215 +++++++++++++++++- apps/api/src/email.ts | 1 - apps/api/src/routes/deployments.ts | 4 - apps/api/src/routes/workflows.ts | 3 - apps/api/src/runtime/runtime.ts | 111 +++++---- apps/api/wrangler.jsonc | 22 +- apps/web/src/hooks/use-editable-workflow.ts | 25 +- apps/web/src/pages/editor-page.tsx | 19 +- apps/web/src/services/workflow-service.ts | 39 +++- .../src/services/workflow-session-service.ts | 41 +++- packages/types/src/workflow.ts | 7 +- 12 files changed, 398 insertions(+), 90 deletions(-) diff --git a/apps/api/src/cron.ts b/apps/api/src/cron.ts index b63c3c57..9cc53659 100644 --- a/apps/api/src/cron.ts +++ b/apps/api/src/cron.ts @@ -50,7 +50,6 @@ async function executeWorkflow( nodes: workflowData.nodes, edges: workflowData.edges, }, - monitorProgress: false, deploymentId: deploymentId, }, }); diff --git a/apps/api/src/durable-objects/workflow-session.ts b/apps/api/src/durable-objects/workflow-session.ts index cf2e3259..791537c4 100644 --- a/apps/api/src/durable-objects/workflow-session.ts +++ b/apps/api/src/durable-objects/workflow-session.ts @@ -1,5 +1,15 @@ +/** + * WorkflowSession Durable Object 
+ * + * Manages workflow state synchronization and execution coordination via WebSocket. + * Clients connect via WebSocket to sync state and receive realtime execution updates. + */ + import { WorkflowErrorMessage, + WorkflowExecuteMessage, + WorkflowExecution, + WorkflowExecutionUpdateMessage, WorkflowInitMessage, WorkflowMessage, WorkflowState, @@ -9,16 +19,23 @@ import { import { DurableObject } from "cloudflare:workers"; import { Bindings } from "../context"; -import { createDatabase } from "../db/index"; -import { getWorkflowWithUserAccess, updateWorkflow } from "../db/queries"; +import { createDatabase, ExecutionStatus, saveExecution } from "../db/index"; +import { + getOrganizationComputeCredits, + getWorkflowWithUserAccess, + updateWorkflow, +} from "../db/queries"; export class WorkflowSession extends DurableObject { private static readonly PERSIST_DEBOUNCE_MS = 500; private state: WorkflowState | null = null; private organizationId: string | null = null; + private userId: string | null = null; private pendingPersistTimeout: number | undefined = undefined; private connectedUsers: Set = new Set(); + private executions: Map = new Map(); + private executionIdToWebSocket: Map = new Map(); constructor(ctx: DurableObjectState, env: Bindings) { super(ctx, env); @@ -177,7 +194,15 @@ export class WorkflowSession extends DurableObject { async fetch(request: Request): Promise { const url = new URL(request.url); - // Extract workflowId from URL path (e.g., /ws/:workflowId) + // This endpoint is ONLY called by the Runtime (Cloudflare Workflow) + // to send execution progress updates. Clients never call this directly. + if (url.pathname.endsWith("/execution") && request.method === "POST") { + return this.handleExecutionUpdate(request); + } + + // This endpoint is called by the api to establish a WebSocket connection. + // It requires authentication and userId. + // It extracts workflowId from the URL path. 
const pathParts = url.pathname.split("/").filter(Boolean); const workflowId = pathParts[pathParts.length - 1] || ""; @@ -196,10 +221,10 @@ export class WorkflowSession extends DurableObject { }); } - // Only load if not already in memory if (!this.state) { try { await this.loadState(workflowId, userId); + this.userId = userId; } catch (error) { console.error("Error loading workflow:", error); return Response.json( @@ -242,12 +267,53 @@ export class WorkflowSession extends DurableObject { } } + /** + * Handle execution updates from Runtime (internal endpoint) + */ + private async handleExecutionUpdate(request: Request): Promise { + try { + const execution = (await request.json()) as WorkflowExecution; + + const ws = this.executionIdToWebSocket.get(execution.id); + if (!ws) { + console.warn( + `No WebSocket connection found for execution ${execution.id}` + ); + return Response.json({ ok: true }); + } + + this.executions.set(ws, execution); + + const updateMessage: WorkflowExecutionUpdateMessage = { + type: "execution_update", + executionId: execution.id, + status: execution.status, + nodeExecutions: execution.nodeExecutions, + error: execution.error, + }; + + ws.send(JSON.stringify(updateMessage)); + + return Response.json({ ok: true }); + } catch (error) { + console.error("Error handling execution update:", error); + return Response.json( + { + error: "Failed to handle execution update", + details: error instanceof Error ? 
error.message : "Unknown error", + }, + { status: 500 } + ); + } + } + private async handleWebSocketUpgrade(_request: Request): Promise { const webSocketPair = new WebSocketPair(); const [client, server] = Object.values(webSocketPair); this.ctx.acceptWebSocket(server); this.connectedUsers.add(server); + this.executions.set(server, null); // Initialize with no execution const initState = await this.getState(); const initMessage: WorkflowInitMessage = { @@ -262,6 +328,13 @@ export class WorkflowSession extends DurableObject { }); } + /** + * Handle WebSocket messages from client + * + * Supports two message types: + * 1. WorkflowUpdateMessage - Update workflow state (nodes/edges) + * 2. WorkflowExecuteMessage - Trigger workflow execution or register for updates + */ async webSocketMessage(ws: WebSocket, message: string | ArrayBuffer) { try { if (typeof message !== "string") { @@ -276,9 +349,18 @@ export class WorkflowSession extends DurableObject { if ("type" in data && data.type === "update") { const updateMsg = data as WorkflowUpdateMessage; - - // Update with the new state await this.updateState(updateMsg.state); + } else if ("type" in data && data.type === "execute") { + const executeMsg = data as WorkflowExecuteMessage; + + if (executeMsg.executionId) { + this.executionIdToWebSocket.set(executeMsg.executionId, ws); + console.log( + `Registered execution ${executeMsg.executionId} for WebSocket updates` + ); + } else { + await this.handleExecuteWorkflow(ws, executeMsg.parameters); + } } } catch (error) { console.error("WebSocket message error:", error); @@ -290,16 +372,133 @@ export class WorkflowSession extends DurableObject { } } + /** + * Handle workflow execution triggered via WebSocket + */ + private async handleExecuteWorkflow( + ws: WebSocket, + _parameters?: Record + ): Promise { + if (!this.state || !this.organizationId || !this.userId) { + const errorMsg: WorkflowErrorMessage = { + error: "Workflow not initialized", + }; + 
ws.send(JSON.stringify(errorMsg)); + return; + } + + try { + const db = createDatabase(this.env.DB); + + // Get organization compute credits + const computeCredits = await getOrganizationComputeCredits( + db, + this.organizationId + ); + if (computeCredits === undefined) { + const errorMsg: WorkflowErrorMessage = { + error: "Organization not found", + }; + ws.send(JSON.stringify(errorMsg)); + return; + } + + // Validate workflow has nodes + if (!this.state.nodes || this.state.nodes.length === 0) { + const errorMsg: WorkflowErrorMessage = { + error: + "Cannot execute an empty workflow. Please add nodes to the workflow.", + }; + ws.send(JSON.stringify(errorMsg)); + return; + } + + const executionParams = { + workflow: { + id: this.state.id, + name: this.state.name, + handle: this.state.handle, + type: this.state.type, + nodes: this.state.nodes, + edges: this.state.edges, + }, + userId: this.userId, + organizationId: this.organizationId, + computeCredits, + workflowSessionId: this.state.id, + }; + + // Start workflow execution + const instance = await this.env.EXECUTE.create({ + params: executionParams, + }); + const executionId = instance.id; + + // Register this WebSocket for execution updates + this.executionIdToWebSocket.set(executionId, ws); + + // Build initial nodeExecutions + const nodeExecutions = this.state.nodes.map((node) => ({ + nodeId: node.id, + status: "executing" as const, + })); + + // Save initial execution record + const initialExecution = await saveExecution(db, { + id: executionId, + workflowId: this.state.id, + userId: this.userId, + organizationId: this.organizationId, + status: ExecutionStatus.EXECUTING, + nodeExecutions, + createdAt: new Date(), + updatedAt: new Date(), + }); + + // Store execution for this WebSocket + this.executions.set(ws, { + id: initialExecution.id, + workflowId: initialExecution.workflowId, + status: "submitted", + nodeExecutions: initialExecution.nodeExecutions, + }); + + // Send execution started message + const 
updateMessage: WorkflowExecutionUpdateMessage = { + type: "execution_update", + executionId: initialExecution.id, + status: "submitted", + nodeExecutions: initialExecution.nodeExecutions, + }; + ws.send(JSON.stringify(updateMessage)); + + console.log( + `Started workflow execution ${executionId} for workflow ${this.state.id}` + ); + } catch (error) { + console.error("Failed to execute workflow:", error); + const errorMsg: WorkflowErrorMessage = { + error: "Failed to execute workflow", + details: error instanceof Error ? error.message : "Unknown error", + }; + ws.send(JSON.stringify(errorMsg)); + } + } + async webSocketClose( ws: WebSocket, _code: number, _reason: string, _wasClean: boolean ) { - // Remove WebSocket from connected users this.connectedUsers.delete(ws); - // Flush pending persist when connection closes + const execution = this.executions.get(ws); + if (execution) { + this.executionIdToWebSocket.delete(execution.id); + } + this.executions.delete(ws); + if (this.pendingPersistTimeout !== undefined) { clearTimeout(this.pendingPersistTimeout); await this.persistToDatabase(); diff --git a/apps/api/src/email.ts b/apps/api/src/email.ts index d3a3bae0..bc75fb10 100644 --- a/apps/api/src/email.ts +++ b/apps/api/src/email.ts @@ -159,7 +159,6 @@ export async function handleIncomingEmail( nodes: workflowData.nodes, edges: workflowData.edges, }, - monitorProgress: false, deploymentId, emailMessage: { from, diff --git a/apps/api/src/routes/deployments.ts b/apps/api/src/routes/deployments.ts index 56ee2214..7b0ee33e 100644 --- a/apps/api/src/routes/deployments.ts +++ b/apps/api/src/routes/deployments.ts @@ -263,9 +263,6 @@ deploymentRoutes.post( const deploymentId = c.req.param("deploymentId"); const db = createDatabase(c.env.DB); - const monitorProgress = - new URL(c.req.url).searchParams.get("monitorProgress") === "true"; - // Get organization compute credits const computeCredits = await getOrganizationComputeCredits( db, @@ -333,7 +330,6 @@ deploymentRoutes.post( 
nodes: workflowData.nodes, edges: workflowData.edges, }, - monitorProgress, deploymentId: deployment.id, httpRequest: { url, diff --git a/apps/api/src/routes/workflows.ts b/apps/api/src/routes/workflows.ts index 048b379b..5785215b 100644 --- a/apps/api/src/routes/workflows.ts +++ b/apps/api/src/routes/workflows.ts @@ -476,8 +476,6 @@ workflowRoutes.post( const workflowIdOrHandle = c.req.param("workflowIdOrHandle"); const version = c.req.param("version"); const db = createDatabase(c.env.DB); - const monitorProgress = - new URL(c.req.url).searchParams.get("monitorProgress") === "true"; // Get organization compute credits const computeCredits = await getOrganizationComputeCredits( @@ -642,7 +640,6 @@ workflowRoutes.post( userId, organizationId, computeCredits, - monitorProgress, deploymentId, }; diff --git a/apps/api/src/runtime/runtime.ts b/apps/api/src/runtime/runtime.ts index bb43f6c7..1219cf1b 100644 --- a/apps/api/src/runtime/runtime.ts +++ b/apps/api/src/runtime/runtime.ts @@ -2,7 +2,6 @@ import { JsonArray, JsonObject, Node, - NodeExecutionStatus, ObjectReference, Workflow, WorkflowExecution, @@ -74,7 +73,7 @@ export type RuntimeParams = { userId: string; organizationId: string; computeCredits: number; - monitorProgress?: boolean; + workflowSessionId?: string; deploymentId?: string; httpRequest?: HttpRequest; emailMessage?: EmailMessage; @@ -180,7 +179,7 @@ export class Runtime extends WorkflowEntrypoint { workflow, userId, organizationId, - monitorProgress = false, + workflowSessionId, httpRequest, emailMessage, computeCredits, @@ -313,27 +312,19 @@ export class Runtime extends WorkflowEntrypoint { ); } - // Persist progress after each execution unit if monitoring is enabled - if (monitorProgress) { - const unitDescription = - executionUnit.type === "individual" - ? 
executionUnit.nodeId - : `inline group [${executionUnit.nodeIds.join(", ")}]`; - - executionRecord = await step.do( - `persist after ${unitDescription}`, - Runtime.defaultStepConfig, - async () => - this.saveExecutionState( - userId, - organizationId, - workflow.id, - instanceId, - runtimeState, - executionRecord.startedAt, - executionRecord.endedAt - ) - ); + if (workflowSessionId) { + executionRecord = { + ...executionRecord, + status: runtimeState.status, + nodeExecutions: this.buildNodeExecutions(runtimeState), + }; + + this.sendExecutionUpdateToSession( + workflowSessionId, + executionRecord + ).catch((error) => { + console.error("Failed to send execution update to session:", error); + }); } } } catch (error) { @@ -376,6 +367,15 @@ export class Runtime extends WorkflowEntrypoint { ); } ); + + if (workflowSessionId) { + this.sendExecutionUpdateToSession( + workflowSessionId, + executionRecord + ).catch((error) => { + console.error("Failed to send execution update to session:", error); + }); + } } return executionRecord; @@ -412,6 +412,29 @@ export class Runtime extends WorkflowEntrypoint { }, 0); } + private async sendExecutionUpdateToSession( + workflowSessionId: string, + execution: WorkflowExecution + ): Promise { + try { + const id = this.env.WORKFLOW_SESSION.idFromName(workflowSessionId); + const stub = this.env.WORKFLOW_SESSION.get(id); + + await stub.fetch(`https://workflow-session/execution`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(execution), + }); + } catch (error) { + console.error( + `Failed to send execution update to session ${workflowSessionId}:`, + error + ); + } + } + /** * Preloads all organization secrets for synchronous access during workflow execution */ @@ -1188,44 +1211,50 @@ export class Runtime extends WorkflowEntrypoint { } /** - * Persists the workflow execution state to the database. 
+ * Builds node execution list from runtime state */ - private async saveExecutionState( - userId: string, - organizationId: string, - workflowId: string, - instanceId: string, - runtimeState: RuntimeState, - startedAt?: Date, - endedAt?: Date - ): Promise { - // Build node execution list with explicit status for each node. - const nodeExecutionList = runtimeState.workflow.nodes.map((node) => { + private buildNodeExecutions(runtimeState: RuntimeState) { + return runtimeState.workflow.nodes.map((node) => { if (runtimeState.executedNodes.has(node.id)) { return { nodeId: node.id, - status: "completed" as NodeExecutionStatus, - outputs: runtimeState.nodeOutputs.get(node.id), + status: "completed" as const, + outputs: runtimeState.nodeOutputs.get(node.id) || {}, }; } if (runtimeState.nodeErrors.has(node.id)) { return { nodeId: node.id, - status: "error" as NodeExecutionStatus, + status: "error" as const, error: runtimeState.nodeErrors.get(node.id), }; } if (runtimeState.skippedNodes.has(node.id)) { return { nodeId: node.id, - status: "skipped" as NodeExecutionStatus, + status: "skipped" as const, }; } return { nodeId: node.id, - status: "executing" as NodeExecutionStatus, + status: "executing" as const, }; }); + } + + /** + * Persists the workflow execution state to the database. 
+ */ + private async saveExecutionState( + userId: string, + organizationId: string, + workflowId: string, + instanceId: string, + runtimeState: RuntimeState, + startedAt?: Date, + endedAt?: Date + ): Promise { + const nodeExecutionList = this.buildNodeExecutions(runtimeState); const executionStatus = runtimeState.status; const errorMsg = diff --git a/apps/api/wrangler.jsonc b/apps/api/wrangler.jsonc index a5ba4485..74bc7926 100644 --- a/apps/api/wrangler.jsonc +++ b/apps/api/wrangler.jsonc @@ -7,13 +7,9 @@ "name": "dafthunk-api", "main": "src/index.ts", "compatibility_date": "2024-10-22", - "compatibility_flags": [ - "nodejs_compat" - ], + "compatibility_flags": ["nodejs_compat"], "triggers": { - "crons": [ - "* * * * *" - ] + "crons": ["* * * * *"] }, "observability": { "enabled": true, @@ -80,14 +76,12 @@ "class_name": "WorkflowSession", "script_name": "dafthunk-api" } - ], + ] }, "migrations": [ { "tag": "v1", - "new_classes": [ - "WorkflowSession" - ] + "new_classes": ["WorkflowSession"] } ], "unsafe": { @@ -187,14 +181,12 @@ "class_name": "WorkflowSession", "script_name": "dafthunk-api" } - ], + ] }, "migrations": [ { "tag": "v1", - "new_classes": [ - "WorkflowSession" - ] + "new_classes": ["WorkflowSession"] } ], "unsafe": { @@ -247,4 +239,4 @@ * https://developers.cloudflare.com/workers/wrangler/configuration/#service-bindings */ // "services": [{ "binding": "MY_SERVICE", "service": "my-service" }] -} \ No newline at end of file +} diff --git a/apps/web/src/hooks/use-editable-workflow.ts b/apps/web/src/hooks/use-editable-workflow.ts index 13c6da0a..44bc858a 100644 --- a/apps/web/src/hooks/use-editable-workflow.ts +++ b/apps/web/src/hooks/use-editable-workflow.ts @@ -1,4 +1,8 @@ -import type { Parameter, ParameterType } from "@dafthunk/types"; +import type { + Parameter, + ParameterType, + WorkflowExecution, +} from "@dafthunk/types"; import type { Edge, Node } from "@xyflow/react"; import { useCallback, useEffect, useRef, useState } from "react"; @@ -18,11 
+22,13 @@ import { adaptDeploymentNodesToReactFlowNodes } from "@/utils/utils"; interface UseEditableWorkflowProps { workflowId: string | undefined; nodeTemplates?: NodeTemplate[]; + onExecutionUpdate?: (execution: WorkflowExecution) => void; } export function useEditableWorkflow({ workflowId, nodeTemplates = [], + onExecutionUpdate, }: UseEditableWorkflowProps) { const [nodes, setNodes] = useState[]>([]); const [edges, setEdges] = useState[]>([]); @@ -124,6 +130,10 @@ export function useEditableWorkflow({ setProcessingError(`WebSocket error: ${error}`); setIsInitializing(false); }, + onExecutionUpdate: (execution: WorkflowExecution) => { + // Forward execution updates to parent component + onExecutionUpdate?.(execution); + }, }); wsRef.current = ws; @@ -224,9 +234,19 @@ export function useEditableWorkflow({ [workflowId, isInitializing] ); - // No debouncing needed - WebSocket handles message batching naturally const saveWorkflow = saveWorkflowInternal; + const executeWorkflow = useCallback( + (options?: { parameters?: Record }) => { + if (!wsRef.current?.isConnected()) { + console.warn("WebSocket is not connected, cannot execute workflow"); + return; + } + wsRef.current.executeWorkflow(options); + }, + [] + ); + return { nodes, edges, @@ -236,5 +256,6 @@ export function useEditableWorkflow({ saveWorkflow, isWSConnected, workflowMetadata, + executeWorkflow, }; } diff --git a/apps/web/src/pages/editor-page.tsx b/apps/web/src/pages/editor-page.tsx index 21cc497e..63e75a05 100644 --- a/apps/web/src/pages/editor-page.tsx +++ b/apps/web/src/pages/editor-page.tsx @@ -94,7 +94,16 @@ export function EditorPage() { return templates; }, [nodeTypes]); - // Get workflow metadata from WebSocket connection + const executionCallbackRef = useRef< + ((execution: WorkflowExecution) => void) | null + >(null); + + const handleExecutionUpdate = useCallback((execution: WorkflowExecution) => { + if (executionCallbackRef.current) { + executionCallbackRef.current(execution); + } + }, []); 
+ const { nodes: initialNodesForUI, edges: initialEdgesForUI, @@ -104,9 +113,11 @@ export function EditorPage() { saveWorkflow, isWSConnected: _isWSConnected, workflowMetadata, + executeWorkflow: wsExecuteWorkflow, } = useEditableWorkflow({ workflowId: id, nodeTemplates, + onExecutionUpdate: handleExecutionUpdate, }); // Now we can use workflowMetadata for cron trigger @@ -207,7 +218,7 @@ export function EditorPage() { closeExecutionForm, isEmailFormDialogVisible, submitEmailFormData, - } = useWorkflowExecution(orgHandle); + } = useWorkflowExecution(orgHandle, wsExecuteWorkflow); usePageBreadcrumbs( [ @@ -250,6 +261,8 @@ export function EditorPage() { workflowIdFromBuilder: string, onExecutionFromBuilder: (execution: WorkflowExecution) => void ) => { + executionCallbackRef.current = onExecutionFromBuilder; + return executeWorkflow( workflowIdFromBuilder, onExecutionFromBuilder, @@ -258,7 +271,7 @@ export function EditorPage() { workflowMetadata?.type ); }, - [executeWorkflow, nodeTemplates, workflowMetadata?.type] // No latestUiNodes dependency since we're using refs + [executeWorkflow, nodeTemplates, workflowMetadata?.type] ); const handleRetryLoading = () => { diff --git a/apps/web/src/services/workflow-service.ts b/apps/web/src/services/workflow-service.ts index 8e691f3f..2761e6c1 100644 --- a/apps/web/src/services/workflow-service.ts +++ b/apps/web/src/services/workflow-service.ts @@ -275,7 +275,10 @@ const wouldCreateIndirectCycle = ( /** * Hook to manage workflow execution, including parameter forms and status polling. 
*/ -export function useWorkflowExecution(orgHandle: string) { +export function useWorkflowExecution( + orgHandle: string, + wsExecuteFn?: (options?: { parameters?: Record }) => void +) { const [isFormDialogVisible, setIsFormDialogVisible] = useState(false); const [isJsonBodyDialogVisible, setIsJsonBodyDialogVisible] = useState(false); const [isEmailFormDialogVisible, setIsEmailFormDialogVisible] = @@ -323,7 +326,7 @@ export function useWorkflowExecution(orgHandle: string) { const response = await makeOrgRequest( orgHandle, API_ENDPOINT_BASE, - `/${id}/execute/dev?monitorProgress=${request?.monitorProgress ?? true}`, + `/${id}/execute/dev`, { method: "POST", ...(request?.parameters && @@ -419,11 +422,31 @@ export function useWorkflowExecution(orgHandle: string) { cleanup(); pollingRef.current.cancelled = false; - executeAndPollWorkflow(id, { monitorProgress: true, ...request }) + if (wsExecuteFn) { + try { + wsExecuteFn({ + parameters: request?.parameters, + }); + } catch (error) { + console.error("WebSocket execution failed:", error); + onExecutionUpdate({ + id: "", + workflowId: id, + status: "error", + nodeExecutions: [], + error: + error instanceof Error + ? 
error.message + : "WebSocket execution failed", + }); + } + return cancelCurrentExecution; + } + + executeAndPollWorkflow(id, request) .then((initialExecution: WorkflowExecution) => { if (pollingRef.current.cancelled) return; - // Track the current execution pollingRef.current.currentExecutionId = initialExecution.id; pollingRef.current.currentWorkflowId = id; @@ -487,7 +510,13 @@ export function useWorkflowExecution(orgHandle: string) { return cancelCurrentExecution; }, - [executeAndPollWorkflow, cancelCurrentExecution, orgHandle, cleanup] + [ + wsExecuteFn, + executeAndPollWorkflow, + cancelCurrentExecution, + orgHandle, + cleanup, + ] ); const executeWorkflowWithForm = useCallback( diff --git a/apps/web/src/services/workflow-session-service.ts b/apps/web/src/services/workflow-session-service.ts index a605f36d..456cf8c1 100644 --- a/apps/web/src/services/workflow-session-service.ts +++ b/apps/web/src/services/workflow-session-service.ts @@ -81,7 +81,7 @@ export class WorkflowWebSocket { } else if (message.type === "execution_update") { this.options.onExecutionUpdate?.({ id: message.executionId, - workflowId: "", // Will be filled from context + workflowId: this.workflowId, status: message.status, nodeExecutions: message.nodeExecutions, error: message.error, @@ -157,9 +157,36 @@ export class WorkflowWebSocket { } } - executeWorkflow(executionId: string): void { + /** + * Execute workflow and receive realtime updates via WebSocket + */ + executeWorkflow(options?: { parameters?: Record }): void { if (this.ws?.readyState !== WebSocket.OPEN) { - console.warn("WebSocket is not open, cannot send execute message"); + console.warn("WebSocket is not open, cannot execute workflow"); + this.options.onError?.("WebSocket is not connected"); + return; + } + + try { + const executeMsg = { + type: "execute", + parameters: options?.parameters, + }; + this.ws.send(JSON.stringify(executeMsg)); + } catch (error) { + console.error("Failed to execute workflow:", error); + 
this.options.onError?.("Failed to execute workflow"); + } + } + + /** + * Register to receive updates for an existing execution + */ + registerForExecutionUpdates(executionId: string): void { + if (this.ws?.readyState !== WebSocket.OPEN) { + console.warn( + "WebSocket is not open, cannot register for execution updates" + ); return; } @@ -170,8 +197,8 @@ export class WorkflowWebSocket { }; this.ws.send(JSON.stringify(executeMsg)); } catch (error) { - console.error("Failed to send execute message:", error); - this.options.onError?.("Failed to send execute message"); + console.error("Failed to register for execution updates:", error); + this.options.onError?.("Failed to register for execution updates"); } } @@ -186,6 +213,10 @@ export class WorkflowWebSocket { isConnected(): boolean { return this.ws?.readyState === WebSocket.OPEN; } + + getWorkflowId(): string { + return this.workflowId; + } } export const connectWorkflowWS = ( diff --git a/packages/types/src/workflow.ts b/packages/types/src/workflow.ts index dd1e07a7..01a48b7c 100644 --- a/packages/types/src/workflow.ts +++ b/packages/types/src/workflow.ts @@ -395,7 +395,6 @@ export interface DeleteWorkflowResponse { * Request to execute a workflow */ export interface ExecuteWorkflowRequest { - monitorProgress?: boolean; // eslint-disable-next-line @typescript-eslint/no-explicit-any parameters?: Record; } @@ -503,10 +502,14 @@ export interface WorkflowErrorMessage { /** * Message sent from client to server to start workflow execution + * or register for execution updates */ export interface WorkflowExecuteMessage { type: "execute"; - executionId: string; + /** If provided, register for updates on this execution. If not provided, start a new execution. 
*/ + executionId?: string; + /** Additional parameters for workflow execution */ + parameters?: Record; } /** From a6984ccb94db7e8057daf51e53c51c63362f3376 Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Tue, 7 Oct 2025 00:12:46 +0200 Subject: [PATCH 28/29] refactor: restructure runtime --- .../runtime/conditional-execution-handler.ts | 122 ++ apps/api/src/runtime/credit-manager.ts | 47 + apps/api/src/runtime/execution-persistence.ts | 121 ++ apps/api/src/runtime/execution-planner.ts | 225 ++++ apps/api/src/runtime/node-executor.ts | 302 +++++ apps/api/src/runtime/node-input-mapper.ts | 186 +++ apps/api/src/runtime/node-output-mapper.ts | 46 + apps/api/src/runtime/object-store.ts | 660 +++------- apps/api/src/runtime/runtime.ts | 1087 +---------------- apps/api/src/runtime/secret-manager.ts | 64 + 10 files changed, 1349 insertions(+), 1511 deletions(-) create mode 100644 apps/api/src/runtime/conditional-execution-handler.ts create mode 100644 apps/api/src/runtime/credit-manager.ts create mode 100644 apps/api/src/runtime/execution-persistence.ts create mode 100644 apps/api/src/runtime/execution-planner.ts create mode 100644 apps/api/src/runtime/node-executor.ts create mode 100644 apps/api/src/runtime/node-input-mapper.ts create mode 100644 apps/api/src/runtime/node-output-mapper.ts create mode 100644 apps/api/src/runtime/secret-manager.ts diff --git a/apps/api/src/runtime/conditional-execution-handler.ts b/apps/api/src/runtime/conditional-execution-handler.ts new file mode 100644 index 00000000..c91c8516 --- /dev/null +++ b/apps/api/src/runtime/conditional-execution-handler.ts @@ -0,0 +1,122 @@ +import type { CloudflareNodeRegistry } from "../nodes/cloudflare-node-registry"; +import type { NodeInputMapper } from "./node-input-mapper"; +import type { RuntimeState } from "./runtime"; + +/** + * Handles conditional logic in workflow execution. + * Determines which nodes should be skipped based on inactive outputs and missing inputs. 
+ */ +export class ConditionalExecutionHandler { + constructor( + private nodeRegistry: CloudflareNodeRegistry, + private inputMapper: NodeInputMapper + ) {} + + /** + * Marks nodes connected to inactive outputs as skipped. + * This is crucial for conditional logic where only one branch should execute. + */ + markInactiveOutputNodesAsSkipped( + runtimeState: RuntimeState, + nodeIdentifier: string, + nodeOutputs: Record + ): RuntimeState { + const node = runtimeState.workflow.nodes.find( + (n) => n.id === nodeIdentifier + ); + if (!node) return runtimeState; + + // Find outputs that were NOT produced + const inactiveOutputs = node.outputs + .map((output) => output.name) + .filter((outputName) => !(outputName in nodeOutputs)); + + if (inactiveOutputs.length === 0) return runtimeState; + + // Find all edges from this node's inactive outputs + const inactiveEdges = runtimeState.workflow.edges.filter( + (edge) => + edge.source === nodeIdentifier && + inactiveOutputs.includes(edge.sourceOutput) + ); + + // Process each target node of inactive edges + for (const edge of inactiveEdges) { + this.markNodeAsSkippedIfNoValidInputs(runtimeState, edge.target); + } + + return runtimeState; + } + + /** + * Marks a node as skipped if it cannot execute due to missing required inputs. + * This is smarter than recursively skipping all dependents. 
+ */ + private markNodeAsSkippedIfNoValidInputs( + runtimeState: RuntimeState, + nodeId: string + ): void { + if ( + runtimeState.skippedNodes.has(nodeId) || + runtimeState.executedNodes.has(nodeId) + ) { + return; // Already processed + } + + const node = runtimeState.workflow.nodes.find((n) => n.id === nodeId); + if (!node) return; + + // Check if this node has all required inputs satisfied + const allRequiredInputsSatisfied = this.nodeHasAllRequiredInputsSatisfied( + runtimeState, + nodeId + ); + + // Only skip if the node cannot execute (missing required inputs) + if (!allRequiredInputsSatisfied) { + runtimeState.skippedNodes.add(nodeId); + + // Recursively check dependents of this skipped node + const outgoingEdges = runtimeState.workflow.edges.filter( + (edge) => edge.source === nodeId + ); + + for (const edge of outgoingEdges) { + this.markNodeAsSkippedIfNoValidInputs(runtimeState, edge.target); + } + } + } + + /** + * Checks if a node has all required inputs satisfied. + * A node can execute if all its required inputs are available. 
+ */ + private nodeHasAllRequiredInputsSatisfied( + runtimeState: RuntimeState, + nodeId: string + ): boolean { + const node = runtimeState.workflow.nodes.find((n) => n.id === nodeId); + if (!node) return false; + + // Get the node type definition to check for required inputs + const executable = this.nodeRegistry.createExecutableNode(node); + if (!executable) return false; + + const nodeTypeDefinition = (executable.constructor as any).nodeType; + if (!nodeTypeDefinition) return false; + + const inputValues = this.inputMapper.collectNodeInputs( + runtimeState, + nodeId + ); + + // Check each required input based on the node type definition (not workflow node definition) + for (const input of nodeTypeDefinition.inputs) { + if (input.required && inputValues[input.name] === undefined) { + return false; // Found a required input that's missing + } + } + + return true; // All required inputs are satisfied + } +} diff --git a/apps/api/src/runtime/credit-manager.ts b/apps/api/src/runtime/credit-manager.ts new file mode 100644 index 00000000..ed452880 --- /dev/null +++ b/apps/api/src/runtime/credit-manager.ts @@ -0,0 +1,47 @@ +import type { Node } from "@dafthunk/types"; + +import type { Bindings } from "../context"; +import type { CloudflareNodeRegistry } from "../nodes/cloudflare-node-registry"; +import { getOrganizationComputeUsage } from "../utils/credits"; + +/** + * Manages compute credits for workflow execution. + * Handles credit checks and cost calculations. + */ +export class CreditManager { + constructor( + private env: Bindings, + private nodeRegistry: CloudflareNodeRegistry + ) {} + + /** + * Checks if the organization has enough compute credits to execute a workflow. + * Credit limits are not enforced in development mode. 
+ */ + async hasEnoughComputeCredits( + organizationId: string, + computeCredits: number, + computeCost: number + ): Promise { + // Skip credit limit enforcement in development mode + if (this.env.CLOUDFLARE_ENV === "development") { + return true; + } + + const computeUsage = await getOrganizationComputeUsage( + this.env.KV, + organizationId + ); + return computeUsage + computeCost <= computeCredits; + } + + /** + * Returns the compute cost of a list of nodes. + */ + getNodesComputeCost(nodes: Node[]): number { + return nodes.reduce((acc, node) => { + const nodeType = this.nodeRegistry.getNodeType(node.type); + return acc + (nodeType.computeCost ?? 1); + }, 0); + } +} diff --git a/apps/api/src/runtime/execution-persistence.ts b/apps/api/src/runtime/execution-persistence.ts new file mode 100644 index 00000000..dfb93647 --- /dev/null +++ b/apps/api/src/runtime/execution-persistence.ts @@ -0,0 +1,121 @@ +import type { WorkflowExecution } from "@dafthunk/types"; + +import type { Bindings } from "../context"; +import { createDatabase, type ExecutionStatusType, saveExecution } from "../db"; +import type { RuntimeState } from "./runtime"; + +/** + * Handles persistence and updates for workflow executions. + * Manages database storage and WebSocket updates to sessions. 
+ */ +export class ExecutionPersistence { + constructor(private env: Bindings) {} + + /** + * Sends execution update to workflow session via WebSocket + */ + async sendExecutionUpdateToSession( + workflowSessionId: string, + execution: WorkflowExecution + ): Promise { + try { + const id = this.env.WORKFLOW_SESSION.idFromName(workflowSessionId); + const stub = this.env.WORKFLOW_SESSION.get(id); + + await stub.fetch(`https://workflow-session/execution`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(execution), + }); + } catch (error) { + console.error( + `Failed to send execution update to session ${workflowSessionId}:`, + error + ); + } + } + + /** + * Builds node execution list from runtime state + */ + buildNodeExecutions(runtimeState: RuntimeState) { + return runtimeState.workflow.nodes.map((node) => { + if (runtimeState.executedNodes.has(node.id)) { + return { + nodeId: node.id, + status: "completed" as const, + outputs: runtimeState.nodeOutputs.get(node.id) || {}, + }; + } + if (runtimeState.nodeErrors.has(node.id)) { + return { + nodeId: node.id, + status: "error" as const, + error: runtimeState.nodeErrors.get(node.id), + }; + } + if (runtimeState.skippedNodes.has(node.id)) { + return { + nodeId: node.id, + status: "skipped" as const, + }; + } + return { + nodeId: node.id, + status: "executing" as const, + }; + }); + } + + /** + * Persists the workflow execution state to the database. + */ + async saveExecutionState( + userId: string, + organizationId: string, + workflowId: string, + instanceId: string, + runtimeState: RuntimeState, + startedAt?: Date, + endedAt?: Date + ): Promise { + const nodeExecutionList = this.buildNodeExecutions(runtimeState); + + const executionStatus = runtimeState.status; + const errorMsg = + runtimeState.nodeErrors.size > 0 + ? 
Array.from(runtimeState.nodeErrors.values()).join(", ") + : undefined; + + try { + const db = createDatabase(this.env.DB); + return await saveExecution(db, { + id: instanceId, + workflowId, + userId, + organizationId, + status: executionStatus as ExecutionStatusType, + nodeExecutions: nodeExecutionList, + error: errorMsg, + updatedAt: new Date(), + startedAt, + endedAt, + }); + } catch (error) { + console.error("Failed to persist execution record:", error); + // Continue without interrupting the workflow. + } + + return { + id: instanceId, + workflowId, + status: executionStatus, + nodeExecutions: nodeExecutionList, + error: errorMsg, + startedAt, + endedAt, + }; + } +} diff --git a/apps/api/src/runtime/execution-planner.ts b/apps/api/src/runtime/execution-planner.ts new file mode 100644 index 00000000..ae81feed --- /dev/null +++ b/apps/api/src/runtime/execution-planner.ts @@ -0,0 +1,225 @@ +import type { Workflow } from "@dafthunk/types"; + +import type { CloudflareNodeRegistry } from "../nodes/cloudflare-node-registry"; +import type { ExecutionPlan } from "./runtime"; + +/** + * Creates execution plans for workflows by analyzing the node graph. + * Handles topological ordering and groups consecutive inlinable nodes together. + */ +export class ExecutionPlanner { + constructor(private nodeRegistry: CloudflareNodeRegistry) {} + + /** + * Creates an execution plan that groups consecutive inlinable nodes together. + * Enhanced version that can handle branching patterns within groups. + * + * Examples of patterns that can now be inlined: + * + * Fan-out pattern: + * A → B + * A → C [A, B, C] can be grouped together + * + * Fan-in pattern: + * A → C + * B → C [A, B, C] can be grouped together + * + * Tree pattern: + * A → B → D + * A → C → D [A, B, C, D] can be grouped together + * + * The old linear approach would have executed these as separate steps, + * but now they execute in a single Cloudflare workflow step. 
+ */ + createExecutionPlan( + workflow: Workflow, + orderedNodes: string[] + ): ExecutionPlan { + const plan: ExecutionPlan = []; + const processedNodes = new Set(); + let totalInlineGroups = 0; + let totalInlinedNodes = 0; + + for (let i = 0; i < orderedNodes.length; i++) { + const nodeId = orderedNodes[i]; + + if (processedNodes.has(nodeId)) { + continue; // Already processed in a group + } + + const node = workflow.nodes.find((n) => n.id === nodeId); + if (!node) continue; + + const nodeType = this.nodeRegistry.getNodeType(node.type); + const isInlinable = nodeType.inlinable ?? false; + + if (isInlinable) { + // Look ahead to find a group of connected inlinable nodes + const inlineGroup = this.findConnectedInlinableGroup( + workflow, + nodeId, + orderedNodes, + i, + processedNodes + ); + + if (inlineGroup.length === 1) { + // Single node - add as individual + plan.push({ type: "individual", nodeId: inlineGroup[0] }); + } else { + // Multiple nodes - add as inline group + plan.push({ type: "inline", nodeIds: [...inlineGroup] }); + totalInlineGroups++; + totalInlinedNodes += inlineGroup.length; + } + + // Mark all nodes in the group as processed + inlineGroup.forEach((id) => processedNodes.add(id)); + } else { + // Non-inlinable node - add as individual + plan.push({ type: "individual", nodeId }); + processedNodes.add(nodeId); + } + } + + // Log metrics for performance analysis + if (totalInlineGroups > 0) { + const totalInlinableNodes = orderedNodes.filter((nodeId) => { + const node = workflow.nodes.find((n) => n.id === nodeId); + if (!node) return false; + const nodeType = this.nodeRegistry.getNodeType(node.type); + return nodeType.inlinable ?? 
false; + }).length; + + const inliningEfficiency = + (totalInlinedNodes / totalInlinableNodes) * 100; + console.log( + `Execution plan optimized: ${totalInlineGroups} inline groups containing ${totalInlinedNodes}/${totalInlinableNodes} inlinable nodes (${inliningEfficiency.toFixed(1)}% efficiency)` + ); + + // Log individual group sizes for analysis + const groupSizes = plan + .filter((unit) => unit.type === "inline") + .map((unit) => (unit.type === "inline" ? unit.nodeIds.length : 0)); + + console.log(`Group sizes: [${groupSizes.join(", ")}]`); + } + + return plan; + } + + /** + * Finds a connected group of inlinable nodes starting from a given node. + * Uses a simple algorithm: expand the group as long as all dependencies are satisfied. + */ + private findConnectedInlinableGroup( + workflow: Workflow, + startNodeId: string, + orderedNodes: string[], + startIndex: number, + alreadyProcessed: Set + ): string[] { + const group = [startNodeId]; + const groupSet = new Set([startNodeId]); + + // Look ahead in the topological order for nodes that can be added to this group + for (let i = startIndex + 1; i < orderedNodes.length; i++) { + const candidateId = orderedNodes[i]; + + // Skip if already processed or not inlinable + if (alreadyProcessed.has(candidateId)) continue; + + const candidateNode = workflow.nodes.find((n) => n.id === candidateId); + if (!candidateNode) continue; + + const candidateNodeType = this.nodeRegistry.getNodeType( + candidateNode.type + ); + if (!(candidateNodeType.inlinable ?? false)) continue; + + // Check if this candidate can be safely added to the group + if ( + this.canSafelyAddToGroup( + workflow, + candidateId, + groupSet, + orderedNodes, + startIndex + ) + ) { + group.push(candidateId); + groupSet.add(candidateId); + } + } + + return group; + } + + /** + * Simplified check: a node can be added to a group if all its dependencies + * are either already executed or in the current group. 
+ */ + private canSafelyAddToGroup( + workflow: Workflow, + nodeId: string, + currentGroupSet: Set, + orderedNodes: string[], + groupStartIndex: number + ): boolean { + // Get all dependencies of this node + const dependencies = workflow.edges + .filter((edge) => edge.target === nodeId) + .map((edge) => edge.source); + + // Check each dependency + for (const depId of dependencies) { + const isInGroup = currentGroupSet.has(depId); + const depIndex = orderedNodes.indexOf(depId); + const isAlreadyExecuted = depIndex < groupStartIndex; + + if (!isInGroup && !isAlreadyExecuted) { + return false; // Has unmet dependency + } + } + + return true; + } + + /** + * Calculates a topological ordering of nodes. Returns an empty array if a cycle is detected. + */ + createTopologicalOrder(workflow: Workflow): string[] { + const inDegree: Record = {}; + const adjacency: Record = {}; + + for (const node of workflow.nodes) { + inDegree[node.id] = 0; + adjacency[node.id] = []; + } + + for (const edge of workflow.edges) { + adjacency[edge.source].push(edge.target); + inDegree[edge.target] += 1; + } + + const queue: string[] = Object.keys(inDegree).filter( + (id) => inDegree[id] === 0 + ); + const ordered: string[] = []; + + while (queue.length > 0) { + const current = queue.shift()!; + ordered.push(current); + + for (const neighbour of adjacency[current]) { + inDegree[neighbour] -= 1; + if (inDegree[neighbour] === 0) { + queue.push(neighbour); + } + } + } + + // If ordering missed nodes, a cycle exists. + return ordered.length === workflow.nodes.length ? 
ordered : []; + } +} diff --git a/apps/api/src/runtime/node-executor.ts b/apps/api/src/runtime/node-executor.ts new file mode 100644 index 00000000..e8842b20 --- /dev/null +++ b/apps/api/src/runtime/node-executor.ts @@ -0,0 +1,302 @@ +import type { Bindings } from "../context"; +import type { CloudflareNodeRegistry } from "../nodes/cloudflare-node-registry"; +import type { CloudflareToolRegistry } from "../nodes/cloudflare-tool-registry"; +import type { HttpRequest, NodeContext } from "../nodes/types"; +import type { EmailMessage } from "../nodes/types"; +import type { ConditionalExecutionHandler } from "./conditional-execution-handler"; +import type { NodeInputMapper } from "./node-input-mapper"; +import type { NodeOutputMapper } from "./node-output-mapper"; +import { ObjectStore } from "./object-store"; +import type { NodeOutputs, RuntimeState } from "./runtime"; + +/** + * Executes workflow nodes. + * Handles both individual nodes and groups of inlinable nodes. + */ +export class NodeExecutor { + constructor( + private env: Bindings, + private nodeRegistry: CloudflareNodeRegistry, + private toolRegistry: CloudflareToolRegistry, + private inputMapper: NodeInputMapper, + private outputMapper: NodeOutputMapper, + private conditionalHandler: ConditionalExecutionHandler + ) {} + + /** + * Executes a group of inlinable nodes sequentially in a single step. 
+ */ + async executeInlineGroup( + runtimeState: RuntimeState, + workflowId: string, + nodeIds: string[], + organizationId: string, + executionId: string, + secrets: Record, + httpRequest?: HttpRequest, + emailMessage?: EmailMessage + ): Promise { + let currentState = runtimeState; + const groupStartTime = Date.now(); + const executedNodesInGroup: string[] = []; + + console.log(`Starting inline group execution: [${nodeIds.join(", ")}]`); + + // Execute each node in the group sequentially + for (const nodeId of nodeIds) { + // Skip nodes that were already marked as failed or skipped + if ( + currentState.nodeErrors.has(nodeId) || + currentState.skippedNodes.has(nodeId) + ) { + console.log( + `Skipping node ${nodeId} in inline group (already failed/skipped)` + ); + continue; + } + + try { + const nodeStartTime = Date.now(); + + currentState = await this.executeNode( + currentState, + workflowId, + nodeId, + organizationId, + executionId, + secrets, + httpRequest, + emailMessage + ); + + const nodeExecutionTime = Date.now() - nodeStartTime; + + // If execution failed, break the inline group execution + if (currentState.nodeErrors.has(nodeId)) { + console.log( + `Node ${nodeId} failed in inline group after ${nodeExecutionTime}ms, stopping group execution` + ); + break; + } + + executedNodesInGroup.push(nodeId); + console.log( + `Node ${nodeId} completed in inline group (${nodeExecutionTime}ms)` + ); + } catch (error) { + // Handle errors at the group level + const message = error instanceof Error ? 
error.message : String(error); + currentState.nodeErrors.set(nodeId, message); + currentState.status = "error"; + console.log( + `Fatal error in node ${nodeId} within inline group: ${message}` + ); + break; + } + } + + const totalGroupTime = Date.now() - groupStartTime; + console.log( + `Inline group completed: executed ${executedNodesInGroup.length}/${nodeIds.length} nodes in ${totalGroupTime}ms` + ); + + return currentState; + } + + /** + * Executes a single node and stores its outputs. + */ + async executeNode( + runtimeState: RuntimeState, + workflowId: string, + nodeIdentifier: string, + organizationId: string, + executionId: string, + secrets: Record, + httpRequest?: HttpRequest, + emailMessage?: EmailMessage + ): Promise { + const node = runtimeState.workflow.nodes.find( + (n): boolean => n.id === nodeIdentifier + ); + if (!node) { + runtimeState.nodeErrors.set( + nodeIdentifier, + `Node not found: ${nodeIdentifier}` + ); + return { ...runtimeState, status: "error" }; + } + + const nodeType = this.nodeRegistry.getNodeType(node.type); + this.env.COMPUTE.writeDataPoint({ + indexes: [organizationId], + blobs: [organizationId, workflowId, node.id], + doubles: [nodeType.computeCost ?? 1], + }); + + // Resolve the runnable implementation. + const executable = this.nodeRegistry.createExecutableNode(node); + if (!executable) { + runtimeState.nodeErrors.set( + nodeIdentifier, + `Node type not implemented: ${node.type}` + ); + return { ...runtimeState, status: "error" }; + } + + // Gather inputs by reading connections and default values. 
+ const inputValues = this.inputMapper.collectNodeInputs( + runtimeState, + nodeIdentifier + ); + + try { + const objectStore = new ObjectStore(this.env.RESSOURCES); + const processedInputs = await this.inputMapper.mapRuntimeToNodeInputs( + runtimeState, + nodeIdentifier, + inputValues, + objectStore + ); + + // Configure AI Gateway options for all AI model requests + // If CLOUDFLARE_AI_GATEWAY_ID is set, all AI requests will be routed through the gateway + // for analytics, caching, and rate limiting. If not set, requests go directly to the model. + const aiOptions: AiOptions = {}; + const gatewayId = this.env.CLOUDFLARE_AI_GATEWAY_ID; + if (gatewayId) { + aiOptions.gateway = { + id: gatewayId, + skipCache: false, // Enable caching by default for better performance + }; + } + + const context: NodeContext = { + nodeId: nodeIdentifier, + workflowId: runtimeState.workflow.id, + organizationId, + inputs: processedInputs, + httpRequest, + emailMessage, + onProgress: () => {}, + toolRegistry: this.toolRegistry, + secrets: secrets || {}, + env: { + DB: this.env.DB, + AI: this.env.AI, + AI_OPTIONS: aiOptions, + RESSOURCES: this.env.RESSOURCES, + DATASETS: this.env.DATASETS, + DATASETS_AUTORAG: this.env.DATASETS_AUTORAG, + CLOUDFLARE_ACCOUNT_ID: this.env.CLOUDFLARE_ACCOUNT_ID, + CLOUDFLARE_API_TOKEN: this.env.CLOUDFLARE_API_TOKEN, + CLOUDFLARE_AI_GATEWAY_ID: this.env.CLOUDFLARE_AI_GATEWAY_ID, + TWILIO_ACCOUNT_SID: this.env.TWILIO_ACCOUNT_SID, + TWILIO_AUTH_TOKEN: this.env.TWILIO_AUTH_TOKEN, + TWILIO_PHONE_NUMBER: this.env.TWILIO_PHONE_NUMBER, + SENDGRID_API_KEY: this.env.SENDGRID_API_KEY, + SENDGRID_DEFAULT_FROM: this.env.SENDGRID_DEFAULT_FROM, + RESEND_API_KEY: this.env.RESEND_API_KEY, + RESEND_DEFAULT_FROM: this.env.RESEND_DEFAULT_FROM, + AWS_ACCESS_KEY_ID: this.env.AWS_ACCESS_KEY_ID, + AWS_SECRET_ACCESS_KEY: this.env.AWS_SECRET_ACCESS_KEY, + AWS_REGION: this.env.AWS_REGION, + SES_DEFAULT_FROM: this.env.SES_DEFAULT_FROM, + EMAIL_DOMAIN: this.env.EMAIL_DOMAIN, + 
OPENAI_API_KEY: this.env.OPENAI_API_KEY, + ANTHROPIC_API_KEY: this.env.ANTHROPIC_API_KEY, + GEMINI_API_KEY: this.env.GEMINI_API_KEY, + HUGGINGFACE_API_KEY: this.env.HUGGINGFACE_API_KEY, + }, + }; + + const result = await executable.execute(context); + + if (result.status === "completed") { + const outputsForRuntime = + await this.outputMapper.mapNodeToRuntimeOutputs( + runtimeState, + nodeIdentifier, + result.outputs ?? {}, + objectStore, + organizationId, + executionId + ); + runtimeState.nodeOutputs.set( + nodeIdentifier, + outputsForRuntime as NodeOutputs + ); + runtimeState.executedNodes.add(nodeIdentifier); + + // After successful execution, mark nodes connected to inactive outputs as skipped + runtimeState = this.conditionalHandler.markInactiveOutputNodesAsSkipped( + runtimeState, + nodeIdentifier, + result.outputs ?? {} + ); + } else { + const failureMessage = result.error ?? "Unknown error"; + runtimeState.nodeErrors.set(nodeIdentifier, failureMessage); + runtimeState.status = "error"; + } + + // Determine final workflow status. + if (runtimeState.status !== "error") { + const allNodesVisited = runtimeState.executionPlan.every((unit) => + unit.type === "individual" + ? runtimeState.executedNodes.has(unit.nodeId) || + runtimeState.skippedNodes.has(unit.nodeId) || + runtimeState.nodeErrors.has(unit.nodeId) + : unit.type === "inline" + ? unit.nodeIds.every( + (id: string) => + runtimeState.executedNodes.has(id) || + runtimeState.skippedNodes.has(id) || + runtimeState.nodeErrors.has(id) + ) + : false + ); + runtimeState.status = + allNodesVisited && runtimeState.nodeErrors.size === 0 + ? "completed" + : "executing"; + } + + return runtimeState; + } catch (error) { + if ( + error instanceof Error && + error.message.startsWith("Required input") + ) { + runtimeState.skippedNodes.add(nodeIdentifier); + + // Determine final workflow status. 
+ if (runtimeState.status !== "error") { + const allNodesVisited = runtimeState.executionPlan.every((unit) => + unit.type === "individual" + ? runtimeState.executedNodes.has(unit.nodeId) || + runtimeState.skippedNodes.has(unit.nodeId) || + runtimeState.nodeErrors.has(unit.nodeId) + : unit.type === "inline" + ? unit.nodeIds.every( + (id: string) => + runtimeState.executedNodes.has(id) || + runtimeState.skippedNodes.has(id) || + runtimeState.nodeErrors.has(id) + ) + : false + ); + runtimeState.status = + allNodesVisited && runtimeState.nodeErrors.size === 0 + ? "completed" + : "executing"; + } + return runtimeState; + } + const message = error instanceof Error ? error.message : String(error); + runtimeState.nodeErrors.set(nodeIdentifier, message); + runtimeState.status = "error"; + return runtimeState; + } + } +} diff --git a/apps/api/src/runtime/node-input-mapper.ts b/apps/api/src/runtime/node-input-mapper.ts new file mode 100644 index 00000000..388af2f7 --- /dev/null +++ b/apps/api/src/runtime/node-input-mapper.ts @@ -0,0 +1,186 @@ +import type { JsonArray, JsonObject, ObjectReference } from "@dafthunk/types"; + +import type { CloudflareNodeRegistry } from "../nodes/cloudflare-node-registry"; +import { apiToNodeParameter } from "../nodes/parameter-mapper"; +import type { ObjectStore } from "./object-store"; +import type { + BasicNodeOutputValue, + NodeOutputs, + NodeOutputValue, + RuntimeState, +} from "./runtime"; + +/** + * Handles mapping and transformation of node inputs during workflow execution. + * Collects inputs from edges and default values, and transforms them to node format. + */ +export class NodeInputMapper { + constructor(private nodeRegistry: CloudflareNodeRegistry) {} + + /** + * Returns inputs for a node by checking its default values and inbound edges. 
+ */ + collectNodeInputs( + runtimeState: RuntimeState, + nodeIdentifier: string + ): NodeOutputs { + const inputs: NodeOutputs = {}; + const node = runtimeState.workflow.nodes.find( + (n): boolean => n.id === nodeIdentifier + ); + if (!node) return inputs; + + // Defaults declared directly on the node. + for (const input of node.inputs) { + if (input.value !== undefined) { + if ( + typeof input.value === "string" || + typeof input.value === "number" || + typeof input.value === "boolean" || + (typeof input.value === "object" && input.value !== null) + ) { + inputs[input.name] = input.value as NodeOutputValue; + } + } + } + + // Values coming from connected nodes. + const inboundEdges = runtimeState.workflow.edges.filter( + (edge): boolean => edge.target === nodeIdentifier + ); + + // Group edges by target input to handle multiple connections + const edgesByInput = new Map(); + for (const edge of inboundEdges) { + const inputName = edge.targetInput; + if (!edgesByInput.has(inputName)) { + edgesByInput.set(inputName, []); + } + edgesByInput.get(inputName)!.push(edge); + } + + // Process each input's connections + for (const [inputName, edges] of edgesByInput) { + // Get the node type definition to check repeated + const executable = this.nodeRegistry.createExecutableNode(node); + const nodeTypeDefinition = executable + ? 
(executable.constructor as any).nodeType + : null; + const nodeTypeInput = nodeTypeDefinition?.inputs?.find( + (input: any) => input.name === inputName + ); + + // Check repeated from node type definition (not workflow node) + const acceptsMultiple = nodeTypeInput?.repeated || false; + + const values: BasicNodeOutputValue[] = []; + + for (const edge of edges) { + const sourceOutputs = runtimeState.nodeOutputs.get(edge.source); + if (sourceOutputs && sourceOutputs[edge.sourceOutput] !== undefined) { + const value = sourceOutputs[edge.sourceOutput]; + if ( + typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" || + (typeof value === "object" && value !== null) + ) { + values.push(value as BasicNodeOutputValue); + } + } + } + + if (values.length > 0) { + if (acceptsMultiple) { + // For parameters that accept multiple connections, provide an array + inputs[inputName] = values; + } else { + // For single connection parameters, use the last value (current behavior) + inputs[inputName] = values[values.length - 1]; + } + } + } + + return inputs; + } + + /** + * Converts raw runtime inputs to the representation expected by the node. 
+ */ + async mapRuntimeToNodeInputs( + runtimeState: RuntimeState, + nodeIdentifier: string, + inputValues: Record, + objectStore: ObjectStore + ): Promise> { + const node = runtimeState.workflow.nodes.find( + (n) => n.id === nodeIdentifier + ); + if (!node) throw new Error(`Node ${nodeIdentifier} not found`); + + const processed: Record = {}; + + for (const definition of node.inputs) { + const { name, type, required } = definition; + const value = inputValues[name]; + + if (required && value === undefined) { + throw new Error( + `Required input '${name}' missing for node ${nodeIdentifier}` + ); + } + if (value === undefined || value === null) continue; + + // Check if this parameter accepts multiple connections + const executable = this.nodeRegistry.createExecutableNode(node); + const nodeTypeDefinition = executable + ? (executable.constructor as any).nodeType + : null; + const nodeTypeInput = nodeTypeDefinition?.inputs?.find( + (input: any) => input.name === name + ); + const acceptsMultiple = nodeTypeInput?.repeated || false; + + // Handle secret parameters as strings since secrets are preloaded in context + const parameterType = type === "secret" ? 
"string" : type; + + if (acceptsMultiple && Array.isArray(value)) { + // For parameters that accept multiple connections, process each value individually + const processedArray = []; + for (const singleValue of value) { + const validSingleValue = singleValue as + | string + | number + | boolean + | ObjectReference + | JsonArray + | JsonObject; + const processedSingleValue = await apiToNodeParameter( + parameterType, + validSingleValue, + objectStore + ); + processedArray.push(processedSingleValue); + } + processed[name] = processedArray; + } else { + // Single value processing (existing logic) + const validValue = value as + | string + | number + | boolean + | ObjectReference + | JsonArray + | JsonObject; + const processedValue = await apiToNodeParameter( + parameterType, + validValue, + objectStore + ); + processed[name] = processedValue; + } + } + + return processed; + } +} diff --git a/apps/api/src/runtime/node-output-mapper.ts b/apps/api/src/runtime/node-output-mapper.ts new file mode 100644 index 00000000..cf5e4a53 --- /dev/null +++ b/apps/api/src/runtime/node-output-mapper.ts @@ -0,0 +1,46 @@ +import { nodeToApiParameter } from "../nodes/parameter-mapper"; +import type { ObjectStore } from "./object-store"; +import type { RuntimeState } from "./runtime"; + +/** + * Handles transformation of node outputs to runtime format. + * Converts node outputs to serializable representations for storage. + */ +export class NodeOutputMapper { + /** + * Converts node outputs to a serialisable runtime representation. 
+ */ + async mapNodeToRuntimeOutputs( + runtimeState: RuntimeState, + nodeIdentifier: string, + outputsFromNode: Record, + objectStore: ObjectStore, + organizationId: string, + executionId: string + ): Promise> { + const node = runtimeState.workflow.nodes.find( + (n) => n.id === nodeIdentifier + ); + if (!node) throw new Error(`Node ${nodeIdentifier} not found`); + + const processed: Record = {}; + + for (const definition of node.outputs) { + const { name, type } = definition; + const value = outputsFromNode[name]; + if (value === undefined || value === null) continue; + + // Handle secret parameters as strings since secrets are preloaded in context + const parameterType = type === "secret" ? "string" : type; + + processed[name] = await nodeToApiParameter( + parameterType, + value, + objectStore, + organizationId, + executionId + ); + } + return processed; + } +} diff --git a/apps/api/src/runtime/object-store.ts b/apps/api/src/runtime/object-store.ts index 235d89d8..ecb4693b 100644 --- a/apps/api/src/runtime/object-store.ts +++ b/apps/api/src/runtime/object-store.ts @@ -1,16 +1,13 @@ import { ObjectReference, Workflow, WorkflowExecution } from "@dafthunk/types"; import { v7 as uuid } from "uuid"; +/** + * Manages R2 storage for objects, workflows, and executions. + * Uses helper methods to eliminate duplication in logging and error handling. 
+ */ export class ObjectStore { - private bucket: R2Bucket; + constructor(private bucket: R2Bucket) {} - constructor(bucket: R2Bucket) { - this.bucket = bucket; - } - - /** - * Write a binary object to storage and return a reference - */ async writeObject( data: Uint8Array, mimeType: string, @@ -27,9 +24,6 @@ export class ObjectStore { ); } - /** - * Write a binary object to storage and return a reference - */ async writeObjectWithId( id: string, data: Uint8Array, @@ -37,141 +31,56 @@ export class ObjectStore { organizationId: string, executionId?: string ): Promise { - try { - console.log( - `ObjectStore.writeObjectWithId: Starting to write object with id ${id}` - ); - - if (!this.bucket) { - console.error( - "ObjectStore.writeObjectWithId: R2 bucket is not initialized" - ); - throw new Error("R2 bucket is not initialized"); - } - - const key = `objects/${id}/object.data`; - console.log( - `ObjectStore.writeObjectWithId: Attempting to store object with key ${key}` - ); - - const customMetadataForR2: { [key: string]: string } = { - id, - createdAt: new Date().toISOString(), - organizationId, - }; - - if (executionId) { - customMetadataForR2.executionId = executionId; - } + const customMetadata: Record = { + id, + createdAt: new Date().toISOString(), + organizationId, + }; + if (executionId) { + customMetadata.executionId = executionId; + } - const writeResult = await this.bucket.put(key, data, { + await this.writeToR2( + `objects/${id}/object.data`, + data, + { httpMetadata: { contentType: mimeType, cacheControl: "public, max-age=31536000", }, - customMetadata: customMetadataForR2, - }); - - console.log( - `ObjectStore.writeObjectWithId: Successfully stored object ${id}, etag: ${writeResult?.etag || "unknown"}` - ); + customMetadata, + }, + "writeObjectWithId" + ); - return { - id, - mimeType: mimeType, - }; - } catch (error) { - console.error( - "ObjectStore.writeObjectWithId: Failed to write object to R2:", - error - ); - throw error; - } + return { id, mimeType 
}; } - /** - * Read an object from storage using its reference - */ async readObject(reference: ObjectReference): Promise<{ data: Uint8Array; metadata: R2Object["customMetadata"]; } | null> { - try { - console.log( - `ObjectStore.read: Attempting to read object with id ${reference.id}` - ); - - if (!this.bucket) { - console.error("ObjectStore.read: R2 bucket is not initialized"); - throw new Error("R2 bucket is not initialized"); - } - - const key = `objects/${reference.id}/object.data`; - console.log(`ObjectStore.readObject: Getting object with key ${key}`); - - const object = await this.bucket.get(key); - - if (!object) { - console.log(`ObjectStore.readObject: Object not found with key ${key}`); - console.error(`ObjectStore.read: Object not found: ${reference.id}`); - return null; - } + const object = await this.readFromR2( + `objects/${reference.id}/object.data`, + "readObject" + ); - console.log( - `ObjectStore.readObject: Retrieved object ${reference.id}, size: ${object.size} bytes` - ); + if (!object) return null; - const data = await object.arrayBuffer(); - console.log( - `ObjectStore.read: Successfully read object ${reference.id}, size: ${data.byteLength} bytes` - ); - return { - data: new Uint8Array(data), - metadata: object.customMetadata, - }; - } catch (error) { - console.error( - `ObjectStore.read: Failed to read object ${reference.id}:`, - error - ); - throw error; - } + const arrayBuffer = await object.arrayBuffer(); + return { + data: new Uint8Array(arrayBuffer), + metadata: object.customMetadata, + }; } - /** - * Delete an object from storage using its reference - */ async deleteObject(reference: ObjectReference): Promise { - try { - console.log( - `ObjectStore.delete: Attempting to delete object with id ${reference.id}` - ); - - if (!this.bucket) { - console.error("ObjectStore.delete: R2 bucket is not initialized"); - throw new Error("R2 bucket is not initialized"); - } - - const key = `objects/${reference.id}/object.data`; - 
console.log(`ObjectStore.deleteObject: Deleting object with key ${key}`); - - await this.bucket.delete(key); - console.log(`ObjectStore.deleteObject: Deleted object with key ${key}`); - console.log( - `ObjectStore.delete: Successfully deleted object ${reference.id}` - ); - } catch (error) { - console.error( - `ObjectStore.delete: Failed to delete object ${reference.id}:`, - error - ); - throw error; - } + await this.deleteFromR2( + `objects/${reference.id}/object.data`, + "deleteObject" + ); } - /** - * List objects for an organization - */ async listObjects(organizationId: string): Promise< { id: string; @@ -182,81 +91,30 @@ export class ObjectStore { executionId?: string; }[] > { - try { - console.log( - `ObjectStore.listObjects: Listing objects for organization ${organizationId}` - ); - - if (!this.bucket) { - console.error("ObjectStore.listObjects: R2 bucket is not initialized"); - throw new Error("R2 bucket is not initialized"); - } - - const prefix = "objects/"; - const objects = await this.bucket.list({ prefix }); - - const filteredObjects = []; - - // Filter objects by organizationId and parse metadata - for (const obj of objects.objects) { - if (obj.customMetadata?.organizationId === organizationId) { - // Extract object ID from the key path (objects/{id}/object.data) - const keyParts = obj.key.split("/"); - const id = keyParts[1]; // The ID should be in the second position - - // We don't know the MIME type from the listing, so we use the content-type from httpMetadata - const mimeType = - obj.httpMetadata?.contentType || "application/octet-stream"; - - filteredObjects.push({ - id, - mimeType, - size: obj.size, - createdAt: obj.customMetadata?.createdAt - ? 
new Date(obj.customMetadata.createdAt) - : new Date(), - organizationId, - executionId: obj.customMetadata?.executionId, - }); - } - } - - console.log( - `ObjectStore.listObjects: Found ${filteredObjects.length} objects for organization ${organizationId}` - ); - - return filteredObjects; - } catch (error) { - console.error( - `ObjectStore.listObjects: Failed to list objects for organization ${organizationId}:`, - error - ); - throw error; - } + const objects = await this.listFromR2("objects/", "listObjects"); + + return objects.objects + .filter((obj) => obj.customMetadata?.organizationId === organizationId) + .map((obj) => { + const id = obj.key.split("/")[1]; + return { + id, + mimeType: obj.httpMetadata?.contentType || "application/octet-stream", + size: obj.size, + createdAt: obj.customMetadata?.createdAt + ? new Date(obj.customMetadata.createdAt) + : new Date(), + organizationId, + executionId: obj.customMetadata?.executionId, + }; + }); } - /** - * Write a workflow to storage - */ async writeWorkflow(workflow: Workflow): Promise { - try { - console.log( - `ObjectStore.writeWorkflow: Starting to write workflow ${workflow.id}` - ); - - if (!this.bucket) { - console.error( - "ObjectStore.writeWorkflow: R2 bucket is not initialized" - ); - throw new Error("R2 bucket is not initialized"); - } - - const key = `workflows/${workflow.id}/workflow.json`; - console.log( - `ObjectStore.writeWorkflow: Attempting to store workflow with key ${key}` - ); - - const writeResult = await this.bucket.put(key, JSON.stringify(workflow), { + await this.writeToR2( + `workflows/${workflow.id}/workflow.json`, + JSON.stringify(workflow), + { httpMetadata: { contentType: "application/json", cacheControl: "no-cache", @@ -264,133 +122,41 @@ export class ObjectStore { customMetadata: { updatedAt: new Date().toISOString(), }, - }); - - console.log( - `ObjectStore.writeWorkflow: Successfully stored workflow ${workflow.id}, etag: ${writeResult?.etag || "unknown"}` - ); - - return workflow.id; 
- } catch (error) { - console.error( - "ObjectStore.writeWorkflow: Failed to write workflow to R2:", - error - ); - throw error; - } + }, + "writeWorkflow" + ); + return workflow.id; } - /** - * Read a workflow from storage using its id - */ async readWorkflow(workflowId: string): Promise { - try { - console.log( - `ObjectStore.readWorkflow: Attempting to read workflow with id ${workflowId}` - ); - - if (!this.bucket) { - console.error("ObjectStore.readWorkflow: R2 bucket is not initialized"); - throw new Error("R2 bucket is not initialized"); - } - - const key = `workflows/${workflowId}/workflow.json`; - console.log(`ObjectStore.readWorkflow: Getting workflow with key ${key}`); - - const object = await this.bucket.get(key); - - if (!object) { - console.log( - `ObjectStore.readWorkflow: Workflow not found with key ${key}` - ); - console.error( - `ObjectStore.readWorkflow: Workflow not found: ${workflowId}` - ); - throw new Error(`Workflow not found: ${workflowId}`); - } - - console.log( - `ObjectStore.readWorkflow: Retrieved workflow ${workflowId}, size: ${object.size} bytes` - ); - - const text = await object.text(); - const workflow = JSON.parse(text) as Workflow; - console.log( - `ObjectStore.readWorkflow: Successfully read workflow ${workflowId}` - ); + const object = await this.readFromR2( + `workflows/${workflowId}/workflow.json`, + "readWorkflow" + ); - return workflow; - } catch (error) { - console.error( - `ObjectStore.readWorkflow: Failed to read workflow ${workflowId}:`, - error - ); - throw error; + if (!object) { + throw new Error(`Workflow not found: ${workflowId}`); } + + const text = await object.text(); + return JSON.parse(text) as Workflow; } - /** - * Delete a workflow from storage using its id - */ async deleteWorkflow(workflowId: string): Promise { - try { - console.log( - `ObjectStore.deleteWorkflow: Attempting to delete workflow with id ${workflowId}` - ); - - if (!this.bucket) { - console.error( - "ObjectStore.deleteWorkflow: R2 bucket is 
not initialized" - ); - throw new Error("R2 bucket is not initialized"); - } - - const key = `workflows/${workflowId}/workflow.json`; - console.log( - `ObjectStore.deleteWorkflow: Deleting workflow with key ${key}` - ); - - await this.bucket.delete(key); - console.log( - `ObjectStore.deleteWorkflow: Deleted workflow with key ${key}` - ); - console.log( - `ObjectStore.deleteWorkflow: Successfully deleted workflow ${workflowId}` - ); - } catch (error) { - console.error( - `ObjectStore.deleteWorkflow: Failed to delete workflow ${workflowId}:`, - error - ); - throw error; - } + await this.deleteFromR2( + `workflows/${workflowId}/workflow.json`, + "deleteWorkflow" + ); } - /** - * Write a workflow to execution storage - */ async writeExecutionWorkflow( executionId: string, workflow: Workflow ): Promise { - try { - console.log( - `ObjectStore.writeExecutionWorkflow: Starting to write workflow for execution ${executionId}` - ); - - if (!this.bucket) { - console.error( - "ObjectStore.writeExecutionWorkflow: R2 bucket is not initialized" - ); - throw new Error("R2 bucket is not initialized"); - } - - const key = `executions/${executionId}/workflow.json`; - console.log( - `ObjectStore.writeExecutionWorkflow: Attempting to store workflow with key ${key}` - ); - - const writeResult = await this.bucket.put(key, JSON.stringify(workflow), { + await this.writeToR2( + `executions/${executionId}/workflow.json`, + JSON.stringify(workflow), + { httpMetadata: { contentType: "application/json", cacheControl: "no-cache", @@ -400,247 +166,163 @@ export class ObjectStore { workflowId: workflow.id, updatedAt: new Date().toISOString(), }, - }); - - console.log( - `ObjectStore.writeExecutionWorkflow: Successfully stored workflow for execution ${executionId}, etag: ${writeResult?.etag || "unknown"}` - ); - - return executionId; - } catch (error) { - console.error( - `ObjectStore.writeExecutionWorkflow: Failed to write workflow to R2 for execution ${executionId}:`, - error - ); - throw error; - 
} + }, + "writeExecutionWorkflow" + ); + return executionId; } - /** - * Read a workflow from execution storage using its execution id - */ async readExecutionWorkflow(executionId: string): Promise { - try { - console.log( - `ObjectStore.readExecutionWorkflow: Attempting to read workflow for execution ${executionId}` - ); - - if (!this.bucket) { - console.error( - "ObjectStore.readExecutionWorkflow: R2 bucket is not initialized" - ); - throw new Error("R2 bucket is not initialized"); - } + const object = await this.readFromR2( + `executions/${executionId}/workflow.json`, + "readExecutionWorkflow" + ); - const key = `executions/${executionId}/workflow.json`; - console.log( - `ObjectStore.readExecutionWorkflow: Getting workflow with key ${key}` - ); + if (!object) { + throw new Error(`Workflow not found for execution: ${executionId}`); + } - const object = await this.bucket.get(key); + const text = await object.text(); + return JSON.parse(text) as Workflow; + } - if (!object) { - console.log( - `ObjectStore.readExecutionWorkflow: Workflow not found with key ${key}` - ); - console.error( - `ObjectStore.readExecutionWorkflow: Workflow not found for execution: ${executionId}` - ); - throw new Error(`Workflow not found for execution: ${executionId}`); - } + async deleteExecutionWorkflow(executionId: string): Promise { + await this.deleteFromR2( + `executions/${executionId}/workflow.json`, + "deleteExecutionWorkflow" + ); + } - console.log( - `ObjectStore.readExecutionWorkflow: Retrieved workflow for execution ${executionId}, size: ${object.size} bytes` - ); + async writeExecution(execution: WorkflowExecution): Promise { + await this.writeToR2( + `executions/${execution.id}/execution.json`, + JSON.stringify(execution), + { + httpMetadata: { + contentType: "application/json", + cacheControl: "no-cache", + }, + customMetadata: { + workflowId: execution.workflowId, + status: execution.status, + updatedAt: new Date().toISOString(), + }, + }, + "writeExecution" + ); + return 
execution.id; + } - const text = await object.text(); - const workflow = JSON.parse(text) as Workflow; - console.log( - `ObjectStore.readExecutionWorkflow: Successfully read workflow for execution ${executionId}` - ); + async readExecution(executionId: string): Promise { + const object = await this.readFromR2( + `executions/${executionId}/execution.json`, + "readExecution" + ); - return workflow; - } catch (error) { - console.error( - `ObjectStore.readExecutionWorkflow: Failed to read workflow for execution ${executionId}:`, - error - ); - throw error; + if (!object) { + throw new Error(`Execution not found: ${executionId}`); } + + const text = await object.text(); + return JSON.parse(text) as WorkflowExecution; } - /** - * Delete a workflow from execution storage using its execution id - */ - async deleteExecutionWorkflow(executionId: string): Promise { + async deleteExecution(executionId: string): Promise { + await this.deleteFromR2( + `executions/${executionId}/execution.json`, + "deleteExecution" + ); + } + + private async writeToR2( + key: string, + data: string | ArrayBuffer | Uint8Array, + options: R2PutOptions, + operation: string + ): Promise { try { - console.log( - `ObjectStore.deleteExecutionWorkflow: Attempting to delete workflow for execution ${executionId}` - ); + console.log(`ObjectStore.${operation}: Writing to ${key}`); if (!this.bucket) { - console.error( - "ObjectStore.deleteExecutionWorkflow: R2 bucket is not initialized" - ); throw new Error("R2 bucket is not initialized"); } - const key = `executions/${executionId}/workflow.json`; - console.log( - `ObjectStore.deleteExecutionWorkflow: Deleting workflow with key ${key}` - ); - - await this.bucket.delete(key); - console.log( - `ObjectStore.deleteExecutionWorkflow: Deleted workflow with key ${key}` - ); + const result = await this.bucket.put(key, data, options); console.log( - `ObjectStore.deleteExecutionWorkflow: Successfully deleted workflow for execution ${executionId}` + 
`ObjectStore.${operation}: Success, etag: ${result?.etag || "unknown"}` ); } catch (error) { - console.error( - `ObjectStore.deleteExecutionWorkflow: Failed to delete workflow for execution ${executionId}:`, - error - ); + console.error(`ObjectStore.${operation}: Failed to write ${key}:`, error); throw error; } } - /** - * Write an execution to storage - */ - async writeExecution(execution: WorkflowExecution): Promise { + private async readFromR2( + key: string, + operation: string + ): Promise { try { - console.log( - `ObjectStore.writeExecution: Starting to write execution ${execution.id}` - ); + console.log(`ObjectStore.${operation}: Reading from ${key}`); if (!this.bucket) { - console.error( - "ObjectStore.writeExecution: R2 bucket is not initialized" - ); throw new Error("R2 bucket is not initialized"); } - const key = `executions/${execution.id}/execution.json`; - console.log( - `ObjectStore.writeExecution: Attempting to store execution with key ${key}` - ); + const object = await this.bucket.get(key); - const writeResult = await this.bucket.put( - key, - JSON.stringify(execution), - { - httpMetadata: { - contentType: "application/json", - cacheControl: "no-cache", - }, - customMetadata: { - workflowId: execution.workflowId, - status: execution.status, - updatedAt: new Date().toISOString(), - }, - } - ); + if (!object) { + console.log(`ObjectStore.${operation}: Not found at ${key}`); + return null; + } console.log( - `ObjectStore.writeExecution: Successfully stored execution ${execution.id}, etag: ${writeResult?.etag || "unknown"}` + `ObjectStore.${operation}: Success, size: ${object.size} bytes` ); - - return execution.id; + return object; } catch (error) { - console.error( - `ObjectStore.writeExecution: Failed to write execution to R2:`, - error - ); + console.error(`ObjectStore.${operation}: Failed to read ${key}:`, error); throw error; } } - /** - * Read an execution from storage using its id - */ - async readExecution(executionId: string): Promise { + 
private async deleteFromR2(key: string, operation: string): Promise { try { - console.log( - `ObjectStore.readExecution: Attempting to read execution with id ${executionId}` - ); + console.log(`ObjectStore.${operation}: Deleting ${key}`); if (!this.bucket) { - console.error( - "ObjectStore.readExecution: R2 bucket is not initialized" - ); throw new Error("R2 bucket is not initialized"); } - const key = `executions/${executionId}/execution.json`; - console.log( - `ObjectStore.readExecution: Getting execution with key ${key}` - ); - - const object = await this.bucket.get(key); - - if (!object) { - console.log( - `ObjectStore.readExecution: Execution not found with key ${key}` - ); - console.error( - `ObjectStore.readExecution: Execution not found: ${executionId}` - ); - throw new Error(`Execution not found: ${executionId}`); - } - - console.log( - `ObjectStore.readExecution: Retrieved execution ${executionId}, size: ${object.size} bytes` - ); - - const text = await object.text(); - const execution = JSON.parse(text) as WorkflowExecution; - console.log( - `ObjectStore.readExecution: Successfully read execution ${executionId}` - ); - - return execution; + await this.bucket.delete(key); + console.log(`ObjectStore.${operation}: Successfully deleted ${key}`); } catch (error) { console.error( - `ObjectStore.readExecution: Failed to read execution ${executionId}:`, + `ObjectStore.${operation}: Failed to delete ${key}:`, error ); throw error; } } - /** - * Delete an execution from storage using its id - */ - async deleteExecution(executionId: string): Promise { + private async listFromR2( + prefix: string, + operation: string + ): Promise { try { - console.log( - `ObjectStore.deleteExecution: Attempting to delete execution with id ${executionId}` - ); + console.log(`ObjectStore.${operation}: Listing with prefix ${prefix}`); if (!this.bucket) { - console.error( - "ObjectStore.deleteExecution: R2 bucket is not initialized" - ); throw new Error("R2 bucket is not initialized"); 
} - const key = `executions/${executionId}/execution.json`; - console.log( - `ObjectStore.deleteExecution: Deleting execution with key ${key}` - ); - - await this.bucket.delete(key); - console.log( - `ObjectStore.deleteExecution: Deleted execution with key ${key}` - ); + const objects = await this.bucket.list({ prefix }); console.log( - `ObjectStore.deleteExecution: Successfully deleted execution ${executionId}` + `ObjectStore.${operation}: Found ${objects.objects.length} objects` ); + return objects; } catch (error) { console.error( - `ObjectStore.deleteExecution: Failed to delete execution ${executionId}:`, + `ObjectStore.${operation}: Failed to list with prefix ${prefix}:`, error ); throw error; diff --git a/apps/api/src/runtime/runtime.ts b/apps/api/src/runtime/runtime.ts index 1219cf1b..83f79097 100644 --- a/apps/api/src/runtime/runtime.ts +++ b/apps/api/src/runtime/runtime.ts @@ -1,7 +1,6 @@ import { JsonArray, JsonObject, - Node, ObjectReference, Workflow, WorkflowExecution, @@ -16,26 +15,20 @@ import { import { NonRetryableError } from "cloudflare:workflows"; import { Bindings } from "../context"; -import { - createDatabase, - ExecutionStatusType, - getAllSecretsWithValues, - saveExecution, -} from "../db"; import { CloudflareNodeRegistry } from "../nodes/cloudflare-node-registry"; import { CloudflareToolRegistry } from "../nodes/cloudflare-tool-registry"; -import { - apiToNodeParameter, - nodeToApiParameter, -} from "../nodes/parameter-mapper"; import { HttpRequest, NodeContext } from "../nodes/types"; import { EmailMessage } from "../nodes/types"; -import { - getOrganizationComputeUsage, - updateOrganizationComputeUsage, -} from "../utils/credits"; +import { updateOrganizationComputeUsage } from "../utils/credits"; import { validateWorkflow } from "../utils/workflows"; -import { ObjectStore } from "./object-store"; +import { ConditionalExecutionHandler } from "./conditional-execution-handler"; +import { CreditManager } from "./credit-manager"; +import { 
ExecutionPersistence } from "./execution-persistence"; +import { ExecutionPlanner } from "./execution-planner"; +import { NodeExecutor } from "./node-executor"; +import { NodeInputMapper } from "./node-input-mapper"; +import { NodeOutputMapper } from "./node-output-mapper"; +import { SecretManager } from "./secret-manager"; // Basic node output value types export type BasicNodeOutputValue = @@ -107,6 +100,14 @@ export class Runtime extends WorkflowEntrypoint { private nodeRegistry: CloudflareNodeRegistry; private toolRegistry: CloudflareToolRegistry; + private planner: ExecutionPlanner; + private inputMapper: NodeInputMapper; + private outputMapper: NodeOutputMapper; + private secretManager: SecretManager; + private creditManager: CreditManager; + private conditionalHandler: ConditionalExecutionHandler; + private persistence: ExecutionPersistence; + private executor: NodeExecutor; constructor(ctx: ExecutionContext, env: Bindings) { super(ctx, env); @@ -115,6 +116,26 @@ export class Runtime extends WorkflowEntrypoint { this.nodeRegistry, this.createNodeContextForTool.bind(this) ); + + // Initialize specialized components + this.planner = new ExecutionPlanner(this.nodeRegistry); + this.inputMapper = new NodeInputMapper(this.nodeRegistry); + this.outputMapper = new NodeOutputMapper(); + this.secretManager = new SecretManager(env); + this.creditManager = new CreditManager(env, this.nodeRegistry); + this.conditionalHandler = new ConditionalExecutionHandler( + this.nodeRegistry, + this.inputMapper + ); + this.persistence = new ExecutionPersistence(env); + this.executor = new NodeExecutor( + env, + this.nodeRegistry, + this.toolRegistry, + this.inputMapper, + this.outputMapper, + this.conditionalHandler + ); } /** @@ -208,10 +229,10 @@ export class Runtime extends WorkflowEntrypoint { } as WorkflowExecution; if ( - !(await this.hasEnoughComputeCredits( + !(await this.creditManager.hasEnoughComputeCredits( organizationId, computeCredits, - 
this.getNodesComputeCost(workflow.nodes) + this.creditManager.getNodesComputeCost(workflow.nodes) )) ) { runtimeState = { ...runtimeState, status: "exhausted" }; @@ -219,7 +240,7 @@ export class Runtime extends WorkflowEntrypoint { "persist exhausted execution state", Runtime.defaultStepConfig, async () => - this.saveExecutionState( + this.persistence.saveExecutionState( userId, organizationId, workflow.id, @@ -236,7 +257,7 @@ export class Runtime extends WorkflowEntrypoint { const secrets = await step.do( "preload organization secrets", Runtime.defaultStepConfig, - async () => this.preloadAllSecrets(organizationId) + async () => this.secretManager.preloadAllSecrets(organizationId) ); // Prepare workflow (validation + ordering). @@ -254,7 +275,7 @@ export class Runtime extends WorkflowEntrypoint { "persist initial execution status", Runtime.defaultStepConfig, async () => - this.saveExecutionState( + this.persistence.saveExecutionState( userId, organizationId, workflow.id, @@ -280,7 +301,7 @@ export class Runtime extends WorkflowEntrypoint { `run node ${nodeIdentifier}`, Runtime.defaultStepConfig, async () => - this.executeNode( + this.executor.executeNode( runtimeState, workflow.id, nodeIdentifier, @@ -299,7 +320,7 @@ export class Runtime extends WorkflowEntrypoint { `run ${groupDescription}`, Runtime.defaultStepConfig, async () => - this.executeInlineGroup( + this.executor.executeInlineGroup( runtimeState, workflow.id, executionUnit.nodeIds, @@ -316,15 +337,17 @@ export class Runtime extends WorkflowEntrypoint { executionRecord = { ...executionRecord, status: runtimeState.status, - nodeExecutions: this.buildNodeExecutions(runtimeState), + nodeExecutions: this.persistence.buildNodeExecutions(runtimeState), }; - this.sendExecutionUpdateToSession( - workflowSessionId, - executionRecord - ).catch((error) => { - console.error("Failed to send execution update to session:", error); - }); + this.persistence + .sendExecutionUpdateToSession(workflowSessionId, 
executionRecord) + .catch((error) => { + console.error( + "Failed to send execution update to session:", + error + ); + }); } } } catch (error) { @@ -349,14 +372,14 @@ export class Runtime extends WorkflowEntrypoint { this.env.KV, organizationId, // Update organization compute credits for executed nodes - this.getNodesComputeCost( + this.creditManager.getNodesComputeCost( runtimeState.workflow.nodes.filter((node) => runtimeState.executedNodes.has(node.id) ) ) ); } - return this.saveExecutionState( + return this.persistence.saveExecutionState( userId, organizationId, workflow.id, @@ -369,126 +392,17 @@ export class Runtime extends WorkflowEntrypoint { ); if (workflowSessionId) { - this.sendExecutionUpdateToSession( - workflowSessionId, - executionRecord - ).catch((error) => { - console.error("Failed to send execution update to session:", error); - }); + this.persistence + .sendExecutionUpdateToSession(workflowSessionId, executionRecord) + .catch((error) => { + console.error("Failed to send execution update to session:", error); + }); } } return executionRecord; } - /** - * Checks if the organization has enough compute credits to execute a workflow. - * Credit limits are not enforced in development mode. - */ - private async hasEnoughComputeCredits( - organizationId: string, - computeCredits: number, - computeCost: number - ): Promise { - // Skip credit limit enforcement in development mode - if (this.env.CLOUDFLARE_ENV === "development") { - return true; - } - - const computeUsage = await getOrganizationComputeUsage( - this.env.KV, - organizationId - ); - return computeUsage + computeCost <= computeCredits; - } - - /** - * Returns the compute cost of a list of nodes. - */ - private getNodesComputeCost(nodes: Node[]): number { - return nodes.reduce((acc, node) => { - const nodeType = this.nodeRegistry.getNodeType(node.type); - return acc + (nodeType.computeCost ?? 
1); - }, 0); - } - - private async sendExecutionUpdateToSession( - workflowSessionId: string, - execution: WorkflowExecution - ): Promise { - try { - const id = this.env.WORKFLOW_SESSION.idFromName(workflowSessionId); - const stub = this.env.WORKFLOW_SESSION.get(id); - - await stub.fetch(`https://workflow-session/execution`, { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify(execution), - }); - } catch (error) { - console.error( - `Failed to send execution update to session ${workflowSessionId}:`, - error - ); - } - } - - /** - * Preloads all organization secrets for synchronous access during workflow execution - */ - private async preloadAllSecrets( - organizationId: string - ): Promise> { - const secrets: Record = {}; - const db = createDatabase(this.env.DB); - - try { - // Get all secret records for the organization (including encrypted values) - const secretRecords = await getAllSecretsWithValues(db, organizationId); - - // Decrypt each secret and add to the secrets object - for (const secretRecord of secretRecords) { - try { - const secretValue = await this.decryptSecretValue( - secretRecord.encryptedValue, - organizationId - ); - secrets[secretRecord.name] = secretValue; - } catch (error) { - console.warn( - `Failed to decrypt secret '${secretRecord.name}':`, - error - ); - } - } - - console.log( - `Preloaded ${Object.keys(secrets).length} secrets for organization ${organizationId}` - ); - } catch (error) { - console.error( - `Failed to preload secrets for organization ${organizationId}:`, - error - ); - } - - return secrets; - } - - /** - * Decrypt a secret value using organization-specific key - */ - private async decryptSecretValue( - encryptedValue: string, - organizationId: string - ): Promise { - // Import decryptSecret here to avoid circular dependency issues - const { decryptSecret } = await import("../utils/encryption"); - return await decryptSecret(encryptedValue, this.env, organizationId); - } - /** * 
Validates the workflow and creates a sequential execution order with inline groups. */ @@ -502,7 +416,7 @@ export class Runtime extends WorkflowEntrypoint { ); } - const orderedNodes = this.createTopologicalOrder(workflow); + const orderedNodes = this.planner.createTopologicalOrder(workflow); if (orderedNodes.length === 0 && workflow.nodes.length > 0) { throw new NonRetryableError( "Unable to derive execution order. The graph may contain a cycle." @@ -510,7 +424,10 @@ export class Runtime extends WorkflowEntrypoint { } // Create execution plan with inline groups - const executionPlan = this.createExecutionPlan(workflow, orderedNodes); + const executionPlan = this.planner.createExecutionPlan( + workflow, + orderedNodes + ); return { workflow, @@ -522,878 +439,4 @@ export class Runtime extends WorkflowEntrypoint { status: "executing", }; } - - /** - * Creates an execution plan that groups consecutive inlinable nodes together. - * Enhanced version that can handle branching patterns within groups. - * - * Examples of patterns that can now be inlined: - * - * Fan-out pattern: - * A → B - * A → C [A, B, C] can be grouped together - * - * Fan-in pattern: - * A → C - * B → C [A, B, C] can be grouped together - * - * Tree pattern: - * A → B → D - * A → C → D [A, B, C, D] can be grouped together - * - * The old linear approach would have executed these as separate steps, - * but now they execute in a single Cloudflare workflow step. 
- */ - private createExecutionPlan( - workflow: Workflow, - orderedNodes: string[] - ): ExecutionPlan { - const plan: ExecutionPlan = []; - const processedNodes = new Set(); - let totalInlineGroups = 0; - let totalInlinedNodes = 0; - - for (let i = 0; i < orderedNodes.length; i++) { - const nodeId = orderedNodes[i]; - - if (processedNodes.has(nodeId)) { - continue; // Already processed in a group - } - - const node = workflow.nodes.find((n) => n.id === nodeId); - if (!node) continue; - - const nodeType = this.nodeRegistry.getNodeType(node.type); - const isInlinable = nodeType.inlinable ?? false; - - if (isInlinable) { - // Look ahead to find a group of connected inlinable nodes - const inlineGroup = this.findConnectedInlinableGroup( - workflow, - nodeId, - orderedNodes, - i, - processedNodes - ); - - if (inlineGroup.length === 1) { - // Single node - add as individual - plan.push({ type: "individual", nodeId: inlineGroup[0] }); - } else { - // Multiple nodes - add as inline group - plan.push({ type: "inline", nodeIds: [...inlineGroup] }); - totalInlineGroups++; - totalInlinedNodes += inlineGroup.length; - } - - // Mark all nodes in the group as processed - inlineGroup.forEach((id) => processedNodes.add(id)); - } else { - // Non-inlinable node - add as individual - plan.push({ type: "individual", nodeId }); - processedNodes.add(nodeId); - } - } - - // Log metrics for performance analysis - if (totalInlineGroups > 0) { - const totalInlinableNodes = orderedNodes.filter((nodeId) => { - const node = workflow.nodes.find((n) => n.id === nodeId); - if (!node) return false; - const nodeType = this.nodeRegistry.getNodeType(node.type); - return nodeType.inlinable ?? 
false; - }).length; - - const inliningEfficiency = - (totalInlinedNodes / totalInlinableNodes) * 100; - console.log( - `Execution plan optimized: ${totalInlineGroups} inline groups containing ${totalInlinedNodes}/${totalInlinableNodes} inlinable nodes (${inliningEfficiency.toFixed(1)}% efficiency)` - ); - - // Log individual group sizes for analysis - const groupSizes = plan - .filter((unit) => unit.type === "inline") - .map((unit) => (unit.type === "inline" ? unit.nodeIds.length : 0)); - - console.log(`Group sizes: [${groupSizes.join(", ")}]`); - } - - return plan; - } - - /** - * Finds a connected group of inlinable nodes starting from a given node. - * Uses a simple algorithm: expand the group as long as all dependencies are satisfied. - */ - private findConnectedInlinableGroup( - workflow: Workflow, - startNodeId: string, - orderedNodes: string[], - startIndex: number, - alreadyProcessed: Set - ): string[] { - const group = [startNodeId]; - const groupSet = new Set([startNodeId]); - - // Look ahead in the topological order for nodes that can be added to this group - for (let i = startIndex + 1; i < orderedNodes.length; i++) { - const candidateId = orderedNodes[i]; - - // Skip if already processed or not inlinable - if (alreadyProcessed.has(candidateId)) continue; - - const candidateNode = workflow.nodes.find((n) => n.id === candidateId); - if (!candidateNode) continue; - - const candidateNodeType = this.nodeRegistry.getNodeType( - candidateNode.type - ); - if (!(candidateNodeType.inlinable ?? false)) continue; - - // Check if this candidate can be safely added to the group - if ( - this.canSafelyAddToGroup( - workflow, - candidateId, - groupSet, - orderedNodes, - startIndex - ) - ) { - group.push(candidateId); - groupSet.add(candidateId); - } - } - - return group; - } - - /** - * Simplified check: a node can be added to a group if all its dependencies - * are either already executed or in the current group. 
- */ - private canSafelyAddToGroup( - workflow: Workflow, - nodeId: string, - currentGroupSet: Set, - orderedNodes: string[], - groupStartIndex: number - ): boolean { - // Get all dependencies of this node - const dependencies = workflow.edges - .filter((edge) => edge.target === nodeId) - .map((edge) => edge.source); - - // Check each dependency - for (const depId of dependencies) { - const isInGroup = currentGroupSet.has(depId); - const depIndex = orderedNodes.indexOf(depId); - const isAlreadyExecuted = depIndex < groupStartIndex; - - if (!isInGroup && !isAlreadyExecuted) { - return false; // Has unmet dependency - } - } - - return true; - } - - /** - * Executes a group of inlinable nodes sequentially in a single step. - */ - private async executeInlineGroup( - runtimeState: RuntimeState, - workflowId: string, - nodeIds: string[], - organizationId: string, - executionId: string, - secrets: Record, - httpRequest?: HttpRequest, - emailMessage?: EmailMessage - ): Promise { - let currentState = runtimeState; - const groupStartTime = Date.now(); - const executedNodesInGroup: string[] = []; - - console.log(`Starting inline group execution: [${nodeIds.join(", ")}]`); - - // Execute each node in the group sequentially - for (const nodeId of nodeIds) { - // Skip nodes that were already marked as failed or skipped - if ( - currentState.nodeErrors.has(nodeId) || - currentState.skippedNodes.has(nodeId) - ) { - console.log( - `Skipping node ${nodeId} in inline group (already failed/skipped)` - ); - continue; - } - - try { - const nodeStartTime = Date.now(); - - currentState = await this.executeNode( - currentState, - workflowId, - nodeId, - organizationId, - executionId, - secrets, - httpRequest, - emailMessage - ); - - const nodeExecutionTime = Date.now() - nodeStartTime; - - // If execution failed, break the inline group execution - if (currentState.nodeErrors.has(nodeId)) { - console.log( - `Node ${nodeId} failed in inline group after ${nodeExecutionTime}ms, stopping group 
execution` - ); - break; - } - - executedNodesInGroup.push(nodeId); - console.log( - `Node ${nodeId} completed in inline group (${nodeExecutionTime}ms)` - ); - } catch (error) { - // Handle errors at the group level - const message = error instanceof Error ? error.message : String(error); - currentState.nodeErrors.set(nodeId, message); - currentState.status = "error"; - console.log( - `Fatal error in node ${nodeId} within inline group: ${message}` - ); - break; - } - } - - const totalGroupTime = Date.now() - groupStartTime; - console.log( - `Inline group completed: executed ${executedNodesInGroup.length}/${nodeIds.length} nodes in ${totalGroupTime}ms` - ); - - return currentState; - } - - /** - * Executes a single node and stores its outputs. - */ - private async executeNode( - runtimeState: RuntimeState, - workflowId: string, - nodeIdentifier: string, - organizationId: string, - executionId: string, - secrets: Record, - httpRequest?: HttpRequest, - emailMessage?: EmailMessage - ): Promise { - const node = runtimeState.workflow.nodes.find( - (n): boolean => n.id === nodeIdentifier - ); - if (!node) { - runtimeState.nodeErrors.set( - nodeIdentifier, - `Node not found: ${nodeIdentifier}` - ); - return { ...runtimeState, status: "error" }; - } - - const nodeType = this.nodeRegistry.getNodeType(node.type); - this.env.COMPUTE.writeDataPoint({ - indexes: [organizationId], - blobs: [organizationId, workflowId, node.id], - doubles: [nodeType.computeCost ?? 1], - }); - - // Resolve the runnable implementation. - const executable = this.nodeRegistry.createExecutableNode(node); - if (!executable) { - runtimeState.nodeErrors.set( - nodeIdentifier, - `Node type not implemented: ${node.type}` - ); - return { ...runtimeState, status: "error" }; - } - - // Gather inputs by reading connections and default values. 
- const inputValues = this.collectNodeInputs(runtimeState, nodeIdentifier); - - try { - const processedInputs = await this.mapRuntimeToNodeInputs( - runtimeState, - nodeIdentifier, - inputValues - ); - - // Configure AI Gateway options for all AI model requests - // If CLOUDFLARE_AI_GATEWAY_ID is set, all AI requests will be routed through the gateway - // for analytics, caching, and rate limiting. If not set, requests go directly to the model. - const aiOptions: AiOptions = {}; - const gatewayId = this.env.CLOUDFLARE_AI_GATEWAY_ID; - if (gatewayId) { - aiOptions.gateway = { - id: gatewayId, - skipCache: false, // Enable caching by default for better performance - }; - } - - const context: NodeContext = { - nodeId: nodeIdentifier, - workflowId: runtimeState.workflow.id, - organizationId, - inputs: processedInputs, - httpRequest, - emailMessage, - onProgress: () => {}, - toolRegistry: this.toolRegistry, - secrets: secrets || {}, - env: { - DB: this.env.DB, - AI: this.env.AI, - AI_OPTIONS: aiOptions, - RESSOURCES: this.env.RESSOURCES, - DATASETS: this.env.DATASETS, - DATASETS_AUTORAG: this.env.DATASETS_AUTORAG, - CLOUDFLARE_ACCOUNT_ID: this.env.CLOUDFLARE_ACCOUNT_ID, - CLOUDFLARE_API_TOKEN: this.env.CLOUDFLARE_API_TOKEN, - CLOUDFLARE_AI_GATEWAY_ID: this.env.CLOUDFLARE_AI_GATEWAY_ID, - TWILIO_ACCOUNT_SID: this.env.TWILIO_ACCOUNT_SID, - TWILIO_AUTH_TOKEN: this.env.TWILIO_AUTH_TOKEN, - TWILIO_PHONE_NUMBER: this.env.TWILIO_PHONE_NUMBER, - SENDGRID_API_KEY: this.env.SENDGRID_API_KEY, - SENDGRID_DEFAULT_FROM: this.env.SENDGRID_DEFAULT_FROM, - RESEND_API_KEY: this.env.RESEND_API_KEY, - RESEND_DEFAULT_FROM: this.env.RESEND_DEFAULT_FROM, - AWS_ACCESS_KEY_ID: this.env.AWS_ACCESS_KEY_ID, - AWS_SECRET_ACCESS_KEY: this.env.AWS_SECRET_ACCESS_KEY, - AWS_REGION: this.env.AWS_REGION, - SES_DEFAULT_FROM: this.env.SES_DEFAULT_FROM, - EMAIL_DOMAIN: this.env.EMAIL_DOMAIN, - OPENAI_API_KEY: this.env.OPENAI_API_KEY, - ANTHROPIC_API_KEY: this.env.ANTHROPIC_API_KEY, - GEMINI_API_KEY: 
this.env.GEMINI_API_KEY, - HUGGINGFACE_API_KEY: this.env.HUGGINGFACE_API_KEY, - }, - }; - - const result = await executable.execute(context); - - if (result.status === "completed") { - const outputsForRuntime = await this.mapNodeToRuntimeOutputs( - runtimeState, - nodeIdentifier, - result.outputs ?? {}, - organizationId, - executionId - ); - runtimeState.nodeOutputs.set( - nodeIdentifier, - outputsForRuntime as NodeOutputs - ); - runtimeState.executedNodes.add(nodeIdentifier); - - // After successful execution, mark nodes connected to inactive outputs as skipped - runtimeState = this.markInactiveOutputNodesAsSkipped( - runtimeState, - nodeIdentifier, - result.outputs ?? {} - ); - } else { - const failureMessage = result.error ?? "Unknown error"; - runtimeState.nodeErrors.set(nodeIdentifier, failureMessage); - runtimeState.status = "error"; - } - - // Determine final workflow status. - if (runtimeState.status !== "error") { - const allNodesVisited = runtimeState.executionPlan.every((unit) => - unit.type === "individual" - ? runtimeState.executedNodes.has(unit.nodeId) || - runtimeState.skippedNodes.has(unit.nodeId) || - runtimeState.nodeErrors.has(unit.nodeId) - : unit.type === "inline" - ? unit.nodeIds.every( - (id: string) => - runtimeState.executedNodes.has(id) || - runtimeState.skippedNodes.has(id) || - runtimeState.nodeErrors.has(id) - ) - : false - ); - runtimeState.status = - allNodesVisited && runtimeState.nodeErrors.size === 0 - ? "completed" - : "executing"; - } - - return runtimeState; - } catch (error) { - if ( - error instanceof Error && - error.message.startsWith("Required input") - ) { - runtimeState.skippedNodes.add(nodeIdentifier); - - // Determine final workflow status. - if (runtimeState.status !== "error") { - const allNodesVisited = runtimeState.executionPlan.every((unit) => - unit.type === "individual" - ? 
runtimeState.executedNodes.has(unit.nodeId) || - runtimeState.skippedNodes.has(unit.nodeId) || - runtimeState.nodeErrors.has(unit.nodeId) - : unit.type === "inline" - ? unit.nodeIds.every( - (id: string) => - runtimeState.executedNodes.has(id) || - runtimeState.skippedNodes.has(id) || - runtimeState.nodeErrors.has(id) - ) - : false - ); - runtimeState.status = - allNodesVisited && runtimeState.nodeErrors.size === 0 - ? "completed" - : "executing"; - } - return runtimeState; - } - const message = error instanceof Error ? error.message : String(error); - runtimeState.nodeErrors.set(nodeIdentifier, message); - runtimeState.status = "error"; - return runtimeState; - } - } - - /** - * Returns inputs for a node by checking its default values and inbound edges. - */ - private collectNodeInputs( - runtimeState: RuntimeState, - nodeIdentifier: string - ): NodeOutputs { - const inputs: NodeOutputs = {}; - const node = runtimeState.workflow.nodes.find( - (n): boolean => n.id === nodeIdentifier - ); - if (!node) return inputs; - - // Defaults declared directly on the node. - for (const input of node.inputs) { - if (input.value !== undefined) { - if ( - typeof input.value === "string" || - typeof input.value === "number" || - typeof input.value === "boolean" || - (typeof input.value === "object" && input.value !== null) - ) { - inputs[input.name] = input.value as NodeOutputValue; - } - } - } - - // Values coming from connected nodes. 
- const inboundEdges = runtimeState.workflow.edges.filter( - (edge): boolean => edge.target === nodeIdentifier - ); - - // Group edges by target input to handle multiple connections - const edgesByInput = new Map(); - for (const edge of inboundEdges) { - const inputName = edge.targetInput; - if (!edgesByInput.has(inputName)) { - edgesByInput.set(inputName, []); - } - edgesByInput.get(inputName)!.push(edge); - } - - // Process each input's connections - for (const [inputName, edges] of edgesByInput) { - // Get the node type definition to check repeated - const executable = this.nodeRegistry.createExecutableNode(node); - const nodeTypeDefinition = executable - ? (executable.constructor as any).nodeType - : null; - const nodeTypeInput = nodeTypeDefinition?.inputs?.find( - (input: any) => input.name === inputName - ); - - // Check repeated from node type definition (not workflow node) - const acceptsMultiple = nodeTypeInput?.repeated || false; - - const values: BasicNodeOutputValue[] = []; - - for (const edge of edges) { - const sourceOutputs = runtimeState.nodeOutputs.get(edge.source); - if (sourceOutputs && sourceOutputs[edge.sourceOutput] !== undefined) { - const value = sourceOutputs[edge.sourceOutput]; - if ( - typeof value === "string" || - typeof value === "number" || - typeof value === "boolean" || - (typeof value === "object" && value !== null) - ) { - values.push(value as BasicNodeOutputValue); - } - } - } - - if (values.length > 0) { - if (acceptsMultiple) { - // For parameters that accept multiple connections, provide an array - inputs[inputName] = values; - } else { - // For single connection parameters, use the last value (current behavior) - inputs[inputName] = values[values.length - 1]; - } - } - } - - return inputs; - } - - /** - * Converts raw runtime inputs to the representation expected by the node. 
- */ - private async mapRuntimeToNodeInputs( - runtimeState: RuntimeState, - nodeIdentifier: string, - inputValues: Record - ): Promise> { - const node = runtimeState.workflow.nodes.find( - (n) => n.id === nodeIdentifier - ); - if (!node) throw new Error(`Node ${nodeIdentifier} not found`); - - const processed: Record = {}; - const objectStore = new ObjectStore(this.env.RESSOURCES); - - for (const definition of node.inputs) { - const { name, type, required } = definition; - const value = inputValues[name]; - - if (required && value === undefined) { - throw new Error( - `Required input '${name}' missing for node ${nodeIdentifier}` - ); - } - if (value === undefined || value === null) continue; - - // Check if this parameter accepts multiple connections - const executable = this.nodeRegistry.createExecutableNode(node); - const nodeTypeDefinition = executable - ? (executable.constructor as any).nodeType - : null; - const nodeTypeInput = nodeTypeDefinition?.inputs?.find( - (input: any) => input.name === name - ); - const acceptsMultiple = nodeTypeInput?.repeated || false; - - // Handle secret parameters as strings since secrets are preloaded in context - const parameterType = type === "secret" ? 
"string" : type; - - if (acceptsMultiple && Array.isArray(value)) { - // For parameters that accept multiple connections, process each value individually - const processedArray = []; - for (const singleValue of value) { - const validSingleValue = singleValue as - | string - | number - | boolean - | ObjectReference - | JsonArray - | JsonObject; - const processedSingleValue = await apiToNodeParameter( - parameterType, - validSingleValue, - objectStore - ); - processedArray.push(processedSingleValue); - } - processed[name] = processedArray; - } else { - // Single value processing (existing logic) - const validValue = value as - | string - | number - | boolean - | ObjectReference - | JsonArray - | JsonObject; - const processedValue = await apiToNodeParameter( - parameterType, - validValue, - objectStore - ); - processed[name] = processedValue; - } - } - - return processed; - } - - /** - * Converts node outputs to a serialisable runtime representation. - */ - private async mapNodeToRuntimeOutputs( - runtimeState: RuntimeState, - nodeIdentifier: string, - outputsFromNode: Record, - organizationId: string, - executionId: string - ): Promise> { - const node = runtimeState.workflow.nodes.find( - (n) => n.id === nodeIdentifier - ); - if (!node) throw new Error(`Node ${nodeIdentifier} not found`); - - const processed: Record = {}; - const objectStore = new ObjectStore(this.env.RESSOURCES); - - for (const definition of node.outputs) { - const { name, type } = definition; - const value = outputsFromNode[name]; - if (value === undefined || value === null) continue; - - // Handle secret parameters as strings since secrets are preloaded in context - const parameterType = type === "secret" ? "string" : type; - - processed[name] = await nodeToApiParameter( - parameterType, - value, - objectStore, - organizationId, - executionId - ); - } - return processed; - } - - /** - * Calculates a topological ordering of nodes. Returns an empty array if a cycle is detected. 
- */ - private createTopologicalOrder(workflow: Workflow): string[] { - const inDegree: Record = {}; - const adjacency: Record = {}; - - for (const node of workflow.nodes) { - inDegree[node.id] = 0; - adjacency[node.id] = []; - } - - for (const edge of workflow.edges) { - adjacency[edge.source].push(edge.target); - inDegree[edge.target] += 1; - } - - const queue: string[] = Object.keys(inDegree).filter( - (id) => inDegree[id] === 0 - ); - const ordered: string[] = []; - - while (queue.length > 0) { - const current = queue.shift()!; - ordered.push(current); - - for (const neighbour of adjacency[current]) { - inDegree[neighbour] -= 1; - if (inDegree[neighbour] === 0) { - queue.push(neighbour); - } - } - } - - // If ordering missed nodes, a cycle exists. - return ordered.length === workflow.nodes.length ? ordered : []; - } - - /** - * Builds node execution list from runtime state - */ - private buildNodeExecutions(runtimeState: RuntimeState) { - return runtimeState.workflow.nodes.map((node) => { - if (runtimeState.executedNodes.has(node.id)) { - return { - nodeId: node.id, - status: "completed" as const, - outputs: runtimeState.nodeOutputs.get(node.id) || {}, - }; - } - if (runtimeState.nodeErrors.has(node.id)) { - return { - nodeId: node.id, - status: "error" as const, - error: runtimeState.nodeErrors.get(node.id), - }; - } - if (runtimeState.skippedNodes.has(node.id)) { - return { - nodeId: node.id, - status: "skipped" as const, - }; - } - return { - nodeId: node.id, - status: "executing" as const, - }; - }); - } - - /** - * Persists the workflow execution state to the database. - */ - private async saveExecutionState( - userId: string, - organizationId: string, - workflowId: string, - instanceId: string, - runtimeState: RuntimeState, - startedAt?: Date, - endedAt?: Date - ): Promise { - const nodeExecutionList = this.buildNodeExecutions(runtimeState); - - const executionStatus = runtimeState.status; - const errorMsg = - runtimeState.nodeErrors.size > 0 - ? 
Array.from(runtimeState.nodeErrors.values()).join(", ") - : undefined; - - try { - const db = createDatabase(this.env.DB); - return await saveExecution(db, { - id: instanceId, - workflowId, - userId, - organizationId, - status: executionStatus as ExecutionStatusType, - nodeExecutions: nodeExecutionList, - error: errorMsg, - updatedAt: new Date(), - startedAt, - endedAt, - }); - } catch (error) { - console.error("Failed to persist execution record:", error); - // Continue without interrupting the workflow. - } - - return { - id: instanceId, - workflowId, - status: executionStatus, - nodeExecutions: nodeExecutionList, - error: errorMsg, - startedAt, - endedAt, - }; - } - - /** - * Marks nodes connected to inactive outputs as skipped. - * This is crucial for conditional logic where only one branch should execute. - */ - private markInactiveOutputNodesAsSkipped( - runtimeState: RuntimeState, - nodeIdentifier: string, - nodeOutputs: Record - ): RuntimeState { - const node = runtimeState.workflow.nodes.find( - (n) => n.id === nodeIdentifier - ); - if (!node) return runtimeState; - - // Find outputs that were NOT produced - const inactiveOutputs = node.outputs - .map((output) => output.name) - .filter((outputName) => !(outputName in nodeOutputs)); - - if (inactiveOutputs.length === 0) return runtimeState; - - // Find all edges from this node's inactive outputs - const inactiveEdges = runtimeState.workflow.edges.filter( - (edge) => - edge.source === nodeIdentifier && - inactiveOutputs.includes(edge.sourceOutput) - ); - - // Process each target node of inactive edges - for (const edge of inactiveEdges) { - this.markNodeAsSkippedIfNoValidInputs(runtimeState, edge.target); - } - - return runtimeState; - } - - /** - * Marks a node as skipped if it cannot execute due to missing required inputs. - * This is smarter than recursively skipping all dependents. 
- */ - private markNodeAsSkippedIfNoValidInputs( - runtimeState: RuntimeState, - nodeId: string - ): void { - if ( - runtimeState.skippedNodes.has(nodeId) || - runtimeState.executedNodes.has(nodeId) - ) { - return; // Already processed - } - - const node = runtimeState.workflow.nodes.find((n) => n.id === nodeId); - if (!node) return; - - // Check if this node has all required inputs satisfied - const allRequiredInputsSatisfied = this.nodeHasAllRequiredInputsSatisfied( - runtimeState, - nodeId - ); - - // Only skip if the node cannot execute (missing required inputs) - if (!allRequiredInputsSatisfied) { - runtimeState.skippedNodes.add(nodeId); - - // Recursively check dependents of this skipped node - const outgoingEdges = runtimeState.workflow.edges.filter( - (edge) => edge.source === nodeId - ); - - for (const edge of outgoingEdges) { - this.markNodeAsSkippedIfNoValidInputs(runtimeState, edge.target); - } - } - } - - /** - * Checks if a node has all required inputs satisfied. - * A node can execute if all its required inputs are available. 
- */ - private nodeHasAllRequiredInputsSatisfied( - runtimeState: RuntimeState, - nodeId: string - ): boolean { - const node = runtimeState.workflow.nodes.find((n) => n.id === nodeId); - if (!node) return false; - - // Get the node type definition to check for required inputs - const executable = this.nodeRegistry.createExecutableNode(node); - if (!executable) return false; - - const nodeTypeDefinition = (executable.constructor as any).nodeType; - if (!nodeTypeDefinition) return false; - - const inputValues = this.collectNodeInputs(runtimeState, nodeId); - - // Check each required input based on the node type definition (not workflow node definition) - for (const input of nodeTypeDefinition.inputs) { - if (input.required && inputValues[input.name] === undefined) { - return false; // Found a required input that's missing - } - } - - return true; // All required inputs are satisfied - } } diff --git a/apps/api/src/runtime/secret-manager.ts b/apps/api/src/runtime/secret-manager.ts new file mode 100644 index 00000000..7e369ee1 --- /dev/null +++ b/apps/api/src/runtime/secret-manager.ts @@ -0,0 +1,64 @@ +import type { Bindings } from "../context"; +import { createDatabase, getAllSecretsWithValues } from "../db"; + +/** + * Manages organization secrets for workflow execution. + * Handles preloading and decryption of secrets. 
+ */ +export class SecretManager { + constructor(private env: Bindings) {} + + /** + * Preloads all organization secrets for synchronous access during workflow execution + */ + async preloadAllSecrets( + organizationId: string + ): Promise> { + const secrets: Record = {}; + const db = createDatabase(this.env.DB); + + try { + // Get all secret records for the organization (including encrypted values) + const secretRecords = await getAllSecretsWithValues(db, organizationId); + + // Decrypt each secret and add to the secrets object + for (const secretRecord of secretRecords) { + try { + const secretValue = await this.decryptSecretValue( + secretRecord.encryptedValue, + organizationId + ); + secrets[secretRecord.name] = secretValue; + } catch (error) { + console.warn( + `Failed to decrypt secret '${secretRecord.name}':`, + error + ); + } + } + + console.log( + `Preloaded ${Object.keys(secrets).length} secrets for organization ${organizationId}` + ); + } catch (error) { + console.error( + `Failed to preload secrets for organization ${organizationId}:`, + error + ); + } + + return secrets; + } + + /** + * Decrypt a secret value using organization-specific key + */ + private async decryptSecretValue( + encryptedValue: string, + organizationId: string + ): Promise { + // Import decryptSecret here to avoid circular dependency issues + const { decryptSecret } = await import("../utils/encryption"); + return await decryptSecret(encryptedValue, this.env, organizationId); + } +} From 280e76432d5833e60e1a253ccc8b8231438f3b1a Mon Sep 17 00:00:00 2001 From: Bertil Chapuis Date: Tue, 7 Oct 2025 08:53:18 +0200 Subject: [PATCH 29/29] Add unit tests for the components of the runtime --- .../conditional-execution-handler.test.ts | 459 ++++++++++++++++++ apps/api/src/runtime/credit-manager.test.ts | 205 ++++++++ .../src/runtime/execution-persistence.test.ts | 410 ++++++++++++++++ .../api/src/runtime/execution-planner.test.ts | 268 ++++++++++ .../api/src/runtime/node-input-mapper.test.ts 
| 414 ++++++++++++++++ .../src/runtime/node-output-mapper.test.ts | 284 +++++++++++ apps/api/src/runtime/object-store.test.ts | 455 +++++++++++++++++ 7 files changed, 2495 insertions(+) create mode 100644 apps/api/src/runtime/conditional-execution-handler.test.ts create mode 100644 apps/api/src/runtime/credit-manager.test.ts create mode 100644 apps/api/src/runtime/execution-persistence.test.ts create mode 100644 apps/api/src/runtime/execution-planner.test.ts create mode 100644 apps/api/src/runtime/node-input-mapper.test.ts create mode 100644 apps/api/src/runtime/node-output-mapper.test.ts create mode 100644 apps/api/src/runtime/object-store.test.ts diff --git a/apps/api/src/runtime/conditional-execution-handler.test.ts b/apps/api/src/runtime/conditional-execution-handler.test.ts new file mode 100644 index 00000000..e4e37f36 --- /dev/null +++ b/apps/api/src/runtime/conditional-execution-handler.test.ts @@ -0,0 +1,459 @@ +import type { Workflow } from "@dafthunk/types"; +import { describe, expect, it, vi } from "vitest"; + +import type { CloudflareNodeRegistry } from "../nodes/cloudflare-node-registry"; +import { ConditionalExecutionHandler } from "./conditional-execution-handler"; +import type { NodeInputMapper } from "./node-input-mapper"; +import type { RuntimeState, WorkflowOutputs } from "./runtime"; + +describe("ConditionalExecutionHandler", () => { + const createMockRegistry = ( + nodeTypes: Record< + string, + { inputs?: Array<{ name: string; required?: boolean }> } + > + ): CloudflareNodeRegistry => { + return { + createExecutableNode: vi.fn((node) => ({ + constructor: { + nodeType: nodeTypes[node.type], + }, + })), + } as any; + }; + + const createMockInputMapper = ( + inputResults: Record> + ): NodeInputMapper => { + return { + collectNodeInputs: vi.fn((_runtimeState, nodeId) => { + return inputResults[nodeId] || {}; + }), + } as any; + }; + + describe("markInactiveOutputNodesAsSkipped", () => { + it("should not mark any nodes when all outputs are active", 
() => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-1", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "conditional", + inputs: [], + outputs: [ + { name: "true", type: "string" }, + { name: "false", type: "string" }, + ], + }, + { + id: "B", + type: "text", + inputs: [{ name: "input", type: "string" }], + outputs: [], + }, + ], + edges: [ + { + source: "A", + sourceOutput: "true", + target: "B", + targetInput: "input", + }, + ], + } as unknown as Workflow, + nodeOutputs: new Map([["A", { true: "yes", false: "no" }]]), + executedNodes: new Set(["A"]), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + conditional: { + inputs: [], + }, + text: { inputs: [{ name: "input", required: true }] }, + }); + const inputMapper = createMockInputMapper({ + B: { input: "yes" }, + }); + const handler = new ConditionalExecutionHandler(registry, inputMapper); + + const result = handler.markInactiveOutputNodesAsSkipped( + runtimeState, + "A", + { true: "yes", false: "no" } + ); + + expect(result.skippedNodes.size).toBe(0); + }); + + it("should mark nodes connected to inactive outputs as skipped", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-2", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "conditional", + inputs: [], + outputs: [ + { name: "true", type: "string" }, + { name: "false", type: "string" }, + ], + }, + { + id: "B", + type: "text", + inputs: [{ name: "input", type: "string", required: true }], + outputs: [], + }, + { + id: "C", + type: "text", + inputs: [{ name: "input", type: "string", required: true }], + outputs: [], + }, + ], + edges: [ + { + source: "A", + sourceOutput: "true", + target: "B", + targetInput: "input", + }, + { + source: "A", + sourceOutput: "false", + target: "C", + targetInput: "input", + 
}, + ], + } as unknown as Workflow, + nodeOutputs: new Map([["A", { true: "yes" }]]), // only "true" output + executedNodes: new Set(["A"]), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + conditional: { inputs: [] }, + text: { inputs: [{ name: "input", required: true }] }, + }); + const inputMapper = createMockInputMapper({ + B: { input: "yes" }, + C: {}, // No input from A's false output + }); + const handler = new ConditionalExecutionHandler(registry, inputMapper); + + const result = handler.markInactiveOutputNodesAsSkipped( + runtimeState, + "A", + { true: "yes" } // "false" output is inactive + ); + + expect(result.skippedNodes.has("C")).toBe(true); + expect(result.skippedNodes.has("B")).toBe(false); + }); + + it("should recursively skip dependent nodes", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-3", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "conditional", + inputs: [], + outputs: [ + { name: "true", type: "string" }, + { name: "false", type: "string" }, + ], + }, + { + id: "B", + type: "text", + inputs: [{ name: "input", type: "string", required: true }], + outputs: [{ name: "output", type: "string" }], + }, + { + id: "C", + type: "text", + inputs: [{ name: "input", type: "string", required: true }], + outputs: [], + }, + ], + edges: [ + { + source: "A", + sourceOutput: "false", + target: "B", + targetInput: "input", + }, + { + source: "B", + sourceOutput: "output", + target: "C", + targetInput: "input", + }, + ], + } as unknown as Workflow, + nodeOutputs: new Map([["A", { true: "yes" }]]), + executedNodes: new Set(["A"]), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + conditional: { inputs: [] }, + text: { inputs: [{ name: "input", required: true }] }, + }); + const 
inputMapper = createMockInputMapper({ + B: {}, // No input + C: {}, // No input (because B will be skipped) + }); + const handler = new ConditionalExecutionHandler(registry, inputMapper); + + const result = handler.markInactiveOutputNodesAsSkipped( + runtimeState, + "A", + { true: "yes" } + ); + + // Both B and C should be skipped + expect(result.skippedNodes.has("B")).toBe(true); + expect(result.skippedNodes.has("C")).toBe(true); + }); + + it("should not skip nodes with optional inputs", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-4", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "conditional", + inputs: [], + outputs: [ + { name: "true", type: "string" }, + { name: "false", type: "string" }, + ], + }, + { + id: "B", + type: "text", + inputs: [{ name: "input", type: "string", required: false }], + outputs: [], + }, + ], + edges: [ + { + source: "A", + sourceOutput: "false", + target: "B", + targetInput: "input", + }, + ], + } as unknown as Workflow, + nodeOutputs: new Map([["A", { true: "yes" }]]), + executedNodes: new Set(["A"]), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + conditional: { inputs: [] }, + text: { inputs: [{ name: "input", required: false }] }, // Optional! 
+ }); + const inputMapper = createMockInputMapper({ + B: {}, // No input, but it's optional + }); + const handler = new ConditionalExecutionHandler(registry, inputMapper); + + const result = handler.markInactiveOutputNodesAsSkipped( + runtimeState, + "A", + { true: "yes" } + ); + + // B should NOT be skipped because input is optional + expect(result.skippedNodes.has("B")).toBe(false); + }); + + it("should not skip nodes with alternative valid inputs", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-5", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "conditional", + inputs: [], + outputs: [ + { name: "true", type: "string" }, + { name: "false", type: "string" }, + ], + }, + { + id: "B", + type: "text", + inputs: [], + outputs: [{ name: "output", type: "string" }], + }, + { + id: "C", + type: "merge", + inputs: [ + { name: "input1", type: "string", required: true }, + { name: "input2", type: "string", required: false }, + ], + outputs: [], + }, + ], + edges: [ + { + source: "A", + sourceOutput: "false", + target: "C", + targetInput: "input2", + }, + { + source: "B", + sourceOutput: "output", + target: "C", + targetInput: "input1", + }, + ], + } as unknown as Workflow, + nodeOutputs: new Map([ + ["A", { true: "yes" }], + ["B", { output: "from B" }], + ]) as unknown as WorkflowOutputs, + executedNodes: new Set(["A", "B"]), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + conditional: { inputs: [] }, + text: { inputs: [] }, + merge: { + inputs: [ + { name: "input1", required: true }, + { name: "input2", required: false }, + ], + }, + }); + const inputMapper = createMockInputMapper({ + C: { input1: "from B" }, // Has required input from B + }); + const handler = new ConditionalExecutionHandler(registry, inputMapper); + + const result = handler.markInactiveOutputNodesAsSkipped( + runtimeState, 
+ "A", + { true: "yes" } + ); + + // C should NOT be skipped because it has input1 from B + expect(result.skippedNodes.has("C")).toBe(false); + }); + + it("should handle node not found", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-6", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({}); + const inputMapper = createMockInputMapper({}); + const handler = new ConditionalExecutionHandler(registry, inputMapper); + + const result = handler.markInactiveOutputNodesAsSkipped( + runtimeState, + "NonExistent", + {} + ); + + expect(result).toBe(runtimeState); + }); + + it("should handle node with no outputs", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-7", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map([["A", {}]]), + executedNodes: new Set(["A"]), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + text: { inputs: [] }, + }); + const inputMapper = createMockInputMapper({}); + const handler = new ConditionalExecutionHandler(registry, inputMapper); + + const result = handler.markInactiveOutputNodesAsSkipped( + runtimeState, + "A", + {} + ); + + expect(result.skippedNodes.size).toBe(0); + }); + }); +}); diff --git a/apps/api/src/runtime/credit-manager.test.ts b/apps/api/src/runtime/credit-manager.test.ts new file mode 100644 index 00000000..cfae9ecf --- /dev/null +++ b/apps/api/src/runtime/credit-manager.test.ts @@ -0,0 +1,205 @@ +import type { Node } from "@dafthunk/types"; +import { 
describe, expect, it, vi } from "vitest"; + +import type { Bindings } from "../context"; +import type { CloudflareNodeRegistry } from "../nodes/cloudflare-node-registry"; +import { CreditManager } from "./credit-manager"; + +// Mock the credits utility +vi.mock("../utils/credits", () => ({ + getOrganizationComputeUsage: vi.fn(), +})); + +import { getOrganizationComputeUsage } from "../utils/credits"; + +describe("CreditManager", () => { + const createMockEnv = (cloudflareEnv?: string): Bindings => { + return { + CLOUDFLARE_ENV: cloudflareEnv, + KV: {} as any, + } as Bindings; + }; + + const createMockRegistry = ( + nodeTypes: Record + ): CloudflareNodeRegistry => { + return { + getNodeType: vi.fn((type: string) => nodeTypes[type] || {}), + } as any; + }; + + describe("hasEnoughComputeCredits", () => { + it("should always return true in development mode", async () => { + const env = createMockEnv("development"); + const registry = createMockRegistry({}); + const manager = new CreditManager(env, registry); + + const result = await manager.hasEnoughComputeCredits( + "org-123", + 100, // computeCredits + 200 // computeCost (exceeds credits) + ); + + expect(result).toBe(true); + expect(getOrganizationComputeUsage).not.toHaveBeenCalled(); + }); + + it("should return true when credits are sufficient", async () => { + const env = createMockEnv("production"); + const registry = createMockRegistry({}); + const manager = new CreditManager(env, registry); + + vi.mocked(getOrganizationComputeUsage).mockResolvedValue(50); // current usage + + const result = await manager.hasEnoughComputeCredits( + "org-123", + 100, // total credits + 30 // additional cost needed + ); + + expect(result).toBe(true); // 50 + 30 = 80 <= 100 + }); + + it("should return false when credits are insufficient", async () => { + const env = createMockEnv("production"); + const registry = createMockRegistry({}); + const manager = new CreditManager(env, registry); + + 
vi.mocked(getOrganizationComputeUsage).mockResolvedValue(80); // current usage + + const result = await manager.hasEnoughComputeCredits( + "org-123", + 100, // total credits + 30 // additional cost needed + ); + + expect(result).toBe(false); // 80 + 30 = 110 > 100 + }); + + it("should return true when exactly at credit limit", async () => { + const env = createMockEnv("production"); + const registry = createMockRegistry({}); + const manager = new CreditManager(env, registry); + + vi.mocked(getOrganizationComputeUsage).mockResolvedValue(70); // current usage + + const result = await manager.hasEnoughComputeCredits( + "org-123", + 100, // total credits + 30 // additional cost needed + ); + + expect(result).toBe(true); // 70 + 30 = 100 == 100 + }); + + it("should handle zero current usage", async () => { + const env = createMockEnv("production"); + const registry = createMockRegistry({}); + const manager = new CreditManager(env, registry); + + vi.mocked(getOrganizationComputeUsage).mockResolvedValue(0); + + const result = await manager.hasEnoughComputeCredits("org-123", 100, 50); + + expect(result).toBe(true); // 0 + 50 = 50 <= 100 + }); + }); + + describe("getNodesComputeCost", () => { + it("should calculate total cost for multiple nodes", () => { + const registry = createMockRegistry({ + text: { computeCost: 1 }, + ai: { computeCost: 10 }, + image: { computeCost: 5 }, + }); + const manager = new CreditManager({} as Bindings, registry); + + const nodes: Node[] = [ + { id: "A", type: "text", inputs: [], outputs: [] }, + { id: "B", type: "ai", inputs: [], outputs: [] }, + { id: "C", type: "image", inputs: [], outputs: [] }, + ] as unknown as Node[]; + + const result = manager.getNodesComputeCost(nodes); + + expect(result).toBe(16); // 1 + 10 + 5 + }); + + it("should use default cost of 1 when computeCost not specified", () => { + const registry = createMockRegistry({ + text: {}, // no computeCost specified + unknown: {}, // no computeCost specified + }); + const 
manager = new CreditManager({} as Bindings, registry); + + const nodes: Node[] = [ + { id: "A", type: "text", inputs: [], outputs: [] }, + { id: "B", type: "unknown", inputs: [], outputs: [] }, + ] as unknown as Node[]; + + const result = manager.getNodesComputeCost(nodes); + + expect(result).toBe(2); // 1 + 1 (defaults) + }); + + it("should handle empty node list", () => { + const registry = createMockRegistry({}); + const manager = new CreditManager({} as Bindings, registry); + + const result = manager.getNodesComputeCost([]); + + expect(result).toBe(0); + }); + + it("should handle nodes with zero cost", () => { + const registry = createMockRegistry({ + free: { computeCost: 0 }, + }); + const manager = new CreditManager({} as Bindings, registry); + + const nodes: Node[] = [ + { id: "A", type: "free", inputs: [], outputs: [] }, + { id: "B", type: "free", inputs: [], outputs: [] }, + ] as unknown as Node[]; + + const result = manager.getNodesComputeCost(nodes); + + expect(result).toBe(0); + }); + + it("should handle single node", () => { + const registry = createMockRegistry({ + expensive: { computeCost: 100 }, + }); + const manager = new CreditManager({} as Bindings, registry); + + const nodes: Node[] = [ + { id: "A", type: "expensive", inputs: [], outputs: [] }, + ] as unknown as Node[]; + + const result = manager.getNodesComputeCost(nodes); + + expect(result).toBe(100); + }); + + it("should sum costs correctly for many nodes", () => { + const registry = createMockRegistry({ + type1: { computeCost: 3 }, + type2: { computeCost: 7 }, + }); + const manager = new CreditManager({} as Bindings, registry); + + const nodes: Node[] = Array.from({ length: 10 }, (_, i) => ({ + id: `node-${i}`, + type: i % 2 === 0 ? 
"type1" : "type2", + inputs: [], + outputs: [], + })) as unknown as Node[]; + + const result = manager.getNodesComputeCost(nodes); + + // 5 nodes of type1 (3 each) + 5 nodes of type2 (7 each) = 15 + 35 = 50 + expect(result).toBe(50); + }); + }); +}); diff --git a/apps/api/src/runtime/execution-persistence.test.ts b/apps/api/src/runtime/execution-persistence.test.ts new file mode 100644 index 00000000..1add405a --- /dev/null +++ b/apps/api/src/runtime/execution-persistence.test.ts @@ -0,0 +1,410 @@ +import type { Workflow, WorkflowExecution } from "@dafthunk/types"; +import { describe, expect, it, vi } from "vitest"; + +import type { Bindings } from "../context"; +import { ExecutionPersistence } from "./execution-persistence"; +import type { RuntimeState } from "./runtime"; + +// Mock the db module +vi.mock("../db", () => ({ + createDatabase: vi.fn(() => ({})), + saveExecution: vi.fn(async (_db, execution) => execution), +})); + +import { saveExecution } from "../db"; + +describe("ExecutionPersistence", () => { + const createMockEnv = (): Bindings => { + const mockFetch = vi.fn().mockResolvedValue({ ok: true }); + const mockGet = vi.fn().mockReturnValue({ fetch: mockFetch }); + const mockIdFromName = vi.fn().mockReturnValue("mock-id"); + + return { + DB: {} as any, + WORKFLOW_SESSION: { + idFromName: mockIdFromName, + get: mockGet, + } as any, + } as Bindings; + }; + + describe("buildNodeExecutions", () => { + it("should build execution list with completed nodes", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-1", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [{ name: "result", type: "string" }], + }, + { + id: "B", + type: "text", + inputs: [], + outputs: [{ name: "result", type: "string" }], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map([ + ["A", { result: "output A" }], + ["B", { result: "output B" }], + ]), + 
executedNodes: new Set(["A", "B"]), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "completed", + }; + + const persistence = new ExecutionPersistence(createMockEnv()); + const result = persistence.buildNodeExecutions(runtimeState); + + expect(result).toEqual([ + { + nodeId: "A", + status: "completed", + outputs: { result: "output A" }, + }, + { + nodeId: "B", + status: "completed", + outputs: { result: "output B" }, + }, + ]); + }); + + it("should build execution list with error nodes", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-2", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [], + }, + { + id: "B", + type: "text", + inputs: [], + outputs: [], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map([["A", {}]]), + executedNodes: new Set(["A"]), + skippedNodes: new Set(), + nodeErrors: new Map([["B", "Something went wrong"]]), + executionPlan: [], + status: "error", + }; + + const persistence = new ExecutionPersistence(createMockEnv()); + const result = persistence.buildNodeExecutions(runtimeState); + + expect(result).toEqual([ + { + nodeId: "A", + status: "completed", + outputs: {}, + }, + { + nodeId: "B", + status: "error", + error: "Something went wrong", + }, + ]); + }); + + it("should build execution list with skipped nodes", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-3", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [], + }, + { + id: "B", + type: "text", + inputs: [], + outputs: [], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map([["A", {}]]), + executedNodes: new Set(["A"]), + skippedNodes: new Set(["B"]), + nodeErrors: new Map(), + executionPlan: [], + status: "completed", + }; + + const persistence = new 
ExecutionPersistence(createMockEnv()); + const result = persistence.buildNodeExecutions(runtimeState); + + expect(result).toEqual([ + { + nodeId: "A", + status: "completed", + outputs: {}, + }, + { + nodeId: "B", + status: "skipped", + }, + ]); + }); + + it("should mark unprocessed nodes as executing", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-4", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [], + }, + { + id: "B", + type: "text", + inputs: [], + outputs: [], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map([["A", {}]]), + executedNodes: new Set(["A"]), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const persistence = new ExecutionPersistence(createMockEnv()); + const result = persistence.buildNodeExecutions(runtimeState); + + expect(result).toEqual([ + { + nodeId: "A", + status: "completed", + outputs: {}, + }, + { + nodeId: "B", + status: "executing", + }, + ]); + }); + }); + + describe("saveExecutionState", () => { + it("should save execution state to database", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-123", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map([["A", {}]]), + executedNodes: new Set(["A"]), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "completed", + }; + + const persistence = new ExecutionPersistence(createMockEnv()); + const startedAt = new Date("2024-01-01T00:00:00Z"); + const endedAt = new Date("2024-01-01T00:01:00Z"); + + const result = await persistence.saveExecutionState( + "user-123", + "org-123", + "workflow-123", + "exec-456", + runtimeState, + startedAt, + endedAt + ); + + 
expect(saveExecution).toHaveBeenCalledWith( + {}, + expect.objectContaining({ + id: "exec-456", + workflowId: "workflow-123", + userId: "user-123", + organizationId: "org-123", + status: "completed", + startedAt, + endedAt, + }) + ); + + expect(result.id).toBe("exec-456"); + expect(result.status).toBe("completed"); + }); + + it("should handle errors with multiple node errors", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-123", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [], + }, + { + id: "B", + type: "text", + inputs: [], + outputs: [], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map([ + ["A", "Error 1"], + ["B", "Error 2"], + ]), + executionPlan: [], + status: "error", + }; + + const persistence = new ExecutionPersistence(createMockEnv()); + + const result = await persistence.saveExecutionState( + "user-123", + "org-123", + "workflow-123", + "exec-456", + runtimeState + ); + + expect(result.error).toBe("Error 1, Error 2"); + }); + + it("should handle database save failure gracefully", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-123", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "completed", + }; + + vi.mocked(saveExecution).mockRejectedValueOnce( + new Error("Database error") + ); + + const persistence = new ExecutionPersistence(createMockEnv()); + + const result = await persistence.saveExecutionState( + "user-123", + "org-123", + "workflow-123", + "exec-456", + runtimeState + ); + + // Should return execution record even when database fails + expect(result.id).toBe("exec-456"); + 
expect(result.workflowId).toBe("workflow-123"); + }); + }); + + describe("sendExecutionUpdateToSession", () => { + it("should send execution update via WebSocket", async () => { + const env = createMockEnv(); + const persistence = new ExecutionPersistence(env); + + const execution: WorkflowExecution = { + id: "exec-123", + workflowId: "workflow-456", + status: "executing", + nodeExecutions: [], + }; + + await persistence.sendExecutionUpdateToSession("session-789", execution); + + expect(env.WORKFLOW_SESSION.idFromName).toHaveBeenCalledWith( + "session-789" + ); + expect(env.WORKFLOW_SESSION.get).toHaveBeenCalledWith("mock-id"); + }); + + it("should handle WebSocket send failure gracefully", async () => { + const env = createMockEnv(); + const mockGet = env.WORKFLOW_SESSION.get as any; + mockGet.mockReturnValue({ + fetch: vi.fn().mockRejectedValue(new Error("Connection failed")), + }); + + const persistence = new ExecutionPersistence(env); + + const execution: WorkflowExecution = { + id: "exec-123", + workflowId: "workflow-456", + status: "executing", + nodeExecutions: [], + }; + + // Should not throw + await expect( + persistence.sendExecutionUpdateToSession("session-789", execution) + ).resolves.toBeUndefined(); + }); + }); +}); diff --git a/apps/api/src/runtime/execution-planner.test.ts b/apps/api/src/runtime/execution-planner.test.ts new file mode 100644 index 00000000..ae01d577 --- /dev/null +++ b/apps/api/src/runtime/execution-planner.test.ts @@ -0,0 +1,268 @@ +import type { Workflow } from "@dafthunk/types"; +import { describe, expect, it, vi } from "vitest"; + +import type { CloudflareNodeRegistry } from "../nodes/cloudflare-node-registry"; +import { ExecutionPlanner } from "./execution-planner"; + +describe("ExecutionPlanner", () => { + const createMockRegistry = ( + nodeTypes: Record + ): CloudflareNodeRegistry => { + return { + getNodeType: vi.fn((type: string) => nodeTypes[type] || {}), + } as any; + }; + + describe("createTopologicalOrder", () => { + 
it("should create correct topological order for simple linear workflow", () => { + const workflow: Workflow = { + id: "test", + name: "test", + nodes: [ + { id: "A", type: "text", inputs: [], outputs: [] }, + { id: "B", type: "text", inputs: [], outputs: [] }, + { id: "C", type: "text", inputs: [], outputs: [] }, + ], + edges: [ + { source: "A", sourceOutput: "out", target: "B", targetInput: "in" }, + { source: "B", sourceOutput: "out", target: "C", targetInput: "in" }, + ], + } as unknown as Workflow; + + const registry = createMockRegistry({}); + const planner = new ExecutionPlanner(registry); + + const result = planner.createTopologicalOrder(workflow); + + expect(result).toEqual(["A", "B", "C"]); + }); + + it("should handle branching workflows (fan-out)", () => { + const workflow: Workflow = { + id: "test", + name: "test", + nodes: [ + { id: "A", type: "text", inputs: [], outputs: [] }, + { id: "B", type: "text", inputs: [], outputs: [] }, + { id: "C", type: "text", inputs: [], outputs: [] }, + ], + edges: [ + { source: "A", sourceOutput: "out", target: "B", targetInput: "in" }, + { source: "A", sourceOutput: "out", target: "C", targetInput: "in" }, + ], + } as unknown as Workflow; + + const registry = createMockRegistry({}); + const planner = new ExecutionPlanner(registry); + + const result = planner.createTopologicalOrder(workflow); + + expect(result[0]).toBe("A"); + expect(result).toContain("B"); + expect(result).toContain("C"); + expect(result).toHaveLength(3); + }); + + it("should handle merging workflows (fan-in)", () => { + const workflow: Workflow = { + id: "test", + name: "test", + nodes: [ + { id: "A", type: "text", inputs: [], outputs: [] }, + { id: "B", type: "text", inputs: [], outputs: [] }, + { id: "C", type: "text", inputs: [], outputs: [] }, + ], + edges: [ + { source: "A", sourceOutput: "out", target: "C", targetInput: "in1" }, + { source: "B", sourceOutput: "out", target: "C", targetInput: "in2" }, + ], + } as unknown as Workflow; + + const 
registry = createMockRegistry({}); + const planner = new ExecutionPlanner(registry); + + const result = planner.createTopologicalOrder(workflow); + + expect(result[2]).toBe("C"); + expect(result).toContain("A"); + expect(result).toContain("B"); + expect(result).toHaveLength(3); + }); + + it("should detect cycles and return empty array", () => { + const workflow: Workflow = { + id: "test", + name: "test", + nodes: [ + { id: "A", type: "text", inputs: [], outputs: [] }, + { id: "B", type: "text", inputs: [], outputs: [] }, + { id: "C", type: "text", inputs: [], outputs: [] }, + ], + edges: [ + { source: "A", sourceOutput: "out", target: "B", targetInput: "in" }, + { source: "B", sourceOutput: "out", target: "C", targetInput: "in" }, + { source: "C", sourceOutput: "out", target: "A", targetInput: "in" }, + ], + } as unknown as Workflow; + + const registry = createMockRegistry({}); + const planner = new ExecutionPlanner(registry); + + const result = planner.createTopologicalOrder(workflow); + + expect(result).toEqual([]); + }); + + it("should handle workflow with no edges", () => { + const workflow: Workflow = { + id: "test", + name: "test", + nodes: [ + { id: "A", type: "text", inputs: [], outputs: [] }, + { id: "B", type: "text", inputs: [], outputs: [] }, + ], + edges: [], + } as unknown as Workflow; + + const registry = createMockRegistry({}); + const planner = new ExecutionPlanner(registry); + + const result = planner.createTopologicalOrder(workflow); + + expect(result).toHaveLength(2); + expect(result).toContain("A"); + expect(result).toContain("B"); + }); + }); + + describe("createExecutionPlan", () => { + it("should create individual execution units for non-inlinable nodes", () => { + const workflow: Workflow = { + id: "test", + name: "test", + nodes: [ + { id: "A", type: "text", inputs: [], outputs: [] }, + { id: "B", type: "text", inputs: [], outputs: [] }, + ], + edges: [ + { source: "A", sourceOutput: "out", target: "B", targetInput: "in" }, + ], + } as 
unknown as Workflow; + + const registry = createMockRegistry({ + text: { inlinable: false }, + }); + const planner = new ExecutionPlanner(registry); + + const ordered = ["A", "B"]; + const result = planner.createExecutionPlan(workflow, ordered); + + expect(result).toEqual([ + { type: "individual", nodeId: "A" }, + { type: "individual", nodeId: "B" }, + ]); + }); + + it("should group consecutive inlinable nodes", () => { + const workflow: Workflow = { + id: "test", + name: "test", + nodes: [ + { id: "A", type: "math", inputs: [], outputs: [] }, + { id: "B", type: "math", inputs: [], outputs: [] }, + { id: "C", type: "math", inputs: [], outputs: [] }, + ], + edges: [ + { source: "A", sourceOutput: "out", target: "B", targetInput: "in" }, + { source: "B", sourceOutput: "out", target: "C", targetInput: "in" }, + ], + } as unknown as Workflow; + + const registry = createMockRegistry({ + math: { inlinable: true }, + }); + const planner = new ExecutionPlanner(registry); + + const ordered = ["A", "B", "C"]; + const result = planner.createExecutionPlan(workflow, ordered); + + expect(result).toEqual([{ type: "inline", nodeIds: ["A", "B", "C"] }]); + }); + + it("should handle mixed inlinable and non-inlinable nodes", () => { + const workflow: Workflow = { + id: "test", + name: "test", + nodes: [ + { id: "A", type: "math", inputs: [], outputs: [] }, + { id: "B", type: "ai", inputs: [], outputs: [] }, + { id: "C", type: "math", inputs: [], outputs: [] }, + ], + edges: [ + { source: "A", sourceOutput: "out", target: "B", targetInput: "in" }, + { source: "B", sourceOutput: "out", target: "C", targetInput: "in" }, + ], + } as unknown as Workflow; + + const registry = createMockRegistry({ + math: { inlinable: true }, + ai: { inlinable: false }, + }); + const planner = new ExecutionPlanner(registry); + + const ordered = ["A", "B", "C"]; + const result = planner.createExecutionPlan(workflow, ordered); + + expect(result).toEqual([ + { type: "individual", nodeId: "A" }, + { type: 
"individual", nodeId: "B" }, + { type: "individual", nodeId: "C" }, + ]); + }); + + it("should group fan-out pattern of inlinable nodes", () => { + const workflow: Workflow = { + id: "test", + name: "test", + nodes: [ + { id: "A", type: "math", inputs: [], outputs: [] }, + { id: "B", type: "math", inputs: [], outputs: [] }, + { id: "C", type: "math", inputs: [], outputs: [] }, + ], + edges: [ + { source: "A", sourceOutput: "out", target: "B", targetInput: "in" }, + { source: "A", sourceOutput: "out", target: "C", targetInput: "in" }, + ], + } as unknown as Workflow; + + const registry = createMockRegistry({ + math: { inlinable: true }, + }); + const planner = new ExecutionPlanner(registry); + + const ordered = ["A", "B", "C"]; + const result = planner.createExecutionPlan(workflow, ordered); + + expect(result).toEqual([{ type: "inline", nodeIds: ["A", "B", "C"] }]); + }); + + it("should handle single inlinable node as individual", () => { + const workflow: Workflow = { + id: "test", + name: "test", + nodes: [{ id: "A", type: "math", inputs: [], outputs: [] }], + edges: [], + } as unknown as Workflow; + + const registry = createMockRegistry({ + math: { inlinable: true }, + }); + const planner = new ExecutionPlanner(registry); + + const ordered = ["A"]; + const result = planner.createExecutionPlan(workflow, ordered); + + expect(result).toEqual([{ type: "individual", nodeId: "A" }]); + }); + }); +}); diff --git a/apps/api/src/runtime/node-input-mapper.test.ts b/apps/api/src/runtime/node-input-mapper.test.ts new file mode 100644 index 00000000..6367b486 --- /dev/null +++ b/apps/api/src/runtime/node-input-mapper.test.ts @@ -0,0 +1,414 @@ +import type { Workflow } from "@dafthunk/types"; +import { describe, expect, it, vi } from "vitest"; + +import type { CloudflareNodeRegistry } from "../nodes/cloudflare-node-registry"; +import { NodeInputMapper } from "./node-input-mapper"; +import type { ObjectStore } from "./object-store"; +import type { RuntimeState } from 
"./runtime"; + +describe("NodeInputMapper", () => { + const createMockRegistry = ( + nodeTypes: Record< + string, + { + inputs?: Array<{ + name: string; + repeated?: boolean; + required?: boolean; + }>; + } + > + ): CloudflareNodeRegistry => { + return { + createExecutableNode: vi.fn((node) => ({ + constructor: { + nodeType: nodeTypes[node.type], + }, + })), + } as any; + }; + + describe("collectNodeInputs", () => { + it("should collect default values from node definition", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-1", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [ + { name: "text", type: "string", value: "hello" }, + { name: "count", type: "number", value: 42 }, + ], + outputs: [], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + text: { + inputs: [ + { name: "text", required: false }, + { name: "count", required: false }, + ], + }, + }); + const mapper = new NodeInputMapper(registry); + + const result = mapper.collectNodeInputs(runtimeState, "A"); + + expect(result).toEqual({ + text: "hello", + count: 42, + }); + }); + + it("should collect values from connected nodes", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-2", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [{ name: "result", type: "string" }], + }, + { + id: "B", + type: "text", + inputs: [{ name: "input", type: "string" }], + outputs: [], + }, + ], + edges: [ + { + source: "A", + sourceOutput: "result", + target: "B", + targetInput: "input", + }, + ], + } as unknown as Workflow, + nodeOutputs: new Map([["A", { result: "test value" }]]), + executedNodes: new Set(["A"]), + 
skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + text: { inputs: [{ name: "input", required: false }] }, + }); + const mapper = new NodeInputMapper(registry); + + const result = mapper.collectNodeInputs(runtimeState, "B"); + + expect(result).toEqual({ + input: "test value", + }); + }); + + it("should handle repeated parameters as array", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-3", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [{ name: "out", type: "string" }], + }, + { + id: "B", + type: "text", + inputs: [], + outputs: [{ name: "out", type: "string" }], + }, + { + id: "C", + type: "merge", + inputs: [{ name: "values", type: "string" }], + outputs: [], + }, + ], + edges: [ + { + source: "A", + sourceOutput: "out", + target: "C", + targetInput: "values", + }, + { + source: "B", + sourceOutput: "out", + target: "C", + targetInput: "values", + }, + ], + } as unknown as Workflow, + nodeOutputs: new Map([ + ["A", { out: "value1" }], + ["B", { out: "value2" }], + ]), + executedNodes: new Set(["A", "B"]), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + merge: { inputs: [{ name: "values", repeated: true }] }, + }); + const mapper = new NodeInputMapper(registry); + + const result = mapper.collectNodeInputs(runtimeState, "C"); + + expect(result).toEqual({ + values: ["value1", "value2"], + }); + }); + + it("should use last value for non-repeated parameters with multiple connections", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-4", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [{ name: "out", type: "string" }], + }, + { + id: "B", + type: 
"text", + inputs: [], + outputs: [{ name: "out", type: "string" }], + }, + { + id: "C", + type: "text", + inputs: [{ name: "input", type: "string" }], + outputs: [], + }, + ], + edges: [ + { + source: "A", + sourceOutput: "out", + target: "C", + targetInput: "input", + }, + { + source: "B", + sourceOutput: "out", + target: "C", + targetInput: "input", + }, + ], + } as unknown as Workflow, + nodeOutputs: new Map([ + ["A", { out: "value1" }], + ["B", { out: "value2" }], + ]), + executedNodes: new Set(["A", "B"]), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + text: { inputs: [{ name: "input", repeated: false }] }, + }); + const mapper = new NodeInputMapper(registry); + + const result = mapper.collectNodeInputs(runtimeState, "C"); + + expect(result).toEqual({ + input: "value2", // Last value + }); + }); + + it("should return empty object for node not found", () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-5", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({}); + const mapper = new NodeInputMapper(registry); + + const result = mapper.collectNodeInputs(runtimeState, "NonExistent"); + + expect(result).toEqual({}); + }); + }); + + describe("mapRuntimeToNodeInputs", () => { + it("should throw error for missing required input", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-6", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [{ name: "required", type: "string", required: true }], + outputs: [], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + 
executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + text: { inputs: [{ name: "required", required: true }] }, + }); + const mapper = new NodeInputMapper(registry); + const mockStore = {} as ObjectStore; + + await expect( + mapper.mapRuntimeToNodeInputs(runtimeState, "A", {}, mockStore) + ).rejects.toThrow("Required input 'required' missing for node A"); + }); + + it("should skip undefined and null values", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-7", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [ + { name: "optional1", type: "string", required: false }, + { name: "optional2", type: "string", required: false }, + ], + outputs: [], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + text: { + inputs: [ + { name: "optional1", required: false }, + { name: "optional2", required: false }, + ], + }, + }); + const mapper = new NodeInputMapper(registry); + const mockStore = {} as ObjectStore; + + const result = await mapper.mapRuntimeToNodeInputs( + runtimeState, + "A", + { optional1: undefined, optional2: null }, + mockStore + ); + + expect(result).toEqual({}); + }); + + it("should process repeated parameters as array", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-8", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "merge", + inputs: [{ name: "values", type: "string", required: false }], + outputs: [], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new 
Map(), + executionPlan: [], + status: "executing", + }; + + const registry = createMockRegistry({ + merge: { inputs: [{ name: "values", repeated: true }] }, + }); + const mapper = new NodeInputMapper(registry); + const mockStore = {} as ObjectStore; + + const result = await mapper.mapRuntimeToNodeInputs( + runtimeState, + "A", + { values: ["a", "b", "c"] }, + mockStore + ); + + expect(result).toEqual({ + values: ["a", "b", "c"], + }); + }); + }); +}); diff --git a/apps/api/src/runtime/node-output-mapper.test.ts b/apps/api/src/runtime/node-output-mapper.test.ts new file mode 100644 index 00000000..84b0de51 --- /dev/null +++ b/apps/api/src/runtime/node-output-mapper.test.ts @@ -0,0 +1,284 @@ +import type { Workflow } from "@dafthunk/types"; +import { describe, expect, it, vi } from "vitest"; + +import { NodeOutputMapper } from "./node-output-mapper"; +import type { ObjectStore } from "./object-store"; +import type { RuntimeState } from "./runtime"; + +// Mock the parameter mapper module +vi.mock("../nodes/parameter-mapper", () => ({ + nodeToApiParameter: vi.fn(async (type, value) => { + // Simple mock that returns the value as-is for most types + if (type === "object") { + return { id: "mock-object-ref", mimeType: "application/octet-stream" }; + } + return value; + }), +})); + +describe("NodeOutputMapper", () => { + describe("mapNodeToRuntimeOutputs", () => { + it("should map basic output values", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-1", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [ + { name: "text", type: "string" }, + { name: "count", type: "number" }, + ], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const mapper = new NodeOutputMapper(); + const mockStore = {} as 
ObjectStore; + + const result = await mapper.mapNodeToRuntimeOutputs( + runtimeState, + "A", + { text: "hello", count: 42 }, + mockStore, + "org-123", + "exec-456" + ); + + expect(result).toEqual({ + text: "hello", + count: 42, + }); + }); + + it("should skip undefined and null output values", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-2", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [ + { name: "output1", type: "string" }, + { name: "output2", type: "string" }, + { name: "output3", type: "string" }, + ], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const mapper = new NodeOutputMapper(); + const mockStore = {} as ObjectStore; + + const result = await mapper.mapNodeToRuntimeOutputs( + runtimeState, + "A", + { output1: "value", output2: undefined, output3: null }, + mockStore, + "org-123", + "exec-456" + ); + + expect(result).toEqual({ + output1: "value", + }); + }); + + it("should throw error when node not found", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-3", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const mapper = new NodeOutputMapper(); + const mockStore = {} as ObjectStore; + + await expect( + mapper.mapNodeToRuntimeOutputs( + runtimeState, + "NonExistent", + {}, + mockStore, + "org-123", + "exec-456" + ) + ).rejects.toThrow("Node NonExistent not found"); + }); + + it("should handle multiple output types", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-4", + 
name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "multi", + inputs: [], + outputs: [ + { name: "text", type: "string" }, + { name: "number", type: "number" }, + { name: "bool", type: "boolean" }, + { name: "json", type: "json" }, + ], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const mapper = new NodeOutputMapper(); + const mockStore = {} as ObjectStore; + + const result = await mapper.mapNodeToRuntimeOutputs( + runtimeState, + "A", + { + text: "hello", + number: 42, + bool: true, + json: { key: "value" }, + }, + mockStore, + "org-123", + "exec-456" + ); + + expect(result).toEqual({ + text: "hello", + number: 42, + bool: true, + json: { key: "value" }, + }); + }); + + it("should handle empty outputs object", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-5", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [{ name: "output", type: "string" }], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const mapper = new NodeOutputMapper(); + const mockStore = {} as ObjectStore; + + const result = await mapper.mapNodeToRuntimeOutputs( + runtimeState, + "A", + {}, + mockStore, + "org-123", + "exec-456" + ); + + expect(result).toEqual({}); + }); + + it("should handle outputs with no matching values", async () => { + const runtimeState: RuntimeState = { + workflow: { + id: "workflow-6", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [ + { + id: "A", + type: "text", + inputs: [], + outputs: [ + { name: "output1", type: "string" }, + { name: "output2", 
type: "string" }, + ], + }, + ], + edges: [], + } as unknown as Workflow, + nodeOutputs: new Map(), + executedNodes: new Set(), + skippedNodes: new Set(), + nodeErrors: new Map(), + executionPlan: [], + status: "executing", + }; + + const mapper = new NodeOutputMapper(); + const mockStore = {} as ObjectStore; + + const result = await mapper.mapNodeToRuntimeOutputs( + runtimeState, + "A", + { someOtherOutput: "value" }, + mockStore, + "org-123", + "exec-456" + ); + + expect(result).toEqual({}); + }); + }); +}); diff --git a/apps/api/src/runtime/object-store.test.ts b/apps/api/src/runtime/object-store.test.ts new file mode 100644 index 00000000..3a1ada07 --- /dev/null +++ b/apps/api/src/runtime/object-store.test.ts @@ -0,0 +1,455 @@ +import type { + ObjectReference, + Workflow, + WorkflowExecution, +} from "@dafthunk/types"; +import { beforeEach, describe, expect, it, vi } from "vitest"; + +import { ObjectStore } from "./object-store"; + +describe("ObjectStore", () => { + let mockBucket: any; + + beforeEach(() => { + mockBucket = { + put: vi.fn().mockResolvedValue({ etag: "mock-etag" }), + get: vi.fn(), + delete: vi.fn().mockResolvedValue(undefined), + list: vi.fn(), + }; + }); + + describe("Binary Object Storage", () => { + describe("writeObject", () => { + it("should write object and return reference with generated ID", async () => { + const store = new ObjectStore(mockBucket); + const data = new Uint8Array([1, 2, 3]); + + const result = await store.writeObject( + data, + "image/png", + "org-123", + "exec-456" + ); + + expect(result).toHaveProperty("id"); + expect(result.mimeType).toBe("image/png"); + expect(mockBucket.put).toHaveBeenCalledWith( + expect.stringContaining("objects/"), + data, + expect.objectContaining({ + httpMetadata: expect.objectContaining({ + contentType: "image/png", + }), + }) + ); + }); + }); + + describe("writeObjectWithId", () => { + it("should write object with specific ID", async () => { + const store = new ObjectStore(mockBucket); + 
const data = new Uint8Array([1, 2, 3]); + + const result = await store.writeObjectWithId( + "custom-id", + data, + "image/jpeg", + "org-123" + ); + + expect(result.id).toBe("custom-id"); + expect(result.mimeType).toBe("image/jpeg"); + expect(mockBucket.put).toHaveBeenCalledWith( + "objects/custom-id/object.data", + data, + expect.objectContaining({ + customMetadata: expect.objectContaining({ + id: "custom-id", + organizationId: "org-123", + }), + }) + ); + }); + + it("should include executionId when provided", async () => { + const store = new ObjectStore(mockBucket); + const data = new Uint8Array([1, 2, 3]); + + await store.writeObjectWithId( + "custom-id", + data, + "image/png", + "org-123", + "exec-456" + ); + + expect(mockBucket.put).toHaveBeenCalledWith( + expect.any(String), + expect.any(Uint8Array), + expect.objectContaining({ + customMetadata: expect.objectContaining({ + executionId: "exec-456", + }), + }) + ); + }); + }); + + describe("readObject", () => { + it("should read object and return data with metadata", async () => { + const mockData = new Uint8Array([1, 2, 3]); + mockBucket.get.mockResolvedValue({ + arrayBuffer: vi.fn().mockResolvedValue(mockData.buffer), + size: 3, + customMetadata: { organizationId: "org-123" }, + }); + + const store = new ObjectStore(mockBucket); + const reference: ObjectReference = { + id: "obj-123", + mimeType: "image/png", + }; + + const result = await store.readObject(reference); + + expect(result).not.toBeNull(); + expect(result?.data).toEqual(mockData); + expect(result?.metadata).toEqual({ organizationId: "org-123" }); + expect(mockBucket.get).toHaveBeenCalledWith( + "objects/obj-123/object.data" + ); + }); + + it("should return null when object not found", async () => { + mockBucket.get.mockResolvedValue(null); + + const store = new ObjectStore(mockBucket); + const reference: ObjectReference = { + id: "obj-123", + mimeType: "image/png", + }; + + const result = await store.readObject(reference); + + 
expect(result).toBeNull(); + }); + }); + + describe("deleteObject", () => { + it("should delete object", async () => { + const store = new ObjectStore(mockBucket); + const reference: ObjectReference = { + id: "obj-123", + mimeType: "image/png", + }; + + await store.deleteObject(reference); + + expect(mockBucket.delete).toHaveBeenCalledWith( + "objects/obj-123/object.data" + ); + }); + }); + + describe("listObjects", () => { + it("should list objects for organization", async () => { + mockBucket.list.mockResolvedValue({ + objects: [ + { + key: "objects/obj-1/object.data", + size: 100, + httpMetadata: { contentType: "image/png" }, + customMetadata: { + organizationId: "org-123", + createdAt: "2024-01-01T00:00:00Z", + executionId: "exec-1", + }, + }, + { + key: "objects/obj-2/object.data", + size: 200, + httpMetadata: { contentType: "image/jpeg" }, + customMetadata: { + organizationId: "org-123", + createdAt: "2024-01-02T00:00:00Z", + }, + }, + { + key: "objects/obj-3/object.data", + size: 300, + httpMetadata: { contentType: "image/gif" }, + customMetadata: { + organizationId: "org-456", // Different org + createdAt: "2024-01-03T00:00:00Z", + }, + }, + ], + }); + + const store = new ObjectStore(mockBucket); + const result = await store.listObjects("org-123"); + + expect(result).toHaveLength(2); + expect(result[0]).toEqual({ + id: "obj-1", + mimeType: "image/png", + size: 100, + createdAt: new Date("2024-01-01T00:00:00Z"), + organizationId: "org-123", + executionId: "exec-1", + }); + expect(result[1].id).toBe("obj-2"); + }); + }); + }); + + describe("Workflow Storage", () => { + describe("writeWorkflow", () => { + it("should write workflow to storage", async () => { + const store = new ObjectStore(mockBucket); + const workflow: Workflow = { + id: "workflow-123", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [], + edges: [], + } as unknown as Workflow; + + const result = await store.writeWorkflow(workflow); + + 
expect(result).toBe("workflow-123"); + expect(mockBucket.put).toHaveBeenCalledWith( + "workflows/workflow-123/workflow.json", + JSON.stringify(workflow), + expect.objectContaining({ + httpMetadata: expect.objectContaining({ + contentType: "application/json", + }), + }) + ); + }); + }); + + describe("readWorkflow", () => { + it("should read workflow from storage", async () => { + const workflow: Workflow = { + id: "workflow-123", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [], + edges: [], + } as unknown as Workflow; + + mockBucket.get.mockResolvedValue({ + text: vi.fn().mockResolvedValue(JSON.stringify(workflow)), + size: 100, + }); + + const store = new ObjectStore(mockBucket); + const result = await store.readWorkflow("workflow-123"); + + expect(result).toEqual(workflow); + expect(mockBucket.get).toHaveBeenCalledWith( + "workflows/workflow-123/workflow.json" + ); + }); + + it("should throw error when workflow not found", async () => { + mockBucket.get.mockResolvedValue(null); + + const store = new ObjectStore(mockBucket); + + await expect(store.readWorkflow("workflow-123")).rejects.toThrow( + "Workflow not found: workflow-123" + ); + }); + }); + + describe("deleteWorkflow", () => { + it("should delete workflow from storage", async () => { + const store = new ObjectStore(mockBucket); + + await store.deleteWorkflow("workflow-123"); + + expect(mockBucket.delete).toHaveBeenCalledWith( + "workflows/workflow-123/workflow.json" + ); + }); + }); + }); + + describe("Execution Workflow Storage", () => { + describe("writeExecutionWorkflow", () => { + it("should write execution workflow snapshot", async () => { + const store = new ObjectStore(mockBucket); + const workflow: Workflow = { + id: "workflow-123", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [], + edges: [], + } as unknown as Workflow; + + const result = await store.writeExecutionWorkflow("exec-456", workflow); + + expect(result).toBe("exec-456"); 
+ expect(mockBucket.put).toHaveBeenCalledWith( + "executions/exec-456/workflow.json", + JSON.stringify(workflow), + expect.objectContaining({ + customMetadata: expect.objectContaining({ + executionId: "exec-456", + workflowId: "workflow-123", + }), + }) + ); + }); + }); + + describe("readExecutionWorkflow", () => { + it("should read execution workflow snapshot", async () => { + const workflow: Workflow = { + id: "workflow-123", + name: "Test Workflow", + handle: "test-workflow", + type: "manual", + nodes: [], + edges: [], + } as unknown as Workflow; + + mockBucket.get.mockResolvedValue({ + text: vi.fn().mockResolvedValue(JSON.stringify(workflow)), + size: 100, + }); + + const store = new ObjectStore(mockBucket); + const result = await store.readExecutionWorkflow("exec-456"); + + expect(result).toEqual(workflow); + expect(mockBucket.get).toHaveBeenCalledWith( + "executions/exec-456/workflow.json" + ); + }); + + it("should throw error when execution workflow not found", async () => { + mockBucket.get.mockResolvedValue(null); + + const store = new ObjectStore(mockBucket); + + await expect(store.readExecutionWorkflow("exec-456")).rejects.toThrow( + "Workflow not found for execution: exec-456" + ); + }); + }); + }); + + describe("Execution Storage", () => { + describe("writeExecution", () => { + it("should write execution to storage", async () => { + const store = new ObjectStore(mockBucket); + const execution: WorkflowExecution = { + id: "exec-456", + workflowId: "workflow-123", + status: "completed", + nodeExecutions: [], + }; + + const result = await store.writeExecution(execution); + + expect(result).toBe("exec-456"); + expect(mockBucket.put).toHaveBeenCalledWith( + "executions/exec-456/execution.json", + JSON.stringify(execution), + expect.objectContaining({ + customMetadata: expect.objectContaining({ + workflowId: "workflow-123", + status: "completed", + }), + }) + ); + }); + }); + + describe("readExecution", () => { + it("should read execution from storage", 
async () => { + const execution: WorkflowExecution = { + id: "exec-456", + workflowId: "workflow-123", + status: "completed", + nodeExecutions: [], + }; + + mockBucket.get.mockResolvedValue({ + text: vi.fn().mockResolvedValue(JSON.stringify(execution)), + size: 100, + }); + + const store = new ObjectStore(mockBucket); + const result = await store.readExecution("exec-456"); + + expect(result).toEqual(execution); + expect(mockBucket.get).toHaveBeenCalledWith( + "executions/exec-456/execution.json" + ); + }); + + it("should throw error when execution not found", async () => { + mockBucket.get.mockResolvedValue(null); + + const store = new ObjectStore(mockBucket); + + await expect(store.readExecution("exec-456")).rejects.toThrow( + "Execution not found: exec-456" + ); + }); + }); + + describe("deleteExecution", () => { + it("should delete execution from storage", async () => { + const store = new ObjectStore(mockBucket); + + await store.deleteExecution("exec-456"); + + expect(mockBucket.delete).toHaveBeenCalledWith( + "executions/exec-456/execution.json" + ); + }); + }); + }); + + describe("Error Handling", () => { + it("should throw error when bucket not initialized", async () => { + const store = new ObjectStore(null as any); + + await expect( + store.writeObject(new Uint8Array([1, 2, 3]), "image/png", "org-123") + ).rejects.toThrow("R2 bucket is not initialized"); + }); + + it("should handle bucket put failure", async () => { + mockBucket.put.mockRejectedValue(new Error("Storage error")); + + const store = new ObjectStore(mockBucket); + + await expect( + store.writeObject(new Uint8Array([1, 2, 3]), "image/png", "org-123") + ).rejects.toThrow("Storage error"); + }); + + it("should handle bucket get failure", async () => { + mockBucket.get.mockRejectedValue(new Error("Read error")); + + const store = new ObjectStore(mockBucket); + const reference: ObjectReference = { + id: "obj-123", + mimeType: "image/png", + }; + + await 
expect(store.readObject(reference)).rejects.toThrow("Read error"); + }); + }); +});