diff --git a/apps/demo-mcp/README.md b/apps/demo-mcp/README.md index b4426ec096..a9e53876d4 100644 --- a/apps/demo-mcp/README.md +++ b/apps/demo-mcp/README.md @@ -12,6 +12,52 @@ bun run apps/demo-mcp/src/cli.ts import reddit --file ./export_username_date.zip This creates a SQLite database at `.data/reddit.db` with your Reddit posts, comments, and other data. +## Plaintext export/import (Markdown) + +Export database rows to deterministic plaintext files under `vault/<adapter>/...` using Markdown (with frontmatter): + +```bash +bun run apps/demo-mcp/src/cli.ts export-fs reddit --db ./.data/reddit.db --repo . +``` + +Import files from the repo back into the database: + +```bash +bun run apps/demo-mcp/src/cli.ts import-fs reddit --db ./.data/reddit.db --repo . +``` + +Notes: + +- Files are Markdown only, written under `vault/<adapter>/<table>/<id>.md`. + +## Try it + +1) Import your Reddit export into a local DB + +```bash +bun run apps/demo-mcp/src/cli.ts import reddit --file ./export_username_date.zip --db ./.data/reddit.db +``` + +2) Export DB rows to Markdown files in your repo + +```bash +bun run apps/demo-mcp/src/cli.ts export-fs reddit --db ./.data/reddit.db --repo . +``` + +You should see Markdown files under `vault/reddit/<table>/...`. To re-import from files into the DB:
+
+```bash
+bun run apps/demo-mcp/src/cli.ts import-fs reddit --db ./.data/reddit.db --repo .
+```
+
+### CLI usage
+
+```bash
+bun run apps/demo-mcp/src/cli.ts --help
+# or
+bun run apps/demo-mcp/src/cli.ts help
+```
+
 ## MCP Integration with Claude Code Once you have imported your data, you can connect the database to Claude Code for natural language querying.
diff --git a/apps/demo-mcp/package.json b/apps/demo-mcp/package.json index 387ce0e3a1..d061a00377 100644 --- a/apps/demo-mcp/package.json +++ b/apps/demo-mcp/package.json @@ -5,13 +5,16 @@ "type": "module", "description": "Minimal CLI to import Reddit export ZIP into a local LibSQL DB using the Reddit adapter", "scripts": { - "dev": "bun run src/cli.ts import", - "import": "bun run src/cli.ts import", - "serve": "bun run src/cli.ts serve", - "check": "tsc --noEmit" + "dev": "bun run src/cli.ts import reddit", + "import": "bun run src/cli.ts import reddit", + "export-fs": "bun run src/cli.ts export-fs reddit", + "import-fs": "bun run src/cli.ts import-fs reddit", + "sync": "bun run src/cli.ts sync reddit", + "serve": "bun run src/cli.ts serve" }, "dependencies": { "@libsql/client": "^0.11.0", + "@repo/vault-core": "workspace:*", "drizzle-orm": "catalog:" } }
diff --git a/apps/demo-mcp/src/cli.ts b/apps/demo-mcp/src/cli.ts index 50eafd6cdc..8095f8d127 100644 --- a/apps/demo-mcp/src/cli.ts +++ b/apps/demo-mcp/src/cli.ts @@ -1,17 +1,21 @@ #!/usr/bin/env bun /** - * Minimal CLI to import a Reddit export ZIP into a local LibSQL database. + * Minimal CLI for the Reddit demo adapter. + * * Commands: - * - import [--file <path>] [--db <path>] - * - serve [--db <path>] (stub) + * - import <adapter> [--file <path>] [--db <path>] + * - export-fs <adapter> [--db <path>] [--repo <dir>] (Markdown only) + * - import-fs <adapter> [--db <path>] [--repo <dir>] (Markdown only) + * - serve [--db <path>] (stub) * - * Defaults: - * --file defaults to ./export_rocket_scientist2_20250811.zip (cwd) - * --db defaults to ./.data/reddit.db (cwd) + * Defaults (if not provided): + * --file ./export_rocket_scientist2_20250811.zip (relative to cwd) + * --db ./.data/reddit.db (relative to cwd) + * --repo . (current working directory) * - * DATABASE_URL (optional): - * If set, overrides the db URL entirely (e.g., libsql://..., file:/abs/path.db). + * Environment: + * DATABASE_URL (optional) overrides the db URL entirely (e.g., libsql://..., file:/abs/path.db).
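+ *
+ * Example invocations (mirroring the README above):
+ *   bun run apps/demo-mcp/src/cli.ts export-fs reddit --db ./.data/reddit.db --repo .
+ *   bun run apps/demo-mcp/src/cli.ts import-fs reddit --db ./.data/reddit.db --repo .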
 */ import fs from 'node:fs/promises'; @@ -19,15 +23,16 @@ import path from 'node:path'; import { fileURLToPath } from 'node:url'; import { createClient } from '@libsql/client'; import type { Adapter } from '@repo/vault-core'; -import { Vault } from '@repo/vault-core'; +import { createVault, defaultConvention } from '@repo/vault-core'; +import { markdownFormat } from '@repo/vault-core/codecs'; import { drizzle } from 'drizzle-orm/libsql'; -import { migrate } from 'drizzle-orm/libsql/migrator'; // ------------------------------------------------------------- type CLIArgs = { _: string[]; // positional file?: string; db?: string; + repo?: string; }; function parseArgs(argv: string[]): CLIArgs { @@ -42,6 +47,10 @@ out.db = argv[++i]; } else if (a.startsWith('--db=')) { out.db = a.slice('--db='.length); + } else if (a === '--repo') { + out.repo = argv[++i]; + } else if (a.startsWith('--repo=')) { + out.repo = a.slice('--repo='.length); } else if (!a.startsWith('-')) { out._.push(a); } @@ -56,13 +65,30 @@ const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); const repoRoot = path.resolve(__dirname, '../../..'); // apps/demo-mcp/src -> repo root -function resolveZipPath(p?: string): string { +function getBinPath(): string { + const rel = path.relative(process.cwd(), __filename); + return rel || __filename; +} + +function printHelp(): void { + const bin = getBinPath(); + console.log( + `Usage:\n bun run ${bin} <command> [options]\n\nCommands:\n import Import a Reddit export ZIP into the database\n export-fs Export DB rows to Markdown files under vault/<adapter>/...\n import-fs Import Markdown files from vault/<adapter>/... into the DB\n\nOptions:\n --file <path> Path to Reddit export ZIP (import only)\n --db <path> Path to SQLite DB file (default: ./.data/reddit.db or DATABASE_URL)\n --repo <dir> Repo root for plaintext I/O (default: .)\n -h, --help Show this help\n\nNotes:\n - Files are Markdown only, written under vault/<adapter>/<table>/<id>.md
\n - DATABASE_URL, if set, overrides --db entirely.\n`, ); } +function resolveZipPath(p: string): string { const candidate = p ?? './export_rocket_scientist2_20250811.zip'; return path.resolve(process.cwd(), candidate); } -function resolveDbFile(p?: string): string { - const candidate = p ?? './.data/reddit.db'; +function resolveDbFile(p: string): string { + const candidate = p; return path.resolve(process.cwd(), candidate); } + +function resolveRepoDir(p?: string): string { + const candidate = p ?? '.'; + return path.resolve(process.cwd(), candidate); } @@ -85,31 +111,14 @@ function toDbUrl(dbFileAbs: string): string { } // ------------------------------------------------------------- -// Import command +// Helpers to work with new core API // ------------------------------------------------------------- -async function cmdImport(args: CLIArgs, adapterID: string) { - const zipPath = resolveZipPath(args.file); - const dbFile = resolveDbFile(args.db); - const dbUrl = toDbUrl(dbFile); - - // Prepare DB and run migrations - await ensureDirExists(dbFile); - const client = createClient({ url: dbUrl }); - const rawDb = drizzle(client); - // Cast libsql drizzle DB to the generic BaseSQLiteDatabase shape expected by Vault - const db = rawDb; - - // Read input once (adapters may ignore if not applicable) - const data = await fs.readFile(zipPath); - const blob = new Blob([new Uint8Array(data)], { type: 'application/zip' }); - - // Build adapter instances, ensuring migrations path is absolute per adapter package - let adapter: Adapter | undefined; - - // This is just patch code, don't look too closely! - const keys = await fs.readdir( - path.resolve(repoRoot, 'packages/vault-core/src/adapters'), +async function findAdapter(adapterID: string): Promise<Adapter> { + const adaptersDir = path.resolve( + repoRoot, + 'packages/vault-core/src/adapters', ); + const keys = await fs.readdir(adaptersDir); for (const key of keys) { const modulePath = import.meta.resolve( `../../../packages/vault-core/src/adapters/${key}`, @@ -117,61 +126,174 @@ const mod = (await import(modulePath)) as Record<string, unknown>; for (const func of Object.values(mod)) { if (typeof func !== 'function') continue; - const a = func(); + try { + const a = func(); + if (a && typeof a === 'object' && 'id' in a && a.id === adapterID) { + return a as Adapter; + } + } catch { + // ignore factory functions that require params or throw + } + } + } + throw new Error(`Could not find adapter for key ${adapterID}`); +} + +async function writeFilesToRepo( + repoDir: string, + files: Map<string, File>, +): Promise<number> { + let count = 0; + for (const [relPath, file] of files) { + const absPath = path.resolve(repoDir, relPath); + await ensureDirExists(absPath); + const text = await file.text(); + await fs.writeFile(absPath, text, 'utf8'); + count++; + } + return count; +} + +async function collectFilesFromRepo( + repoDir: string, +): Promise<Map<string, File>> { + const root = path.resolve(repoDir, 'vault'); + const out = new Map<string, File>(); - // TODO - if (a && typeof a === 'object' && 'id' in a && a.id === adapterID) { - adapter = a as Adapter; + async function walk(dir: string) { + let entries: Array<import('node:fs').Dirent>; + try { + entries = await fs.readdir(dir, { withFileTypes: true }); + } catch { + return; + } + for (const entry of entries) { + const full = path.join(dir, entry.name); + if (entry.isDirectory()) { + await walk(full); + } else if (entry.isFile()) { + const relFromRepo = path + .relative(repoDir, full) + .split(path.sep) + .join('/'); + const text =
await fs.readFile(full, 'utf8'); + const f = new File([text], entry.name, { type: 'text/plain' }); + out.set(relFromRepo, f); } } } - if (!adapter) throw new Error(`Could not find adapter for key ${adapterID}`); + await walk(root); + return out; +} + +// ------------------------------------------------------------- +// Import command (ZIP ingest via adapter ingestor) +// ------------------------------------------------------------- +async function cmdImport(args: CLIArgs, adapterID: string) { + const { file, db } = args; + if (!file) throw new Error('--file is required for import command'); + if (!db) throw new Error('--db is required for import command'); + + const zipPath = resolveZipPath(file); + const dbFile = resolveDbFile(db); + const dbUrl = toDbUrl(dbFile); + + // Prepare DB + await ensureDirExists(dbFile); + const client = createClient({ url: dbUrl }); + const rawDb = drizzle(client); + + // Read ZIP and wrap in File for bun runtime + const data = await fs.readFile(zipPath); + const blob = new Blob([new Uint8Array(data)], { type: 'application/zip' }); + const zipFile = new File([blob], path.basename(zipPath), { + type: 'application/zip', + }); - // Initialize Vault (runs migrations implicitly) - const vault = await Vault.create({ + // Resolve adapter and create vault + const adapter = await findAdapter(adapterID); + const vault = createVault({ adapters: [adapter], - database: db, - migrateFunc: migrate, + // @ts-expect-error works but slight type mismatch + database: rawDb, }); - const summary = await vault.importBlob(blob, adapterID); - for (const r of summary.reports) { - console.log(`\n=== Adapter: ${r.adapter} ===`); - printCounts(r.counts); - } - console.log(`\nAll adapters complete. DB path: ${dbFile}`); + // Ingest data through adapter's ingestor + await vault.ingestData({ adapter, file: zipFile }); - vault.getCurrentLayout(); + console.log( + `\nIngest complete for adapter '${adapterID}'. DB path: ${dbFile}`, + ); } -function printCounts(parsedOrCounts: Record<string, unknown>) { - const entries: [string, number][] = Object.entries(parsedOrCounts).map( - ([k, v]) => [ - k, - typeof v === 'number' ? v : Array.isArray(v) ?
v.length : 0, - ], - ); - const maxKey = Math.max(...entries.map(([k]) => k.length), 10); - for (const [k, n] of entries.sort((a, b) => a[0].localeCompare(b[0]))) { - console.log(`${k.padEnd(maxKey, ' ')} : ${n}`); - } +// ------------------------------------------------------------- +// Export DB -> Files (Markdown only) +// ------------------------------------------------------------- +async function cmdExportFs(args: CLIArgs, adapterID: string) { + const { db } = args; + if (!db) throw new Error('--db is required for export-fs command'); + + const dbFile = resolveDbFile(db); + const dbUrl = toDbUrl(dbFile); + const repoDir = resolveRepoDir(args.repo); + + await ensureDirExists(dbFile); + const client = createClient({ url: dbUrl }); + const rawDb = drizzle(client); + + // Resolve adapter and create vault + const adapter = await findAdapter(adapterID); + const vault = createVault({ + adapters: [adapter], + // @ts-expect-error works but slight type mismatch + database: rawDb, + }); + + // Export files as Map using markdown codec and default conventions + const files = await vault.exportData({ + adapterIDs: [adapterID], + codec: markdownFormat, + conventions: defaultConvention(), + }); + + const n = await writeFilesToRepo(repoDir, files); + console.log(`Exported ${n} files to ${repoDir}/vault/${adapterID}`); } // ------------------------------------------------------------- -// Serve command (stub) +// Import Files -> DB (Markdown only) // ------------------------------------------------------------- -async function cmdServe(args: CLIArgs) { - const dbFile = resolveDbFile(args.db); +async function cmdImportFs(args: CLIArgs, adapterID: string) { + const { db } = args; + if (!db) throw new Error('--db is required for import-fs command'); + + const dbFile = resolveDbFile(db); const dbUrl = toDbUrl(dbFile); + const repoDir = resolveRepoDir(args.repo); + + await ensureDirExists(dbFile); + const client = createClient({ url: dbUrl }); + const rawDb = drizzle(client); + + // Resolve adapter and create vault + const adapter = await findAdapter(adapterID); + const vault = createVault({ + adapters: [adapter], + // @ts-expect-error works but slight type mismatch + database: rawDb, + }); + + // Read files under repoDir/vault and import via markdown codec + const files = await collectFilesFromRepo(repoDir); + await vault.importData({ + files, + codec: markdownFormat, + }); - console.log('Serve is not implemented in this minimal demo.'); console.log( - 'Intended behavior: start an MCP server sourced by the adapter and DB.', + `Imported files from ${repoDir}/vault/${adapterID} into DB ${dbFile}`, ); - console.log(`DB path: ${dbFile}`); - console.log(`DB URL: ${dbUrl}`); - console.log('Exiting.'); } // ------------------------------------------------------------- @@ -181,24 +303,36 @@ async function main() { const argv = process.argv.slice(2); const args = parseArgs(argv); - const command = args._.at(0) ?? 'import'; + // Global help + if (argv.includes('--help') || argv.includes('-h') || args._[0] === 'help') { + printHelp(); + return; + } + + const command = args._[0] ?? 'import'; switch (command) { case 'import': { - const adapter = args._[1]; + // Default to 'reddit' for this demo if adapter not provided + const adapter = args._[1] ?? 'reddit'; await cmdImport(args, adapter); } break; - case 'serve': - await cmdServe(args); + case 'export-fs': + { + const adapter = args._[1] ?? 'reddit'; + await cmdExportFs(args, adapter); + } + break; + case 'import-fs': + { + const adapter = args._[1] ?? 
'reddit'; + await cmdImportFs(args, adapter); + } break; default: console.error(`Unknown command: ${command}`); - console.error('Usage:'); - console.error( - ' bun run src/cli.ts import [--file ] [--db ]', - ); - console.error(' bun run src/cli.ts serve [--db ]'); + printHelp(); process.exit(1); } } diff --git a/apps/vault-demo/.gitignore b/apps/vault-demo/.gitignore new file mode 100644 index 0000000000..3b462cb0c4 --- /dev/null +++ b/apps/vault-demo/.gitignore @@ -0,0 +1,23 @@ +node_modules + +# Output +.output +.vercel +.netlify +.wrangler +/.svelte-kit +/build + +# OS +.DS_Store +Thumbs.db + +# Env +.env +.env.* +!.env.example +!.env.test + +# Vite +vite.config.js.timestamp-* +vite.config.ts.timestamp-* diff --git a/apps/vault-demo/.npmrc b/apps/vault-demo/.npmrc new file mode 100644 index 0000000000..b6f27f1359 --- /dev/null +++ b/apps/vault-demo/.npmrc @@ -0,0 +1 @@ +engine-strict=true diff --git a/apps/vault-demo/README.md b/apps/vault-demo/README.md new file mode 100644 index 0000000000..12e018eb10 --- /dev/null +++ b/apps/vault-demo/README.md @@ -0,0 +1,71 @@ +# Vault Demo (SvelteKit) + +Overview + +- The demo shows how independent adapters can be composed into one runtime Vault: + - Import/Export of adapter data + - Reddit GDPR upload with entity suggestions → user-curated Entity Index import + - Notes creation with entity linking + - Cross-adapter views: Dashboard, Entities, Notes +- Adapters are independent; the app composes them through runtime joins exposed by [getQueryInterface()](packages/vault-core/src/core/vault.ts:317). + +Quick start (Bun) + +- Prerequisite: Bun installed. +- From repo root: + +```sh +bun install +bun run dev --filter=vault-demo +``` + +- Open http://localhost:5173 +- Note: The demo uses an in-memory DB with a vault singleton at [apps/vault-demo/src/lib/vault/singleton.ts](apps/vault-demo/src/lib/vault/singleton.ts) so data persists across routes during a single browser session. + +Key flows + +- Import/Export + - Visit /import-export + - Import: select a folder or multiple files exported by Vault and choose the adapter; the page calls [importData()](packages/vault-core/src/core/vault.ts:176) using [jsonFormat](packages/vault-core/src/codecs/json.ts:3). + - Export: click Export to get a list of files; download per file. The page uses [exportData()](packages/vault-core/src/core/vault.ts:116). + +- Reddit GDPR upload + suggestions → Entity Index import + - Visit /reddit-upload + - Ingest a Reddit file via [ingestData()](packages/vault-core/src/core/vault.ts:284) with [redditAdapter()](packages/vault-core/src/adapters/reddit/src/adapter.ts:12). + - Click “Suggest entities” to scan imported rows using [apps/vault-demo/src/lib/extract/redditEntities.ts](apps/vault-demo/src/lib/extract/redditEntities.ts:1) with heuristics: subreddits r/..., users u/..., URL domains. + - Select entities and import into Entity Index via [importData()](packages/vault-core/src/core/vault.ts:176) using [entityIndexAdapter()](packages/vault-core/src/adapters/entity-index/src/adapter.ts:89) validator. + +- Notes creation + entity linking + - Visit /notes/new + - Create a note with title, body, and pick entities to link; the page writes to Example Notes through [importData()](packages/vault-core/src/core/vault.ts:176) using [exampleNotesAdapter()](packages/vault-core/src/adapters/example-notes/src/adapter.ts:147). 
+ - Visit /entities and click an entity; the detail shows occurrences and “Linked Notes”, parsed from the Notes adapter’s entity_links JSON column (see [packages/vault-core/src/adapters/example-notes/src/adapter.ts](packages/vault-core/src/adapters/example-notes/src/adapter.ts:1)). + +- Dashboard + - Visit /dashboard to see per-adapter table row counts aggregated at runtime via [getQueryInterface()](packages/vault-core/src/core/vault.ts:317). + +Architecture notes + +- Vault wiring is centralized in [apps/vault-demo/src/lib/vault/client.ts](apps/vault-demo/src/lib/vault/client.ts:1) using [createVault()](packages/vault-core/src/core/vault.ts:31). +- The demo uses an in-memory MockDrizzle at [apps/vault-demo/src/lib/vault/mockDrizzle.ts](apps/vault-demo/src/lib/vault/mockDrizzle.ts:1). +- Adapters + - Reddit: [packages/vault-core/src/adapters/reddit/src/adapter.ts](packages/vault-core/src/adapters/reddit/src/adapter.ts:1) + - Entity Index: [packages/vault-core/src/adapters/entity-index/src/adapter.ts](packages/vault-core/src/adapters/entity-index/src/adapter.ts:1) + - Example Notes: [packages/vault-core/src/adapters/example-notes/src/adapter.ts](packages/vault-core/src/adapters/example-notes/src/adapter.ts:1) + +Data model highlights + +- Entity Index stores canonical entities and occurrences; they are user-curated in this demo, not auto-derived. +- Example Notes stores notes with entity_links as a TEXT JSON array; validators (arktype) accept string[] and serialize to DB-ready JSON. +- All export/import uses [jsonFormat](packages/vault-core/src/codecs/json.ts:3). + +Limitations + +- No persistence beyond a browser session; refresh clears data. +- Export is per-file downloads; no archive bundling. +- The Reddit heuristic extractor is intentionally simple. 
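+
+Runtime join sketch
+
+- A minimal sketch of the runtime-join idea (table names are the ones used by the remote functions later in this diff; the helper name is hypothetical, and the `entity_links` parsing mirrors notes.remote.ts):
+
+```ts
+import { eq } from 'drizzle-orm';
+import { getVault } from '$lib/server/vaultService';
+
+// Sketch: notes linked to one entity, joined at runtime across two adapters.
+export async function notesLinkedTo(entityId: string) {
+  const { db, tables } = getVault().getQueryInterface();
+  const entities = tables.entity_index.entity_index_entities;
+  const notes = tables.example_notes.example_notes_items;
+
+  const entity = (
+    await db.select().from(entities).where(eq(entities.id, entityId)).limit(1)
+  )[0];
+  if (!entity) return { entity: null, notes: [] };
+
+  // entity_links is a TEXT JSON array of entity ids.
+  const rows = await db.select().from(notes);
+  const linked = rows.filter((r) =>
+    (JSON.parse(String(r.entity_links ?? '[]')) as string[]).includes(entityId),
+  );
+  return { entity, notes: linked };
+}
+```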
+ +Test references + +- [packages/vault-core/tests/fixtures/entity-index-fixture.ts](packages/vault-core/tests/fixtures/entity-index-fixture.ts:1) +- [packages/vault-core/tests/example-notes.spec.ts](packages/vault-core/tests/example-notes.spec.ts:1) +- [packages/vault-core/tests/entity-index.spec.ts](packages/vault-core/tests/entity-index.spec.ts:1)
diff --git a/apps/vault-demo/package.json b/apps/vault-demo/package.json new file mode 100644 index 0000000000..d7008aacf5 --- /dev/null +++ b/apps/vault-demo/package.json @@ -0,0 +1,26 @@ +{ + "name": "vault-demo", + "private": true, + "version": "0.0.1", + "type": "module", + "scripts": { + "dev": "bun --bun vite dev", + "build": "bun --bun vite build", + "preview": "bun --bun vite preview", + "prepare": "svelte-kit sync || echo ''", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json" + }, + "devDependencies": { + "@repo/vault-core": "workspace:*", + "@sveltejs/adapter-auto": "^6.1.0", + "@sveltejs/kit": "catalog:", + "@sveltejs/vite-plugin-svelte": "catalog:", + "arktype": "catalog:", + "bun-types": "^1.3.0", + "drizzle-orm": "catalog:", + "svelte": "catalog:", + "svelte-check": "catalog:", + "typescript": "catalog:", + "vite": "catalog:" + } +}
diff --git a/apps/vault-demo/src/app.d.ts b/apps/vault-demo/src/app.d.ts new file mode 100644 index 0000000000..da08e6da59 --- /dev/null +++ b/apps/vault-demo/src/app.d.ts @@ -0,0 +1,13 @@ +// See https://svelte.dev/docs/kit/types#app.d.ts +// for information about these interfaces +declare global { + namespace App { + // interface Error {} + // interface Locals {} + // interface PageData {} + // interface PageState {} + // interface Platform {} + } +} + +export {};
diff --git a/apps/vault-demo/src/app.html b/apps/vault-demo/src/app.html new file mode 100644 index 0000000000..f273cc58f7 --- /dev/null +++ b/apps/vault-demo/src/app.html @@ -0,0 +1,11 @@ +<!doctype html> +<html lang="en"> +	<head> +		<meta charset="utf-8" /> +		<meta name="viewport" content="width=device-width, initial-scale=1" /> +		%sveltekit.head% +	</head> +	<body data-sveltekit-preload-data="hover">
+		<div style="display: contents">%sveltekit.body%</div>
+	</body> +</html>
diff --git a/apps/vault-demo/src/lib/export/index.ts b/apps/vault-demo/src/lib/export/index.ts new file mode 100644 index 0000000000..2914483e44 --- /dev/null +++ b/apps/vault-demo/src/lib/export/index.ts @@ -0,0 +1,15 @@ +import { ZIP } from '@repo/vault-core/utils/archive/zip'; +import { getVault, jsonFormat } from '$lib/server/vaultService'; + +export const exportZip = async () => { + const vault = getVault(); + const filesMap = await vault.exportData({ codec: jsonFormat }); + const all = await Promise.all( + filesMap + .entries() + .map(async ([path, file]) => [path, await file.bytes()] as const), + ); + const rec = Object.fromEntries(all); + const zipped = await ZIP.pack(rec); + return zipped; +};
diff --git a/apps/vault-demo/src/lib/extract/redditEntities.ts b/apps/vault-demo/src/lib/extract/redditEntities.ts new file mode 100644 index 0000000000..59a23fa2bc --- /dev/null +++ b/apps/vault-demo/src/lib/extract/redditEntities.ts @@ -0,0 +1,205 @@ +/** + * Small extraction utility: scan Reddit rows to suggest entities and occurrences. + * Heuristics: + * - Subreddits: /\br\/([A-Za-z0-9_]+)\b/ + * - Users: /\bu\/([A-Za-z0-9_-]+)\b/ + * - Domains: any "url" field parsed with new URL().hostname + */ + +export type ExtractedEntity = { + id: string; + name: string; + type: 'subreddit' | 'user' | 'domain'; + description?: string | null; + public_id?: string | null; + created_at: number; +}; + +export type ExtractedOccurrence = { + id: string; + entity_id: string; + source_adapter_id: 'reddit'; + source_table_name: string; + source_pk_json: string; + discovered_at: number; +}; + +type TablesToRows = Record<string, Record<string, unknown>[]>; + +export function extractEntitiesFromReddit(tablesToRows: TablesToRows): { + entities: ExtractedEntity[]; + occurrences: ExtractedOccurrence[]; +} { + const subredditRe = /\br\/([A-Za-z0-9_]+)\b/g; + const userRe = /\bu\/([A-Za-z0-9_-]+)\b/g; + + const entitiesByKey = new Map<string, ExtractedEntity>(); + const occurrenceIds = new Set<string>(); + const occurrences: ExtractedOccurrence[] = []; + const now = Date.now(); + + const ensureEntity = ( + type: ExtractedEntity['type'], + name: string, + ): ExtractedEntity => { + const key = `${type}|${name}`; + let ent = entitiesByKey.get(key); + if (!ent) { + ent = { + id: `${type}:${name}`, + name, + type, + description: null, + public_id: null, + created_at: now, + }; + entitiesByKey.set(key, ent); + } + return ent; + }; + + for (const [tableName, rows] of Object.entries(tablesToRows ?? {})) { + for (const row of rows ??
[]) { + // Scan string fields + for (const [field, v] of Object.entries(row)) { + if (typeof v !== 'string' || !v) continue; + + // Domain from URL fields + if (field === 'url') { + const host = safeHostname(v); + if (host) { + const ent = ensureEntity('domain', host); + pushOccurrence( + ent.id, + tableName, + row, + occurrences, + occurrenceIds, + now, + ); + } + } + + // Subreddit mentions + for (const m of v.matchAll(subredditRe)) { + const name = m[1]; + if (!name) continue; + const ent = ensureEntity('subreddit', name); + pushOccurrence( + ent.id, + tableName, + row, + occurrences, + occurrenceIds, + now, + ); + } + + // User mentions + for (const m of v.matchAll(userRe)) { + const name = m[1]; + if (!name) continue; + const ent = ensureEntity('user', name); + pushOccurrence( + ent.id, + tableName, + row, + occurrences, + occurrenceIds, + now, + ); + } + } + } + } + + return { + entities: Array.from(entitiesByKey.values()).sort(byTypeThenName), + occurrences, + }; +} + +function byTypeThenName(a: ExtractedEntity, b: ExtractedEntity): number { + if (a.type !== b.type) return a.type < b.type ? -1 : 1; + return a.name.localeCompare(b.name); +} + +function safeHostname(url: string): string | null { + try { + const u = new URL(url); + return u.hostname || null; + } catch { + return null; + } +} + +function pickStablePk(row: Record<string, unknown>): Record<string, unknown> { + // Prefer common primary keys if present + const candidates = ['id', 'permalink', 'message_id', 'username']; + for (const k of candidates) { + const v = row[k as keyof typeof row]; + if (typeof v === 'string' && v) return { [k]: v }; + if (typeof v === 'number') return { [k]: v }; + } + // Next, include a small set of stable fields if present + const stable: Record<string, string | number> = {}; + const fallbacks = ['url', 'subreddit', 'link', 'post_id', 'thread_id']; + for (const k of fallbacks) { + const v = row[k as keyof typeof row]; + if (typeof v === 'string' && v) stable[k] = v; + if (typeof v === 'number') stable[k] = v; + } + if (Object.keys(stable).length > 0) return stable; + // Final fallback: first two primitive fields in alpha key order + const prims: [string, unknown][] = Object.keys(row) + .sort() + .map((k) => [k, row[k as keyof typeof row] as unknown] as [string, unknown]) + .filter(([, v]) => typeof v === 'string' || typeof v === 'number') + .slice(0, 2); + const out: Record<string, unknown> = {}; + for (const [k, v] of prims) out[k] = v; + return out; +} + +function pushOccurrence( + entityId: string, + tableName: string, + row: Record<string, unknown>, + out: ExtractedOccurrence[], + seen: Set<string>, + now: number, +) { + const pkObj = pickStablePk(row); + const pkJson = JSON.stringify(pkObj); + const id = makeOccurrenceId(entityId, tableName, pkJson); + if (seen.has(id)) return; + seen.add(id); + out.push({ + id, + entity_id: entityId, + source_adapter_id: 'reddit', + source_table_name: tableName, + source_pk_json: pkJson, + discovered_at: now, + }); +} + +function makeOccurrenceId( + entityId: string, + table: string, + pkJson: string, +): string { + const base = `${entityId}|${table}|${pkJson}`; + const h = hashString(base); + return `occ:${h}`; +} + +function hashString(s: string): string { + // Simple 32-bit FNV-1a + let h = 0x811c9dc5 >>> 0; + for (let i = 0; i < s.length; i++) { + h ^= s.charCodeAt(i); + h = Math.imul(h, 0x01000193) >>> 0; + } + return h.toString(16); +}
diff --git a/apps/vault-demo/src/lib/remote/entityIndex.remote.ts b/apps/vault-demo/src/lib/remote/entityIndex.remote.ts new file mode 100644 index 0000000000..e528ad1d40 --- /dev/null +++
b/apps/vault-demo/src/lib/remote/entityIndex.remote.ts @@ -0,0 +1,92 @@ +import { error } from '@sveltejs/kit'; +import { eq } from 'drizzle-orm'; +import { command, query } from '$app/server'; +import { InsertEntitiesInputSchema } from '$lib/schemas/entities'; +import { type EntityRow, EntityRowSchema, IdSchema } from '$lib/schemas/notes'; +import { getVault } from '$lib/server/vaultService'; + +/** + * Command: bulk insert into the entity_index adapter tables using the Vault query interface. + */ +export const insertEntities = command( + InsertEntitiesInputSchema, + async (input) => { + const { db, tables } = getVault().getQueryInterface(); + const entitiesTable = tables.entity_index.entity_index_entities; + const occurrencesTable = tables.entity_index.entity_index_occurrences; + + const entityRows = input.entities.map((e) => ({ + id: e.id, + name: e.name ?? null, + type: e.type ?? null, + description: e.description, + public_id: e.public_id, + created_at: new Date(), + })); + + const occurrenceRows = input.occurrences.map((o) => ({ + id: o.id, + entity_id: o.entity_id, + source_adapter_id: o.source_adapter_id, + source_table_name: o.source_table_name, + source_pk_json: o.source_pk_json, + discovered_at: new Date(), + })); + + // Insert entities with conflict-ignore + if (entityRows.length > 0) + await db.insert(entitiesTable).values(entityRows).onConflictDoNothing(); + + // Insert occurrences with conflict-ignore (by PK) when supported + if (occurrenceRows.length > 0) + await db + .insert(occurrencesTable) + .values(occurrenceRows) + .onConflictDoNothing(); + + return { + ok: true as const, + inserted: { + entities: entityRows.length, + occurrences: occurrenceRows.length, + }, + }; + }, +); + +/** + * Query: list entities for selection in the "new note" form. + */ +export const getEntities = query(async () => { + const { db, tables } = getVault().getQueryInterface(); + + const entitiesTable = tables.entity_index.entity_index_entities; + + const rows = await db.select().from(entitiesTable); + + return rows; +}); + +/** + * Query: get a single entity by ID. + */ +export const getEntityById = query(IdSchema, async ({ id }) => { + const { db, tables } = getVault().getQueryInterface(); + + const entitiesTable = tables.entity_index.entity_index_entities; + + const row = await db + .select() + .from(entitiesTable) + .where(eq(entitiesTable.id, id)) + .limit(1) + .get(); + + if (!row) return error(404, 'entity not found'); + + return EntityRowSchema({ + id: row.id, + name: row.name, + type: row.type, + }) as EntityRow; +}); diff --git a/apps/vault-demo/src/lib/remote/notes.remote.ts b/apps/vault-demo/src/lib/remote/notes.remote.ts new file mode 100644 index 0000000000..1ff38522df --- /dev/null +++ b/apps/vault-demo/src/lib/remote/notes.remote.ts @@ -0,0 +1,138 @@ +import { error, redirect } from '@sveltejs/kit'; +import { eq } from 'drizzle-orm'; +import { form, query } from '$app/server'; +import { + CreateNoteInputSchema, + IdSchema, + type NoteView, + NoteViewSchema, + UpdateNoteInputSchema, +} from '$lib/schemas/notes'; +import { getVault } from '$lib/server/vaultService'; + +function parseStringArrayJson(text: unknown): string[] { + if (text == null) return []; + if (typeof text === 'string') { + const v = JSON.parse(text); + return Array.isArray(v) ? 
v.map(String) : []; + } + if (Array.isArray(text)) return text.map(String); + return []; +} + +function asEpochMs(v: unknown): number { + if (v instanceof Date) return v.getTime(); + if (typeof v === 'number') return v; + const n = Number(v); + return Number.isFinite(n) ? n : Date.now(); +} + +/** + * Query: return latest notes as an array of NoteView (parsed entity_links). + */ +export const getNotes = query(async (): Promise<NoteView[]> => { + const { db, tables } = getVault().getQueryInterface(); + + const notesTable = tables.example_notes.example_notes_items; + + const rows = await db.select().from(notesTable); + + const notes = rows + .map( + (r) => + NoteViewSchema({ + id: r.id, + title: (r.title ?? '').toString(), + body: r.body == null ? undefined : r.body.toString(), + created_at: asEpochMs(r.created_at), + entity_links: parseStringArrayJson(r.entity_links), + }) as NoteView, + ) + .sort((a, b) => b.created_at - a.created_at); + + return notes; +}); + +/** + * Form: create a new note directly via Drizzle insert. + */ +export const createNote = form(CreateNoteInputSchema, async (input) => { + const { title, body, entity_links } = input; + const trimmed = title.trim(); + if (trimmed.length === 0) throw new Error('title is required'); + + const { db, tables } = getVault().getQueryInterface(); + const notesTable = tables.example_notes.example_notes_items; + + const id = globalThis.crypto?.randomUUID(); + + await db.insert(notesTable).values({ + id, + title: trimmed, + body: body, + // tags: [], // This isn't working, not sure if I need to make sure schema is synced + // Store entity_links as canonical JSON string (TEXT) to match schema/default "[]" + entity_links: JSON.stringify(entity_links ?? []), + created_at: new Date(), + public_id: null, + }); + + return redirect(303, `/notes/${id}`); +}); + +/** + * Fetch a single note by id. Returns a NoteView or a 404 error. + */ +export const getNoteById = query(IdSchema, async ({ id }) => { + const vault = getVault(); + const { db, tables } = vault.getQueryInterface(); + const notesTable = tables.example_notes.example_notes_items; + + const rows = await db.select().from(notesTable).where(eq(notesTable.id, id)); + const row = rows?.[0]; + if (!row) return error(404, 'note not found'); + + return NoteViewSchema({ + id: row.id, + title: (row.title ?? '').toString(), + body: row.body == null ? undefined : row.body.toString(), + created_at: asEpochMs(row.created_at), + entity_links: parseStringArrayJson(row.entity_links), + }) as NoteView; +}); + +/** + * Update a note by id. Supports partial updates: title, body, entity_links + */ +export const updateNote = form( + UpdateNoteInputSchema, + async ({ id, title, body, entity_links }) => { + const { db, tables } = getVault().getQueryInterface(); + const notesTable = tables.example_notes.example_notes_items; + + const updates: Record<string, unknown> = {}; + if (typeof title === 'string') updates.title = title; + if (typeof body === 'string') updates.body = body; + if (Array.isArray(entity_links)) + updates.entity_links = JSON.stringify(entity_links); + + if (Object.keys(updates).length === 0) return { ok: true }; + + await db.update(notesTable).set(updates).where(eq(notesTable.id, id)); + + return { ok: true }; + }, +); + +/** + * Delete a note by id.
+ */ +export const deleteNote = form(IdSchema, async ({ id }) => { + const { db, tables } = getVault().getQueryInterface(); + const notesTable = tables.example_notes?.example_notes_items; + if (!notesTable) throw new Error('notes table missing'); + + await db.delete(notesTable).where(eq(notesTable.id, id)); + + return redirect(303, '/notes'); +});
diff --git a/apps/vault-demo/src/lib/remote/vault.remote.ts b/apps/vault-demo/src/lib/remote/vault.remote.ts new file mode 100644 index 0000000000..f751661ee1 --- /dev/null +++ b/apps/vault-demo/src/lib/remote/vault.remote.ts @@ -0,0 +1,43 @@ +import { redditAdapter } from '@repo/vault-core/adapters/reddit'; +import { ZIP } from '@repo/vault-core/utils/archive/zip'; +import { form, query } from '$app/server'; +import { + ImportBundleInputSchema, + IngestFileInputSchema, +} from '$lib/schemas/vault'; +import { getTableCounts, getVault, jsonFormat } from '$lib/server/vaultService'; + +/** + * Query: get per-table row counts grouped by adapter id. + */ +export const getCounts = query(async () => { + return await getTableCounts(); +}); + +/** + * Form: import a bundle of files (multi-adapter). Pairs uploaded files with + * the client-provided paths derived from directory selection. + */ +export const importBundle = form(ImportBundleInputSchema, async (input) => { + const filesInput = input.files; + const files = new Map<string, File>( + // Here `file.name` should be the relative path within the selected directory + // E.g. `vault-export/vault/entity_index/entity_index_entities/subreddit_todayilearned.json` + Object.values(filesInput).map((file) => [file.name, file] as const), + ); + await getVault().importData({ files, codec: jsonFormat }); + + return { message: `Imported ${files.size} files` }; +}); + +/** + * Form: ingest a single uploaded file for a specified adapter. + * Validate adapter via simple runtime check; assert File at runtime. + */ +export const ingest = form(IngestFileInputSchema, async (input) => { + const fdFile = input.file; + + await getVault().ingestData({ adapter: redditAdapter(), file: fdFile }); + + return { message: `Ingested file: ${fdFile.name}` }; +});
diff --git a/apps/vault-demo/src/lib/schemas/entities.ts b/apps/vault-demo/src/lib/schemas/entities.ts new file mode 100644 index 0000000000..2ba8c28a43 --- /dev/null +++ b/apps/vault-demo/src/lib/schemas/entities.ts @@ -0,0 +1,22 @@ +import { type } from 'arktype'; + +export const InsertEntitiesInputSchema = type({ + entities: type({ + id: 'string', + name: 'string | null | undefined', + type: 'string | null | undefined', + description: 'string | null | undefined', + public_id: 'string | null | undefined', + created_at: 'string | number | Date | null | undefined', + }).array(), + occurrences: type({ + id: 'string', + entity_id: 'string', + source_adapter_id: 'string', + source_table_name: 'string', + source_pk_json: 'string', + discovered_at: 'string | number | Date | null | undefined', + }).array(), +}); + +export type InsertEntitiesInput = typeof InsertEntitiesInputSchema.infer;
diff --git a/apps/vault-demo/src/lib/schemas/notes.ts b/apps/vault-demo/src/lib/schemas/notes.ts new file mode 100644 index 0000000000..9ec5de2187 --- /dev/null +++ b/apps/vault-demo/src/lib/schemas/notes.ts @@ -0,0 +1,50 @@ +// Centralized ArkType schemas and inferred types for the Notes feature. +// Single source of truth for request/response shapes.
+ +import { type } from 'arktype'; + +// Public view of a note returned to the client/UI +export const NoteViewSchema = type({ + id: 'string', + title: 'string', + body: 'string | undefined', + created_at: 'number', + entity_links: type('string[]'), +}); +export type NoteView = typeof NoteViewSchema.infer; + +// Entity rows (lightweight shape for pickers and linking) +export const EntityRowSchema = type({ + id: 'string', + name: 'string | null | undefined', + type: 'string | null | undefined', +}); +export type EntityRow = typeof EntityRowSchema.infer; + +// Create note input payload +export const CreateNoteInputSchema = type({ + title: 'string', + body: 'string', + 'entity_links?': type('string[]'), +}); +export type CreateNoteInput = typeof CreateNoteInputSchema.infer; + +// I'm not implementing partial updates at the schema level for simplicity +export const UpdateNoteInputSchema = type.and( + CreateNoteInputSchema, + type({ + id: 'string', + }), +); +export type UpdateNoteInput = typeof UpdateNoteInputSchema.infer; + +export const IdSchema = type({ + id: 'string', +}); +export type IdInput = typeof IdSchema.infer; + +// Optional list filter +export const ListNotesInputSchema = type({ + search: 'string | undefined', +}); +export type ListNotesInput = typeof ListNotesInputSchema.infer;
diff --git a/apps/vault-demo/src/lib/schemas/vault.ts b/apps/vault-demo/src/lib/schemas/vault.ts new file mode 100644 index 0000000000..4feed1ac2b --- /dev/null +++ b/apps/vault-demo/src/lib/schemas/vault.ts @@ -0,0 +1,13 @@ +import { type } from 'arktype'; + +export const IngestFileInputSchema = type({ + adapter: 'string', + file: 'File', +}); + +export const ImportBundleInputSchema = type({ + files: type('Record<string, File>'), +}); + +export type IngestFileInput = typeof IngestFileInputSchema.infer; +export type ImportBundleInput = typeof ImportBundleInputSchema.infer;
diff --git a/apps/vault-demo/src/lib/server/vaultService.ts b/apps/vault-demo/src/lib/server/vaultService.ts new file mode 100644 index 0000000000..3722b188ac --- /dev/null +++ b/apps/vault-demo/src/lib/server/vaultService.ts @@ -0,0 +1,64 @@ +import { Database } from 'bun:sqlite'; +import fs from 'node:fs'; +import path from 'node:path'; +import { createVault } from '@repo/vault-core'; +import { entityIndexAdapter } from '@repo/vault-core/adapters/entity-index'; +import { exampleNotesAdapter } from '@repo/vault-core/adapters/example-notes'; +import { redditAdapter } from '@repo/vault-core/adapters/reddit'; +import { jsonFormat } from '@repo/vault-core/codecs'; +import { drizzle } from 'drizzle-orm/bun-sqlite'; + +// Ensure data directory and DB path exist (stable across CWDs) +// Priority: +// 1) VAULT_DB_PATH (explicit override) +// 2) If CWD is apps/vault-demo -> use ".data/vault.sqlite" +// 3) Otherwise (running from monorepo root) -> "apps/vault-demo/.data/vault.sqlite" +const VAULT_DB_PATH = process.env.VAULT_DB_PATH; +const cwd = process.cwd().replace(/\\/g, '/'); +const isAppCwd = + cwd.endsWith('/apps/vault-demo') || cwd.includes('/apps/vault-demo/'); +const computedDir = isAppCwd + ? path.resolve(cwd, '.data') + : path.resolve(cwd, 'apps/vault-demo/.data'); +const dataDir = VAULT_DB_PATH ? path.dirname(VAULT_DB_PATH) : computedDir; +fs.mkdirSync(dataDir, { recursive: true }); +const dbPath = VAULT_DB_PATH ?? path.join(dataDir, 'vault.sqlite'); +// Optional debug breadcrumb to confirm which file is used +if ( + (process.env.VAULT_DEBUG ?? '').toLowerCase().includes('migrations') || + ['1', 'true', 'all'].includes((process.env.VAULT_DEBUG ??
'').toLowerCase()) +) { + console.info('[vault-demo:vaultService] using sqlite dbPath=', dbPath); +} + +export function getVault() { + const sqlite = new Database(dbPath, { create: true, readwrite: true }); + const db = drizzle(sqlite); + const v = createVault({ + database: db, + adapters: [redditAdapter(), entityIndexAdapter(), exampleNotesAdapter()], + }); + return v; +} + +// Helper to compute table row counts for each adapter/table +export async function getTableCounts() { + const { db, tables } = getVault().getQueryInterface(); + + const result: Record<string, Record<string, number>> = {}; + for (const [adapterId, schema] of Object.entries(tables)) { + result[adapterId] = {}; + for (const [tableName, table] of Object.entries(schema)) { + try { + const rows = await db.select().from(table); + result[adapterId][tableName] = Array.isArray(rows) ? rows.length : 0; + } catch { + // Skip non-table exports if any exist + } + } + } + return result; +} + +// Re-export codec for convenience in remote functions +export { jsonFormat };
diff --git a/apps/vault-demo/src/routes/+layout.svelte b/apps/vault-demo/src/routes/+layout.svelte new file mode 100644 index 0000000000..c735fba1ef --- /dev/null +++ b/apps/vault-demo/src/routes/+layout.svelte @@ -0,0 +1,16 @@ +<!-- layout markup (16 lines) lost in extraction -->
diff --git a/apps/vault-demo/src/routes/+page.svelte b/apps/vault-demo/src/routes/+page.svelte new file mode 100644 index 0000000000..f11d7d55bb --- /dev/null +++ b/apps/vault-demo/src/routes/+page.svelte @@ -0,0 +1,76 @@ + + +

+<!-- Svelte markup lost in extraction; surviving page copy: -->
+<!--
+  h1: Vault Demo
+  This demo shows a server-backed Vault using Bun SQLite (Drizzle) with adapters for Reddit, Entity Index, and Example Notes.
+  Quickstart:
+    1. Reddit upload: go to Reddit Upload; choose your exported Reddit ZIP and click “Ingest”; click “Suggest entities” to analyze exported rows; select the entities you want, then click “Import selected into Entity Index”. This writes directly to the Entity Index tables via a server remote command.
+    2. Browse entities: view Entity Index data at Entities; click an entity to view its detail page and linked notes.
+    3. Create and edit notes: create a note at New Note, then edit it at Notes; use the entity picker to link notes to entities (stored as JSON in the notes table).
+    4. Import/Export (optional): visit Import/Export to serialize/deserialize adapter data bundles. The Reddit entities flow now writes directly; Import is reserved for bundle I/O.
+  Notes:
+    - Server DB file: apps/vault-demo/.data/vault.sqlite (auto-created).
+    - Row counts for Reddit and Entity Index are visible on the Reddit Upload page.
+  Shortcuts: (links lost)
+-->
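The demo's HTTP API follows in the next files. A minimal sketch of driving the ingest route below from a script; the port assumes Vite's default dev server, and the ZIP path is hypothetical:

```ts
// Sketch: upload a Reddit export ZIP to the ingest endpoint and print the returned counts.
const bytes = await Bun.file('./export_username_date.zip').arrayBuffer(); // hypothetical path

const form = new FormData();
form.set('file', new File([bytes], 'export.zip', { type: 'application/zip' }));

const res = await fetch('http://localhost:5173/api/vault/ingest?adapter=reddit', {
  method: 'POST',
  body: form,
});
console.log(await res.json()); // { ok: true, counts: { ... } } on success
```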

diff --git a/apps/vault-demo/src/routes/api/vault/counts/+server.ts b/apps/vault-demo/src/routes/api/vault/counts/+server.ts new file mode 100644 index 0000000000..40a92b465e --- /dev/null +++ b/apps/vault-demo/src/routes/api/vault/counts/+server.ts @@ -0,0 +1,20 @@ +import type { RequestHandler } from '@sveltejs/kit'; +import { getTableCounts } from '$lib/server/vaultService'; + +const json = (data: unknown, init?: ResponseInit) => + new Response(JSON.stringify(data), { + headers: { 'content-type': 'application/json' }, + ...init, + }); + +export const GET: RequestHandler = async () => { + try { + const counts = await getTableCounts(); + return json({ ok: true, counts }); + } catch (err) { + return json( + { ok: false, error: err instanceof Error ? err.message : String(err) }, + { status: 500 }, + ); + } +};
diff --git a/apps/vault-demo/src/routes/api/vault/export/+server.ts b/apps/vault-demo/src/routes/api/vault/export/+server.ts new file mode 100644 index 0000000000..8640bda2b6 --- /dev/null +++ b/apps/vault-demo/src/routes/api/vault/export/+server.ts @@ -0,0 +1,28 @@ +import type { RequestHandler } from '@sveltejs/kit'; +import { getVault, jsonFormat } from '$lib/server/vaultService'; + +const json = (data: unknown, init?: ResponseInit) => + new Response(JSON.stringify(data), { + headers: { 'content-type': 'application/json' }, + ...init, + }); + +export const GET: RequestHandler = async () => { + try { + const vault = getVault(); + const filesMap = await vault.exportData({ codec: jsonFormat }); + + const files: Array<{ path: string; text: string; mimeType: string }> = []; + for (const [path, file] of filesMap.entries()) { + const text = await file.text(); + files.push({ path, text, mimeType: file.type || 'application/json' }); + } + + return json({ ok: true, files }); + } catch (err) { + return json( + { ok: false, error: err instanceof Error ? err.message : String(err) }, + { status: 500 }, + ); + } +};
diff --git a/apps/vault-demo/src/routes/api/vault/import/+server.ts b/apps/vault-demo/src/routes/api/vault/import/+server.ts new file mode 100644 index 0000000000..d2c6d42906 --- /dev/null +++ b/apps/vault-demo/src/routes/api/vault/import/+server.ts @@ -0,0 +1,43 @@ +import type { RequestHandler } from '@sveltejs/kit'; +import { getVault, jsonFormat } from '$lib/server/vaultService'; + +const json = (data: unknown, init?: ResponseInit) => + new Response(JSON.stringify(data), { + headers: { 'content-type': 'application/json' }, + ...init, + }); + +type ImportFile = { path: string; text: string; mimeType?: string }; +type ImportBody = { files: ImportFile[] }; + +export const POST: RequestHandler = async ({ request }) => { + try { + const body = (await request.json()) as Partial<ImportBody>; + const filesInput = body?.files; + if (!Array.isArray(filesInput)) { + return json( + { ok: false, error: 'files array required' }, + { status: 400 }, + ); + } + + const files = new Map<string, File>(); + for (const f of filesInput) { + if (!f || typeof f.path !== 'string' || typeof f.text !== 'string') + continue; + const filename = f.path.split('/').pop() || 'file.json'; + const file = new File([f.text], filename, { + type: f.mimeType ?? 'application/json', + }); + files.set(f.path, file); + } + + await getVault().importData({ files, codec: jsonFormat }); + return json({ ok: true }); + } catch (err) { + return json( + { ok: false, error: err instanceof Error ?
err.message : String(err) }, + { status: 500 }, + ); + } +};
diff --git a/apps/vault-demo/src/routes/api/vault/ingest/+server.ts b/apps/vault-demo/src/routes/api/vault/ingest/+server.ts new file mode 100644 index 0000000000..615e85a411 --- /dev/null +++ b/apps/vault-demo/src/routes/api/vault/ingest/+server.ts @@ -0,0 +1,53 @@ +import type { Adapter } from '@repo/vault-core'; +import { entityIndexAdapter } from '@repo/vault-core/adapters/entity-index'; +import { exampleNotesAdapter } from '@repo/vault-core/adapters/example-notes'; +import { redditAdapter } from '@repo/vault-core/adapters/reddit'; +import type { RequestHandler } from '@sveltejs/kit'; +import { getTableCounts, getVault } from '$lib/server/vaultService'; + +const json = (data: unknown, init?: ResponseInit) => + new Response(JSON.stringify(data), { + headers: { 'content-type': 'application/json' }, + ...init, + }); + +export const POST: RequestHandler = async ({ request, url }) => { + try { + const adapterId = url.searchParams.get('adapter'); + if (!adapterId) + return json( + { ok: false, error: 'adapter query param required' }, + { status: 400 }, + ); + + const form = await request.formData(); + const file = form.get('file'); + if (!(file instanceof File)) + return json({ ok: false, error: 'file missing' }, { status: 400 }); + + const factories: Record<string, () => Adapter> = { + reddit: redditAdapter, + entity_index: entityIndexAdapter, + example_notes: exampleNotesAdapter, + }; + const factory = factories[adapterId]; + if (!factory) + return json( + { ok: false, error: `unknown adapter '${adapterId}'` }, + { status: 400 }, + ); + + const adapter = factory(); + + const vault = getVault(); + await vault.ingestData({ adapter, file }); + + const counts = await getTableCounts(); + return json({ ok: true, counts }); + } catch (err) { + return json( + { ok: false, error: err instanceof Error ? err.message : String(err) }, + { status: 500 }, + ); + } +};
diff --git a/apps/vault-demo/src/routes/dashboard/+page.svelte b/apps/vault-demo/src/routes/dashboard/+page.svelte new file mode 100644 index 0000000000..8bb0f58010 --- /dev/null +++ b/apps/vault-demo/src/routes/dashboard/+page.svelte @@ -0,0 +1,58 @@

+<!-- Svelte markup lost in extraction; surviving copy/logic: -->
+<!--
+  h1: Dashboard
+  When adapterIds is empty: "No adapters found."
+  Otherwise, one section per adapterId: a heading with the adapter id, then either
+  "No tables." or a list of "{c.table}: {c.count}" for each c in countsByAdapter[adapterId].
+-->
diff --git a/apps/vault-demo/src/routes/entities/+page.svelte b/apps/vault-demo/src/routes/entities/+page.svelte new file mode 100644 index 0000000000..2f4e1313de --- /dev/null +++ b/apps/vault-demo/src/routes/entities/+page.svelte @@ -0,0 +1,58 @@

+<!-- Svelte markup lost in extraction; surviving copy/logic: -->
+<!--
+  h1: Entities
+  When entities is empty: "No entities."
+  Otherwise a table with columns Name / Type / Created / ID, rows sorted with
+  entities.toSorted((a, b) => (a.name ?? '').localeCompare(b.name ?? '')); each row shows
+  {e.name ?? '(no name)'} (linked to the detail page when e.id is set), {e.type ?? ''},
+  {humanizeCreatedAt((e as any).created_at)}, and {e.id}.
+-->
diff --git a/apps/vault-demo/src/routes/entities/[id]/+page.svelte b/apps/vault-demo/src/routes/entities/[id]/+page.svelte new file mode 100644 index 0000000000..b8dac8f432 --- /dev/null +++ b/apps/vault-demo/src/routes/entities/[id]/+page.svelte @@ -0,0 +1,62 @@

+<!-- Svelte markup lost in extraction; surviving copy/logic: -->
+<!--
+  h1: Entity Detail
+  Header: {entity.name ?? '(no name)'} ({entity.id})
+  Fields: Type: {entity.type ?? ''}; Public ID: {(entity as any).public_id ?? ''};
+  Created: {humanizeDate((entity as any).created_at)}; Description: {(entity as any).description ?? ''}
+  "Linked Notes" section: "No linked notes." when linkedNotes is empty, otherwise a list of the linked notes.
+-->
diff --git a/apps/vault-demo/src/routes/import-export/+page.svelte b/apps/vault-demo/src/routes/import-export/+page.svelte new file mode 100644 index 0000000000..47e8aeb76b --- /dev/null +++ b/apps/vault-demo/src/routes/import-export/+page.svelte @@ -0,0 +1,40 @@

+<!-- Svelte markup lost in extraction; surviving copy/logic: -->
+<!--
+  h1: Import / Export
+  Import section: a file/folder picker bound to the importBundle form; renders
+  {issue.message} for each entry in importBundle.fields.allIssues() and
+  {importBundle.result.message} when a result is present.
+  Export section: a control that downloads the exported bundle.
+  Footer: "Back to Home" · "Go to Reddit Upload"
+-->
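A sketch of round-tripping a bundle through the JSON endpoints defined above (`/api/vault/export` and `/api/vault/import`); the host and port are assumptions:

```ts
// Sketch: export every adapter's rows as JSON files, then import the same bundle back.
const base = 'http://localhost:5173';

const exported = await fetch(`${base}/api/vault/export`).then((r) => r.json());
// exported.files: Array<{ path: string; text: string; mimeType: string }>

const res = await fetch(`${base}/api/vault/import`, {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ files: exported.files }),
});
console.log(await res.json()); // { ok: true } on success
```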

diff --git a/apps/vault-demo/src/routes/import-export/export/+server.ts b/apps/vault-demo/src/routes/import-export/export/+server.ts new file mode 100644 index 0000000000..ecc21b025b --- /dev/null +++ b/apps/vault-demo/src/routes/import-export/export/+server.ts @@ -0,0 +1,12 @@ +import { exportZip } from '$lib/export'; + +export const GET = async () => { + const zipped = await exportZip(); + // @ts-expect-error @types/node, I want to throw you in the sun + return new Response(zipped.buffer, { + headers: { + 'Content-Type': 'application/zip', + 'Content-Disposition': 'attachment; filename="vault-export.zip"', + }, + }); +}; diff --git a/apps/vault-demo/src/routes/notes/+page.svelte b/apps/vault-demo/src/routes/notes/+page.svelte new file mode 100644 index 0000000000..58d747cf8f --- /dev/null +++ b/apps/vault-demo/src/routes/notes/+page.svelte @@ -0,0 +1,96 @@ + + +

+<!-- Svelte markup lost in extraction; surviving copy/logic: -->
+<!--
+  h1: Notes
+  States: "Loading…" while !isLoaded; {errorText} on error; "No notes found."
+  when notes is empty; otherwise the notes list (markup lost).
+-->
diff --git a/apps/vault-demo/src/routes/notes/[id]/+page.svelte b/apps/vault-demo/src/routes/notes/[id]/+page.svelte new file mode 100644 index 0000000000..d6e26f78ae --- /dev/null +++ b/apps/vault-demo/src/routes/notes/[id]/+page.svelte @@ -0,0 +1,84 @@

+<!-- Svelte markup lost in extraction; surviving copy/logic: -->
+<!--
+  h1: Edit Note
+  Form bound to updateNote with title/body fields and a "Link to Entities" picker:
+  "No entities available." when entities is empty, otherwise one checkbox per entity.
+  Shows {updateNote.fields.allIssues()?.[0].message} when validation issues exist;
+  remaining form controls lost.
+-->
diff --git a/apps/vault-demo/src/routes/notes/new/+page.svelte b/apps/vault-demo/src/routes/notes/new/+page.svelte new file mode 100644 index 0000000000..50f99c97eb --- /dev/null +++ b/apps/vault-demo/src/routes/notes/new/+page.svelte @@ -0,0 +1,69 @@

+<!-- Svelte markup lost in extraction; surviving copy/logic: -->
+<!--
+  h1: New Note
+  Form bound to createNote with title/body fields, a "Link to Entities" picker
+  ("No entities available." when entities is empty, otherwise one checkbox per entity),
+  a Cancel link, and {issue.message} for each entry in createNote.fields.allIssues().
+-->
diff --git a/apps/vault-demo/src/routes/reddit-upload/+page.svelte b/apps/vault-demo/src/routes/reddit-upload/+page.svelte new file mode 100644 index 0000000000..2090c953a7 --- /dev/null +++ b/apps/vault-demo/src/routes/reddit-upload/+page.svelte @@ -0,0 +1,348 @@

+<!-- Svelte markup (348 lines, including the page's script logic) lost in extraction; surviving copy/logic: -->
+<!--
+  h1: Reddit Upload
+  Upload form for a Reddit export file, plus a "Suggest entities" action shown once suggestions exist.
+  Status banner: {status.message} when status.kind is 'success' or 'error'.
+  When suggestions exist, a "Suggested Entities" section with three checkbox groups filtered by
+  e.type: Subreddits ('subreddit'), Users ('user'), and Domains ('domain').
+  "Reddit Tables" and "Entity Index Tables" sections list per-table row counts when
+  counts / entityIndexCounts are non-empty.
+  Footer: "Back to Home" · "Go to Import/Export"
+-->
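The "Suggest entities" action on this page feeds ingested rows into `extractEntitiesFromReddit` from `$lib/extract/redditEntities`. A minimal sketch with a hypothetical table name and row:

```ts
import { extractEntitiesFromReddit } from '$lib/extract/redditEntities';

// Sketch: one hypothetical row exercises all three heuristics.
const { entities, occurrences } = extractEntitiesFromReddit({
  posts: [
    {
      id: 't3_abc123',
      title: 'Seen on r/sveltejs, hat tip u/someone',
      url: 'https://example.com/article',
    },
  ],
});

// entities: subreddit:sveltejs, user:someone, domain:example.com
// each occurrence records the source table ('posts') and a stable pk ({ id: 't3_abc123' })
console.log(entities.map((e) => e.id), occurrences.length); // 3 entities, 3 occurrences
```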

diff --git a/apps/vault-demo/static/robots.txt b/apps/vault-demo/static/robots.txt new file mode 100644 index 0000000000..b6dd6670cb --- /dev/null +++ b/apps/vault-demo/static/robots.txt @@ -0,0 +1,3 @@ +# allow crawling everything by default +User-agent: * +Disallow: diff --git a/apps/vault-demo/svelte.config.js b/apps/vault-demo/svelte.config.js new file mode 100644 index 0000000000..07715b514b --- /dev/null +++ b/apps/vault-demo/svelte.config.js @@ -0,0 +1,26 @@ +import adapter from '@sveltejs/adapter-auto'; +import { vitePreprocess } from '@sveltejs/vite-plugin-svelte'; + +/** @type {import('@sveltejs/kit').Config} */ +const config = { + // Consult https://svelte.dev/docs/kit/integrations + // for more information about preprocessors + preprocess: vitePreprocess(), + + kit: { + // adapter-auto only supports some environments, see https://svelte.dev/docs/kit/adapter-auto for a list. + // If your environment is not supported, or you settled on a specific environment, switch out the adapter. + // See https://svelte.dev/docs/kit/adapters for more information about adapters. + adapter: adapter(), + experimental: { + remoteFunctions: true, + }, + }, + compilerOptions: { + experimental: { + async: true, + }, + }, +}; + +export default config; diff --git a/apps/vault-demo/tsconfig.json b/apps/vault-demo/tsconfig.json new file mode 100644 index 0000000000..f2fe73d2d0 --- /dev/null +++ b/apps/vault-demo/tsconfig.json @@ -0,0 +1,23 @@ +{ + "extends": "./.svelte-kit/tsconfig.json", + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "moduleResolution": "bundler", + "module": "esnext", + "target": "esnext", + "lib": ["esnext", "dom"], + "types": ["bun-types"] + } + // Path aliases are handled by https://svelte.dev/docs/kit/configuration#alias + // except $lib which is handled by https://svelte.dev/docs/kit/configuration#files + // + // To make changes to top-level options such as include and exclude, we recommend extending + // the generated config; see https://svelte.dev/docs/kit/configuration#typescript +} diff --git a/apps/vault-demo/vite.config.ts b/apps/vault-demo/vite.config.ts new file mode 100644 index 0000000000..fce9e8704c --- /dev/null +++ b/apps/vault-demo/vite.config.ts @@ -0,0 +1,6 @@ +import { sveltekit } from '@sveltejs/kit/vite'; +import { defineConfig } from 'vite'; + +export default defineConfig({ + plugins: [sveltekit()], +}); diff --git a/bun.lock b/bun.lock index 57ce984de5..c993ac99cf 100644 --- a/bun.lock +++ b/bun.lock @@ -59,6 +59,7 @@ "version": "0.0.0", "dependencies": { "@libsql/client": "^0.11.0", + "@repo/vault-core": "workspace:*", "drizzle-orm": "catalog:", }, }, @@ -94,54 +95,21 @@ "wrangler": "^4.28.0", }, }, - "apps/sh": { - "name": "@epicenter/sh", + "apps/vault-demo": { + "name": "vault-demo", "version": "0.0.1", - "dependencies": { - "@repo/constants": "workspace:*", - "@repo/shared": "workspace:*", - "@tanstack/svelte-query": "^6.0.0", - "@tanstack/svelte-query-devtools": "^6.0.0", - "@trpc/client": "^11.0.3", - "arktype": "catalog:", - "better-auth": "^1.3.3", - "hono": "^4.8.5", - "isomorphic-dompurify": "^2.26.0", - "marked": "^16.1.1", - "mode-watcher": "catalog:", - "nanoid": "catalog:", - "simple-icons": "^15.7.0", - "svelte-sonner": "catalog:", - "wellcrafted": "catalog:", - }, "devDependencies": { - "@epicenter/api": "workspace:*", - "@eslint/compat": "^1.2.5", - "@eslint/js": 
"^9.18.0", - "@hey-api/openapi-ts": "^0.78.3", - "@lucide/svelte": "catalog:", - "@repo/config": "workspace:*", - "@repo/svelte-utils": "workspace:*", - "@repo/ui": "workspace:*", - "@sveltejs/adapter-static": "^3.0.8", + "@repo/vault-core": "workspace:*", + "@sveltejs/adapter-auto": "^6.1.0", "@sveltejs/kit": "catalog:", "@sveltejs/vite-plugin-svelte": "catalog:", - "@tailwindcss/vite": "^4.0.0", - "@types/node": "catalog:", - "eslint": "catalog:", - "eslint-config-prettier": "^10.0.1", - "eslint-plugin-svelte": "^3.0.0", - "globals": "^16.0.0", - "prettier": "catalog:", - "prettier-plugin-svelte": "^3.3.3", - "prettier-plugin-tailwindcss": "^0.6.11", + "arktype": "catalog:", + "bun-types": "^1.3.0", + "drizzle-orm": "catalog:", "svelte": "catalog:", "svelte-check": "catalog:", - "tailwindcss": "catalog:", "typescript": "catalog:", - "typescript-eslint": "^8.20.0", "vite": "catalog:", - "wrangler": "^4.25.0", }, }, "apps/whispering": { @@ -319,10 +287,17 @@ "version": "0.0.0", "dependencies": { "arktype": "catalog:", - "drizzle-kit": "catalog:", + "drizzle-arktype": "catalog:", "drizzle-orm": "catalog:", + "fflate": "^0.8.2", + "toml": "^3.0.0", + "yaml": "^2.8.1", }, "devDependencies": { + "@standard-schema/spec": "^1.0.0", + "bun-types": "^1.3.0", + "drizzle-kit": "catalog:", + "tsx": "^4.20.6", "typescript": "catalog:", }, }, @@ -389,8 +364,6 @@ "@ark/util": ["@ark/util@0.46.0", "", {}, "sha512-JPy/NGWn/lvf1WmGCPw2VGpBg5utZraE84I7wli18EDF3p3zc/e9WolT35tINeZO3l7C77SjqRJeAUoT0CvMRg=="], - "@asamuzakjp/css-color": ["@asamuzakjp/css-color@3.2.0", "", { "dependencies": { "@csstools/css-calc": "^2.1.3", "@csstools/css-color-parser": "^3.0.9", "@csstools/css-parser-algorithms": "^3.0.4", "@csstools/css-tokenizer": "^3.0.3", "lru-cache": "^10.4.3" } }, "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw=="], - "@astrojs/check": ["@astrojs/check@0.9.4", "", { "dependencies": { "@astrojs/language-server": "^2.15.0", "chokidar": "^4.0.1", "kleur": "^4.1.5", "yargs": "^17.7.2" }, "peerDependencies": { "typescript": "^5.0.0" }, "bin": { "astro-check": "dist/bin.js" } }, "sha512-IOheHwCtpUfvogHHsvu0AbeRZEnjJg3MopdLddkJE70mULItS/Vh37BHcI00mcOJcH1vhD3odbpvWokpxam7xA=="], "@astrojs/compiler": ["@astrojs/compiler@2.12.2", "", {}, "sha512-w2zfvhjNCkNMmMMOn5b0J8+OmUaBL1o40ipMvqcG6NRpdC+lKxmTi48DT8Xw0SzJ3AfmeFLB45zXZXtmbsjcgw=="], @@ -463,16 +436,6 @@ "@cspotcode/source-map-support": ["@cspotcode/source-map-support@0.8.1", "", { "dependencies": { "@jridgewell/trace-mapping": "0.3.9" } }, "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw=="], - "@csstools/color-helpers": ["@csstools/color-helpers@5.0.2", "", {}, "sha512-JqWH1vsgdGcw2RR6VliXXdA0/59LttzlU8UlRT/iUUsEeWfYq8I+K0yhihEUTTHLRm1EXvpsCx3083EU15ecsA=="], - - "@csstools/css-calc": ["@csstools/css-calc@2.1.4", "", { "peerDependencies": { "@csstools/css-parser-algorithms": "^3.0.5", "@csstools/css-tokenizer": "^3.0.4" } }, "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ=="], - - "@csstools/css-color-parser": ["@csstools/css-color-parser@3.0.10", "", { "dependencies": { "@csstools/color-helpers": "^5.0.2", "@csstools/css-calc": "^2.1.4" }, "peerDependencies": { "@csstools/css-parser-algorithms": "^3.0.5", "@csstools/css-tokenizer": "^3.0.4" } }, "sha512-TiJ5Ajr6WRd1r8HSiwJvZBiJOqtH86aHpUjq5aEKWHiII2Qfjqd/HCWKPOW8EP4vcspXbHnXrwIDlu5savQipg=="], - - "@csstools/css-parser-algorithms": ["@csstools/css-parser-algorithms@3.0.5", 
"", { "peerDependencies": { "@csstools/css-tokenizer": "^3.0.4" } }, "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ=="], - - "@csstools/css-tokenizer": ["@csstools/css-tokenizer@3.0.4", "", {}, "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw=="], - "@drizzle-team/brocli": ["@drizzle-team/brocli@0.10.2", "", {}, "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w=="], "@emmetio/abbreviation": ["@emmetio/abbreviation@2.3.3", "", { "dependencies": { "@emmetio/scanner": "^1.0.4" } }, "sha512-mgv58UrU3rh4YgbE/TzgLQwJ3pFsHHhCLqY20aJq+9comytTXUDNGG/SMtSeMJdkpxgXSXunBGLD8Boka3JyVA=="], @@ -501,8 +464,6 @@ "@epicenter/posthog-reverse-proxy": ["@epicenter/posthog-reverse-proxy@workspace:apps/posthog-reverse-proxy"], - "@epicenter/sh": ["@epicenter/sh@workspace:apps/sh"], - "@esbuild-kit/core-utils": ["@esbuild-kit/core-utils@3.3.2", "", { "dependencies": { "esbuild": "~0.18.20", "source-map-support": "^0.5.21" } }, "sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ=="], "@esbuild-kit/esm-loader": ["@esbuild-kit/esm-loader@2.6.5", "", { "dependencies": { "@esbuild-kit/core-utils": "^3.3.2", "get-tsconfig": "^4.7.0" } }, "sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA=="], @@ -591,10 +552,6 @@ "@hexagon/base64": ["@hexagon/base64@1.1.28", "", {}, "sha512-lhqDEAvWixy3bZ+UOYbPwUbBkwBq5C1LAJ/xPC8Oi+lL54oyakv/npbA0aU2hgCsx/1NUd4IBvV03+aUBWxerw=="], - "@hey-api/json-schema-ref-parser": ["@hey-api/json-schema-ref-parser@1.0.6", "", { "dependencies": { "@jsdevtools/ono": "^7.1.3", "@types/json-schema": "^7.0.15", "js-yaml": "^4.1.0", "lodash": "^4.17.21" } }, "sha512-yktiFZoWPtEW8QKS65eqKwA5MTKp88CyiL8q72WynrBs/73SAaxlSWlA2zW/DZlywZ5hX1OYzrCC0wFdvO9c2w=="], - - "@hey-api/openapi-ts": ["@hey-api/openapi-ts@0.78.3", "", { "dependencies": { "@hey-api/json-schema-ref-parser": "1.0.6", "ansi-colors": "4.1.3", "c12": "2.0.1", "color-support": "1.1.3", "commander": "13.0.0", "handlebars": "4.7.8", "open": "10.1.2" }, "peerDependencies": { "typescript": "^5.5.3" }, "bin": { "openapi-ts": "bin/index.cjs" } }, "sha512-uTP/EGA/mM4MsFN0xGcQ4fkFxeaAUGT2T1VKnNBv6WUwRY7J59Wg8kVuRXn3dLeI/IWUDwNNdHU0SfnEbXEmYw=="], - "@hono/trpc-server": ["@hono/trpc-server@0.4.0", "", { "peerDependencies": { "@trpc/server": "^10.10.0 || >11.0.0-rc", "hono": ">=4.*" } }, "sha512-LGlJfCmNIGMwcknZEIYdujVMs9OkNVazhpOhaz3kTWOXvNL660VOHpvvktosCiJrajyBY1RtIJKQ+IKaQvNuSg=="], "@hono/zod-validator": ["@hono/zod-validator@0.4.2", "", { "peerDependencies": { "hono": ">=3.9.0", "zod": "^3.19.1" } }, "sha512-1rrlBg+EpDPhzOV4hT9pxr5+xDVmKuz6YJl+la7VCwK6ass5ldyKm5fD+umJdV2zhHD6jROoCCv8NbTwyfhT0g=="], @@ -883,6 +840,8 @@ "@sveltejs/acorn-typescript": ["@sveltejs/acorn-typescript@1.0.5", "", { "peerDependencies": { "acorn": "^8.9.0" } }, "sha512-IwQk4yfwLdibDlrXVE04jTZYlLnwsTT2PIOQQGNLWfjavGifnk1JD1LcZjZaBTRcxZu2FfPfNLOE04DSu9lqtQ=="], + "@sveltejs/adapter-auto": ["@sveltejs/adapter-auto@6.1.1", "", { "peerDependencies": { "@sveltejs/kit": "^2.0.0" } }, "sha512-cBNt4jgH4KuaNO5gRSB2CZKkGtz+OCZ8lPjRQGjhvVUD4akotnj2weUia6imLl2v07K3IgsQRyM36909miSwoQ=="], + "@sveltejs/adapter-static": ["@sveltejs/adapter-static@3.0.9", "", { "peerDependencies": { "@sveltejs/kit": "^2.0.0" } }, "sha512-aytHXcMi7lb9ljsWUzXYQ0p5X1z9oWud2olu/EpmH7aCu4m84h7QLvb5Wp+CFirKcwoNnYvYWhyP/L8Vh1ztdw=="], "@sveltejs/kit": ["@sveltejs/kit@2.37.0", "", { 
"dependencies": { "@standard-schema/spec": "^1.0.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/cookie": "^0.6.0", "acorn": "^8.14.1", "cookie": "^0.6.0", "devalue": "^5.3.2", "esm-env": "^1.2.2", "kleur": "^4.1.5", "magic-string": "^0.30.5", "mrmime": "^2.0.0", "sade": "^1.8.1", "set-cookie-parser": "^2.6.0", "sirv": "^3.0.0" }, "peerDependencies": { "@opentelemetry/api": "^1.0.0", "@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0", "svelte": "^4.0.0 || ^5.0.0-next.0", "vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0" }, "optionalPeers": ["@opentelemetry/api"], "bin": { "svelte-kit": "svelte-kit.js" } }, "sha512-xgKtpjQ6Ry4mdShd01ht5AODUsW7+K1iValPDq7QX8zI1hWOKREH9GjG8SRCN5tC4K7UXmMhuQam7gbLByVcnw=="], @@ -985,8 +944,6 @@ "@tauri-apps/plugin-updater": ["@tauri-apps/plugin-updater@2.9.0", "", { "dependencies": { "@tauri-apps/api": "^2.6.0" } }, "sha512-j++sgY8XpeDvzImTrzWA08OqqGqgkNyxczLD7FjNJJx/uXxMZFz5nDcfkyoI/rCjYuj2101Tci/r/HFmOmoxCg=="], - "@trpc/client": ["@trpc/client@11.4.4", "", { "peerDependencies": { "@trpc/server": "11.4.4", "typescript": ">=5.7.2" } }, "sha512-86OZl+Y+Xlt9ITGlhCMImERcsWCOrVzpNuzg3XBlsDSmSs9NGsghKjeCpJQlE36XaG3aze+o9pRukiYYvBqxgQ=="], - "@trpc/server": ["@trpc/server@11.4.4", "", { "peerDependencies": { "typescript": ">=5.7.2" } }, "sha512-VkJb2xnb4rCynuwlCvgPBh5aM+Dco6fBBIo6lWAdJJRYVwtyE5bxNZBgUvRRz/cFSEAy0vmzLxF7aABDJfK5Rg=="], "@types/bun": ["@types/bun@1.3.1", "", { "dependencies": { "bun-types": "1.3.1" } }, "sha512-4jNMk2/K9YJtfqwoAa28c8wK+T7nvJFOjxI4h/7sORWcypRNxBpr+TPNaCfVWq70tLCJsqoFwcf0oI0JU/fvMQ=="], @@ -1019,8 +976,6 @@ "@types/react": ["@types/react@19.2.2", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA=="], - "@types/trusted-types": ["@types/trusted-types@2.0.7", "", {}, "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw=="], - "@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="], "@types/ws": ["@types/ws@8.18.1", "", { "dependencies": { "@types/node": "*" } }, "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg=="], @@ -1077,8 +1032,6 @@ "acorn-walk": ["acorn-walk@8.3.2", "", {}, "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A=="], - "agent-base": ["agent-base@7.1.4", "", {}, "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="], - "agentkeepalive": ["agentkeepalive@4.6.0", "", { "dependencies": { "humanize-ms": "^1.2.1" } }, "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ=="], "ai": ["ai@5.0.0-beta.21", "", { "dependencies": { "@ai-sdk/gateway": "1.0.0-beta.8", "@ai-sdk/provider": "2.0.0-beta.1", "@ai-sdk/provider-utils": "3.0.0-beta.3", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.49 || ^4" }, "bin": { "ai": "dist/bin/ai.min.js" } }, "sha512-ZmgUoEIXb2G2HLtK1U3UB+hSDa3qrVIeAfgXf3SIE9r5Vqj6xHG1pN/7fHIZDSgb1TCaypG0ANVB0O9WmnMfiw=="], @@ -1087,8 +1040,6 @@ "ansi-align": ["ansi-align@3.0.1", "", { "dependencies": { "string-width": "^4.1.0" } }, "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w=="], - "ansi-colors": ["ansi-colors@4.1.3", "", {}, 
"sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw=="], - "ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], "ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], @@ -1187,8 +1138,6 @@ "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], - "c12": ["c12@2.0.1", "", { "dependencies": { "chokidar": "^4.0.1", "confbox": "^0.1.7", "defu": "^6.1.4", "dotenv": "^16.4.5", "giget": "^1.2.3", "jiti": "^2.3.0", "mlly": "^1.7.1", "ohash": "^1.1.4", "pathe": "^1.1.2", "perfect-debounce": "^1.0.0", "pkg-types": "^1.2.0", "rc9": "^2.1.2" }, "peerDependencies": { "magicast": "^0.3.5" }, "optionalPeers": ["magicast"] }, "sha512-Z4JgsKXHG37C6PYUtIxCfLJZvo6FyhHJoClwwb9ftUkLpPSkuYqn6Tr+vnaN8hymm0kIbcg6Ey3kv/Q71k5w/A=="], - "call-bind": ["call-bind@1.0.8", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.2" } }, "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww=="], "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="], @@ -1217,8 +1166,6 @@ "cipher-base": ["cipher-base@1.0.7", "", { "dependencies": { "inherits": "^2.0.4", "safe-buffer": "^5.2.1", "to-buffer": "^1.2.2" } }, "sha512-Mz9QMT5fJe7bKI7MH31UilT5cEK5EHHRCccw/YRFsRY47AuNgaV6HY3rscp0/I4Q+tTW/5zoqpSeRRI54TkDWA=="], - "citty": ["citty@0.1.6", "", { "dependencies": { "consola": "^3.2.3" } }, "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ=="], - "clean-git-ref": ["clean-git-ref@2.0.1", "", {}, "sha512-bLSptAy2P0s6hU4PzuIMKmMJJSE6gLXGH1cntDu7bWJUksvuM+7ReOK61mozULErYvP6a15rnYl0zFDef+pyPw=="], "cli-boxes": ["cli-boxes@3.0.0", "", {}, "sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g=="], @@ -1237,26 +1184,18 @@ "color-string": ["color-string@1.9.1", "", { "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg=="], - "color-support": ["color-support@1.1.3", "", { "bin": { "color-support": "bin.js" } }, "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg=="], - "combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="], "comma-separated-tokens": ["comma-separated-tokens@2.0.3", "", {}, "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg=="], "command-exists": ["command-exists@1.2.9", "", {}, "sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w=="], - "commander": ["commander@13.0.0", "", {}, "sha512-oPYleIY8wmTVzkvQq10AEok6YcTC4sRUBl8F9gVuwchGVUCTbl/vhLTaQqutuuySYOsu8YTgV+OxKc/8Yvx+mQ=="], - "common-ancestor-path": ["common-ancestor-path@1.0.1", "", {}, 
"sha512-L3sHRo1pXXEqX8VU28kfgUY+YGsk09hPqZiZmLacNib6XNTCM8ubYeT7ryXQw8asB1sKgcU5lkB7ONug08aB8w=="], "concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="], "concurrently": ["concurrently@9.2.1", "", { "dependencies": { "chalk": "4.1.2", "rxjs": "7.8.2", "shell-quote": "1.8.3", "supports-color": "8.1.1", "tree-kill": "1.2.2", "yargs": "17.7.2" }, "bin": { "conc": "dist/bin/concurrently.js", "concurrently": "dist/bin/concurrently.js" } }, "sha512-fsfrO0MxV64Znoy8/l1vVIjjHa29SZyyqPgQBwhiDcaW8wJc2W3XWVOGx4M3oJBnv/zdUZIIp1gDeS98GzP8Ng=="], - "confbox": ["confbox@0.1.8", "", {}, "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w=="], - - "consola": ["consola@3.4.2", "", {}, "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA=="], - "console-browserify": ["console-browserify@1.2.0", "", {}, "sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA=="], "constants-browserify": ["constants-browserify@1.0.0", "", {}, "sha512-xFxOwqIzR/e1k1gLiWEophSCMqXcwVHIH7akf7b/vxcUeGunlj3hvZaaqxwHsTgn+IndtkQJgSztIDWeumWJDQ=="], @@ -1299,14 +1238,10 @@ "cssesc": ["cssesc@3.0.0", "", { "bin": { "cssesc": "bin/cssesc" } }, "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg=="], - "cssstyle": ["cssstyle@4.6.0", "", { "dependencies": { "@asamuzakjp/css-color": "^3.2.0", "rrweb-cssom": "^0.8.0" } }, "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg=="], - "csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="], "data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="], - "data-urls": ["data-urls@5.0.0", "", { "dependencies": { "whatwg-mimetype": "^4.0.0", "whatwg-url": "^14.0.0" } }, "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg=="], - "date-fns": ["date-fns@4.1.0", "", {}, "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg=="], "debug": ["debug@4.4.1", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ=="], @@ -1369,10 +1304,6 @@ "domain-browser": ["domain-browser@4.22.0", "", {}, "sha512-IGBwjF7tNk3cwypFNH/7bfzBcgSCbaMOD3GsaY1AU/JRrnHnYgEM0+9kQt52iZxjNsjBtJYtao146V+f8jFZNw=="], - "dompurify": ["dompurify@3.2.6", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, "sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ=="], - - "dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="], - "drizzle-arktype": ["drizzle-arktype@0.1.3", "", { "peerDependencies": { "arktype": ">=2.0.0", "drizzle-orm": ">=0.36.0" } }, "sha512-X66GB2pz7Nb+NmCZefDXpdoglxjGYnB2yRU5umAK2stVkl4rvV6i6XbMg1+w1HiY/ydC8gJVq4jKAARYazpb3g=="], "drizzle-kit": ["drizzle-kit@0.31.4", "", { "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.4", "esbuild-register": "^3.5.0" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-tCPWVZWZqWVx2XUsVpJRnH9Mx0ClVOf5YUHerZ5so1OKSlqww4zy1R5ksEdGRcO3tM3zj0PYN6V48TbQCL1RfA=="], @@ 
-1441,7 +1372,7 @@ "esquery": ["esquery@1.6.0", "", { "dependencies": { "estraverse": "^5.1.0" } }, "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg=="], - "esrap": ["esrap@2.1.0", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA=="], + "esrap": ["esrap@1.4.9", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-3OMlcd0a03UGuZpPeUC1HxR3nA23l+HEyCiZw3b3FumJIN9KphoGzDJKMXI1S72jVS1dsenDyQC0kJlO1U9E1g=="], "esrecurse": ["esrecurse@4.3.0", "", { "dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="], @@ -1497,7 +1428,7 @@ "fetch-blob": ["fetch-blob@3.2.0", "", { "dependencies": { "node-domexception": "^1.0.0", "web-streams-polyfill": "^3.0.3" } }, "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ=="], - "fflate": ["fflate@0.4.8", "", {}, "sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA=="], + "fflate": ["fflate@0.8.2", "", {}, "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A=="], "file-entry-cache": ["file-entry-cache@8.0.0", "", { "dependencies": { "flat-cache": "^4.0.0" } }, "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ=="], @@ -1533,8 +1464,6 @@ "fresh": ["fresh@2.0.0", "", {}, "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A=="], - "fs-minipass": ["fs-minipass@2.1.0", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg=="], - "fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="], "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], @@ -1555,8 +1484,6 @@ "get-tsconfig": ["get-tsconfig@4.10.1", "", { "dependencies": { "resolve-pkg-maps": "^1.0.0" } }, "sha512-auHyJ4AgMz7vgS8Hp3N6HXSmlMdUyhSUrfBF16w153rxtLIEOE+HGqaBppczZvnHLqQJfiHotCYpNhl0lUROFQ=="], - "giget": ["giget@1.2.5", "", { "dependencies": { "citty": "^0.1.6", "consola": "^3.4.0", "defu": "^6.1.4", "node-fetch-native": "^1.6.6", "nypm": "^0.5.4", "pathe": "^2.0.3", "tar": "^6.2.1" }, "bin": { "giget": "dist/cli.mjs" } }, "sha512-r1ekGw/Bgpi3HLV3h1MRBIlSAdHoIMklpaQ3OQLFcRw9PwAj2rqigvIbg+dBUI51OxVI2jsEtDywDBjSiuf7Ug=="], - "github-slugger": ["github-slugger@2.0.0", "", {}, "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="], "glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="], @@ -1579,8 +1506,6 @@ "h3": ["h3@1.15.4", "", { "dependencies": { "cookie-es": "^1.2.2", "crossws": "^0.3.5", "defu": "^6.1.4", "destr": "^2.0.5", "iron-webcrypto": "^1.2.1", "node-mock-http": "^1.0.2", "radix3": "^1.1.2", "ufo": "^1.6.1", "uncrypto": "^0.1.3" } }, "sha512-z5cFQWDffyOe4vQ9xIqNfCZdV4p//vy6fBnr8Q1AWnVZ0teurKMG66rLj++TKwKPUP3u7iMUvrvKaEUiQw2QWQ=="], - "handlebars": ["handlebars@4.7.8", "", { "dependencies": { "minimist": "^1.2.5", "neo-async": "^2.6.2", "source-map": "^0.6.1", "wordwrap": 
"^1.0.0" }, "optionalDependencies": { "uglify-js": "^3.1.4" }, "bin": { "handlebars": "bin/handlebars" } }, "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ=="], - "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], "has-property-descriptors": ["has-property-descriptors@1.0.2", "", { "dependencies": { "es-define-property": "^1.0.0" } }, "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg=="], @@ -1621,8 +1546,6 @@ "hono-openapi": ["hono-openapi@0.4.8", "", { "dependencies": { "json-schema-walker": "^2.0.0" }, "peerDependencies": { "@hono/arktype-validator": "^2.0.0", "@hono/effect-validator": "^1.2.0", "@hono/typebox-validator": "^0.2.0 || ^0.3.0", "@hono/valibot-validator": "^0.5.1", "@hono/zod-validator": "^0.4.1", "@sinclair/typebox": "^0.34.9", "@valibot/to-json-schema": "^1.0.0-beta.3", "arktype": "^2.0.0", "effect": "^3.11.3", "hono": "^4.6.13", "openapi-types": "^12.1.3", "valibot": "^1.0.0-beta.9", "zod": "^3.23.8", "zod-openapi": "^4.0.0" }, "optionalPeers": ["@hono/arktype-validator", "@hono/effect-validator", "@hono/typebox-validator", "@hono/valibot-validator", "@hono/zod-validator", "@sinclair/typebox", "@valibot/to-json-schema", "arktype", "effect", "hono", "valibot", "zod", "zod-openapi"] }, "sha512-LYr5xdtD49M7hEAduV1PftOMzuT8ZNvkyWfh1DThkLsIr4RkvDb12UxgIiFbwrJB6FLtFXLoOZL9x4IeDk2+VA=="], - "html-encoding-sniffer": ["html-encoding-sniffer@4.0.0", "", { "dependencies": { "whatwg-encoding": "^3.1.1" } }, "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ=="], - "html-escaper": ["html-escaper@3.0.3", "", {}, "sha512-RuMffC89BOWQoY0WKGpIhn5gX3iI54O6nRA0yC124NYVtzjmFWBIiFd8M0x+ZdX0P9R4lADg1mgP8C7PxGOWuQ=="], "html-void-elements": ["html-void-elements@3.0.0", "", {}, "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg=="], @@ -1631,12 +1554,8 @@ "http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="], - "http-proxy-agent": ["http-proxy-agent@7.0.2", "", { "dependencies": { "agent-base": "^7.1.0", "debug": "^4.3.4" } }, "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig=="], - "https-browserify": ["https-browserify@1.0.0", "", {}, "sha512-J+FkSdyD+0mA0N+81tMotaRMfSL9SGi+xpD3T6YApKsc3bGSXJlfXri3VyFOeYkfLRQisDk1W+jIFFKBeUBbBg=="], - "https-proxy-agent": ["https-proxy-agent@7.0.6", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "4" } }, "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="], - "human-signals": ["human-signals@2.1.0", "", {}, "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw=="], "humanize-ms": ["humanize-ms@1.2.1", "", { "dependencies": { "ms": "^2.0.0" } }, "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ=="], @@ -1689,8 +1608,6 @@ "is-plain-obj": ["is-plain-obj@4.1.0", "", {}, "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg=="], - "is-potential-custom-element-name": ["is-potential-custom-element-name@1.0.1", "", {}, 
"sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ=="], - "is-promise": ["is-promise@4.0.0", "", {}, "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ=="], "is-reference": ["is-reference@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.6" } }, "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw=="], @@ -1707,8 +1624,6 @@ "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], - "isomorphic-dompurify": ["isomorphic-dompurify@2.26.0", "", { "dependencies": { "dompurify": "^3.2.6", "jsdom": "^26.1.0" } }, "sha512-nZmoK4wKdzPs5USq4JHBiimjdKSVAOm2T1KyDoadtMPNXYHxiENd19ou4iU/V4juFM6LVgYQnpxCYmxqNP4Obw=="], - "isomorphic-git": ["isomorphic-git@1.32.1", "", { "dependencies": { "async-lock": "^1.4.1", "clean-git-ref": "^2.0.1", "crc-32": "^1.2.0", "diff3": "0.0.3", "ignore": "^5.1.4", "minimisted": "^2.0.0", "pako": "^1.0.10", "path-browserify": "^1.0.1", "pify": "^4.0.1", "readable-stream": "^3.4.0", "sha.js": "^2.4.9", "simple-get": "^4.0.1" }, "bin": { "isogit": "cli.cjs" } }, "sha512-NZCS7qpLkCZ1M/IrujYBD31sM6pd/fMVArK4fz4I7h6m0rUW2AsYU7S7zXeABuHL6HIfW6l53b4UQ/K441CQjg=="], "isomorphic-timers-promises": ["isomorphic-timers-promises@1.0.1", "", {}, "sha512-u4sej9B1LPSxTGKB/HiuzvEQnXH0ECYkSVQU39koSwmFAxhlEAFl9RdTvLv4TOTQUgBS5O3O5fwUxk6byBZ+IQ=="], @@ -1721,8 +1636,6 @@ "js-yaml": ["js-yaml@3.14.1", "", { "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g=="], - "jsdom": ["jsdom@26.1.0", "", { "dependencies": { "cssstyle": "^4.2.1", "data-urls": "^5.0.0", "decimal.js": "^10.5.0", "html-encoding-sniffer": "^4.0.0", "http-proxy-agent": "^7.0.2", "https-proxy-agent": "^7.0.6", "is-potential-custom-element-name": "^1.0.1", "nwsapi": "^2.2.16", "parse5": "^7.2.1", "rrweb-cssom": "^0.8.0", "saxes": "^6.0.0", "symbol-tree": "^3.2.4", "tough-cookie": "^5.1.1", "w3c-xmlserializer": "^5.0.0", "webidl-conversions": "^7.0.0", "whatwg-encoding": "^3.1.1", "whatwg-mimetype": "^4.0.0", "whatwg-url": "^14.1.1", "ws": "^8.18.0", "xml-name-validator": "^5.0.0" }, "peerDependencies": { "canvas": "^3.0.0" }, "optionalPeers": ["canvas"] }, "sha512-Cvc9WUhxSMEo4McES3P7oK3QaXldCfNWp7pl2NNeiIFlCoLr3kfq9kb1fxftiwk1FLV7CvpvDfonxtzUDeSOPg=="], - "json-buffer": ["json-buffer@3.0.1", "", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="], "json-schema": ["json-schema@0.4.0", "", {}, "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA=="], @@ -1799,8 +1712,6 @@ "markdown-table": ["markdown-table@3.0.4", "", {}, "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw=="], - "marked": ["marked@16.2.0", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-LbbTuye+0dWRz2TS9KJ7wsnD4KAtpj0MVkWc90XvBa6AslXsT0hTBVH5k32pcSyHH1fst9XEFJunXHktVy0zlg=="], - "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], "md5.js": ["md5.js@1.3.5", "", { "dependencies": { "hash-base": "^3.0.0", "inherits": "^2.0.1", "safe-buffer": "^5.1.2" } }, "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg=="], @@ -1929,8 +1840,6 @@ 
"mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="], - "mlly": ["mlly@1.7.4", "", { "dependencies": { "acorn": "^8.14.0", "pathe": "^2.0.1", "pkg-types": "^1.3.0", "ufo": "^1.5.4" } }, "sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw=="], - "mode-watcher": ["mode-watcher@1.1.0", "", { "dependencies": { "runed": "^0.25.0", "svelte-toolbelt": "^0.7.1" }, "peerDependencies": { "svelte": "^5.27.0" } }, "sha512-mUT9RRGPDYenk59qJauN1rhsIMKBmWA3xMF+uRwE8MW/tjhaDSCCARqkSuDTq8vr4/2KcAxIGVjACxTjdk5C3g=="], "mri": ["mri@1.2.0", "", {}, "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA=="], @@ -1949,8 +1858,6 @@ "negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="], - "neo-async": ["neo-async@2.6.2", "", {}, "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw=="], - "neotraverse": ["neotraverse@0.6.18", "", {}, "sha512-Z4SmBUweYa09+o6pG+eASabEpP6QkQ70yHj351pQoEXIs8uHbaU2DWVmzBANKgflPa47A50PtB2+NgRpQvr7vA=="], "nlcst-to-string": ["nlcst-to-string@4.0.0", "", { "dependencies": { "@types/nlcst": "^2.0.0" } }, "sha512-YKLBCcUYKAg0FNlOBT6aI91qFmSiFKiluk655WzPF+DDMA02qIyy8uiRqI8QXtcFpEvll12LpL5MXqEmAZ+dcA=="], @@ -1975,10 +1882,6 @@ "npm-run-path": ["npm-run-path@4.0.1", "", { "dependencies": { "path-key": "^3.0.0" } }, "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw=="], - "nwsapi": ["nwsapi@2.2.21", "", {}, "sha512-o6nIY3qwiSXl7/LuOU0Dmuctd34Yay0yeuZRLFmDPrrdHpXKFndPj3hM+YEPVHYC5fx2otBx4Ilc/gyYSAUaIA=="], - - "nypm": ["nypm@0.5.4", "", { "dependencies": { "citty": "^0.1.6", "consola": "^3.4.0", "pathe": "^2.0.3", "pkg-types": "^1.3.1", "tinyexec": "^0.3.2", "ufo": "^1.5.4" }, "bin": { "nypm": "dist/cli.mjs" } }, "sha512-X0SNNrZiGU8/e/zAB7sCTtdxWTMSIO73q+xuKgglm2Yvzwlo8UoC5FNySQFCvl84uPaeADkqHUZUkWy4aH4xOA=="], - "object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="], "object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="], @@ -2059,8 +1962,6 @@ "pbkdf2": ["pbkdf2@3.1.5", "", { "dependencies": { "create-hash": "^1.2.0", "create-hmac": "^1.1.7", "ripemd160": "^2.0.3", "safe-buffer": "^5.2.1", "sha.js": "^2.4.12", "to-buffer": "^1.2.1" } }, "sha512-Q3CG/cYvCO1ye4QKkuH7EXxs3VC/rI1/trd+qX2+PolbaKG0H+bgcZzrTt96mMyRtejk+JMCiLUn3y29W8qmFQ=="], - "perfect-debounce": ["perfect-debounce@1.0.0", "", {}, "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA=="], - "pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="], "pg-protocol": ["pg-protocol@1.10.3", "", {}, "sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ=="], @@ -2077,8 +1978,6 @@ "pkg-dir": ["pkg-dir@5.0.0", "", { "dependencies": { "find-up": "^5.0.0" } }, "sha512-NPE8TDbzl/3YQYY7CSS228s3g2ollTFnc+Qi3tqmqJp9Vg2ovUpixcJEo2HJScN2Ez+kEaal6y70c0ehqJBJeA=="], - "pkg-types": ["pkg-types@1.3.1", "", { "dependencies": { "confbox": "^0.1.8", "mlly": "^1.7.4", "pathe": "^2.0.1" } }, 
"sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ=="], - "platform": ["platform@1.3.6", "", {}, "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg=="], "possible-typed-array-names": ["possible-typed-array-names@1.1.0", "", {}, "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg=="], @@ -2155,8 +2054,6 @@ "raw-body": ["raw-body@3.0.0", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.6.3", "unpipe": "1.0.0" } }, "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g=="], - "rc9": ["rc9@2.1.2", "", { "dependencies": { "defu": "^6.1.4", "destr": "^2.0.3" } }, "sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg=="], - "readable-stream": ["readable-stream@4.7.0", "", { "dependencies": { "abort-controller": "^3.0.0", "buffer": "^6.0.3", "events": "^3.3.0", "process": "^0.11.10", "string_decoder": "^1.3.0" } }, "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg=="], "readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="], @@ -2219,8 +2116,6 @@ "router": ["router@2.2.0", "", { "dependencies": { "debug": "^4.4.0", "depd": "^2.0.0", "is-promise": "^4.0.0", "parseurl": "^1.3.3", "path-to-regexp": "^8.0.0" } }, "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ=="], - "rrweb-cssom": ["rrweb-cssom@0.8.0", "", {}, "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw=="], - "run-applescript": ["run-applescript@7.0.0", "", {}, "sha512-9by4Ij99JUr/MCFBUkDKLWK3G9HVXmabKz9U5MlIAIuvuzkiOicRYs8XJLxX+xahD+mLiiCYDqF9dKAgtzKP1A=="], "run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="], @@ -2241,8 +2136,6 @@ "sass-formatter": ["sass-formatter@0.7.9", "", { "dependencies": { "suf-log": "^2.5.3" } }, "sha512-CWZ8XiSim+fJVG0cFLStwDvft1VI7uvXdCNJYXhDvowiv+DsbD1nXLiQ4zrE5UBvj5DWZJ93cwN0NX5PMsr1Pw=="], - "saxes": ["saxes@6.0.0", "", { "dependencies": { "xmlchars": "^2.2.0" } }, "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA=="], - "section-matter": ["section-matter@1.0.0", "", { "dependencies": { "extend-shallow": "^2.0.1", "kind-of": "^6.0.0" } }, "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA=="], "semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], @@ -2285,8 +2178,6 @@ "simple-get": ["simple-get@4.0.1", "", { "dependencies": { "decompress-response": "^6.0.0", "once": "^1.3.1", "simple-concat": "^1.0.0" } }, "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA=="], - "simple-icons": ["simple-icons@15.11.0", "", {}, "sha512-hHgDvNcbIdE5e6thY19Ao1VEI4CectXDNB0+nXOLCBf3ApuzeMm4tAhWzeR9qZdt/GoeQs1nm9JTVzCVBuX1nA=="], - "simple-swizzle": ["simple-swizzle@0.2.2", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg=="], "sirv": ["sirv@3.0.1", "", { "dependencies": { "@polka/url": 
"^1.0.0-next.24", "mrmime": "^2.0.0", "totalist": "^3.0.0" } }, "sha512-FoqMu0NCGBLCcAkS1qA+XJIQTR6/JHfQXl+uGteNCQ76T91DMUjPa9xfmeqMY3z80nLSg9yQmNjK0Px6RWsH/A=="], @@ -2335,7 +2226,7 @@ "supports-preserve-symlinks-flag": ["supports-preserve-symlinks-flag@1.0.0", "", {}, "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="], - "svelte": ["svelte@5.38.6", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-ltBPlkvqk3bgCK7/N323atUpP3O3Y+DrGV4dcULrsSn4fZaaNnOmdplNznwfdWclAgvSr5rxjtzn/zJhRm6TKg=="], + "svelte": ["svelte@5.14.4", "", { "dependencies": { "@ampproject/remapping": "^2.3.0", "@jridgewell/sourcemap-codec": "^1.5.0", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "acorn-typescript": "^1.4.13", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "esm-env": "^1.2.1", "esrap": "^1.3.1", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-2iR/UHHA2Dsldo4JdXDcdqT+spueuh+uNYw1FoTKBbpnFEECVISeqSo0uubPS4AfBE0xI6u7DGHxcdq3DTDmoQ=="], "svelte-check": ["svelte-check@4.3.1", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "chokidar": "^4.0.1", "fdir": "^6.2.0", "picocolors": "^1.0.0", "sade": "^1.7.4" }, "peerDependencies": { "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": ">=5.0.0" }, "bin": { "svelte-check": "bin/svelte-check" } }, "sha512-lkh8gff5gpHLjxIV+IaApMxQhTGnir2pNUAqcNgeKkvK5bT/30Ey/nzBxNLDlkztCH4dP7PixkMt9SWEKFPBWg=="], @@ -2347,8 +2238,6 @@ "svelte2tsx": ["svelte2tsx@0.7.42", "", { "dependencies": { "dedent-js": "^1.0.1", "pascal-case": "^3.1.1" }, "peerDependencies": { "svelte": "^3.55 || ^4.0.0-next.0 || ^4.0 || ^5.0.0-next.0", "typescript": "^4.9.4 || ^5.0.0" } }, "sha512-PSNrKS16aVdAajoFjpF5M0t6TA7ha7GcKbBajD9RG3M+vooAuvLnWAGUSC6eJL4zEOVbOWKtcS2BuY4rxPljoA=="], - "symbol-tree": ["symbol-tree@3.2.4", "", {}, "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw=="], - "tabbable": ["tabbable@6.2.0", "", {}, "sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew=="], "tailwind-merge": ["tailwind-merge@3.3.1", "", {}, "sha512-gBXpgUm/3rp1lMZZrM/w7D8GKqshif0zAymAhbCyIt8KMe+0v9DQ7cdYLR4FHH/cKpdTXb+A/tKKU3eolfsI+g=="], @@ -2371,21 +2260,17 @@ "tinyglobby": ["tinyglobby@0.2.14", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ=="], - "tldts": ["tldts@6.1.86", "", { "dependencies": { "tldts-core": "^6.1.86" }, "bin": { "tldts": "bin/cli.js" } }, "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ=="], - - "tldts-core": ["tldts-core@6.1.86", "", {}, "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA=="], - "to-buffer": ["to-buffer@1.2.1", "", { "dependencies": { "isarray": "^2.0.5", "safe-buffer": "^5.2.1", "typed-array-buffer": "^1.0.3" } }, "sha512-tB82LpAIWjhLYbqjx3X4zEeHN6M8CiuOEy2JY8SEQVdYRe3CCHOFaqrBW1doLDrfpWhplcW7BL+bO3/6S3pcDQ=="], "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": 
"^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], "toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="], - "totalist": ["totalist@3.0.1", "", {}, "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ=="], + "toml": ["toml@3.0.0", "", {}, "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w=="], - "tough-cookie": ["tough-cookie@5.1.2", "", { "dependencies": { "tldts": "^6.1.32" } }, "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A=="], + "totalist": ["totalist@3.0.1", "", {}, "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ=="], - "tr46": ["tr46@5.1.1", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw=="], + "tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], "tree-kill": ["tree-kill@1.2.2", "", { "bin": { "tree-kill": "cli.js" } }, "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A=="], @@ -2403,6 +2288,8 @@ "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "tsx": ["tsx@4.20.6", "", { "dependencies": { "esbuild": "~0.25.0", "get-tsconfig": "^4.7.5" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "bin": { "tsx": "dist/cli.mjs" } }, "sha512-ytQKuwgmrrkDTFP4LjR0ToE2nqgy886GpvRSpU0JAnrdBYppuY5rLkRUYPU1yCryb24SsKBTL/hlDQAEFVwtZg=="], + "tty-browserify": ["tty-browserify@0.0.1", "", {}, "sha512-C3TaO7K81YvjCgQH9Q1S3R3P3BtN3RIM8n+OvX4il1K1zgE8ZhI0op7kClgkxtutIE8hQrcrHBXvIheqKUUCxw=="], "tunnel": ["tunnel@0.0.6", "", {}, "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="], @@ -2443,8 +2330,6 @@ "ufo": ["ufo@1.6.1", "", {}, "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA=="], - "uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="], - "ultrahtml": ["ultrahtml@1.6.0", "", {}, "sha512-R9fBn90VTJrqqLDwyMph+HGne8eqY1iPfYhPzZrvKpIfwkWZbcYlfpsb8B9dTvBfpy1/hqAD7Wi8EKfP9e8zdw=="], "uncrypto": ["uncrypto@0.1.3", "", {}, "sha512-Ql87qFHB3s/De2ClA9e0gsnS6zXG27SkTiSJwjCc9MebbfapQfuPzumMIUMi38ezPZVNFcHI9sUIepeQfw8J8Q=="], @@ -2503,6 +2388,8 @@ "vaul-svelte": ["vaul-svelte@1.0.0-next.7", "", { "dependencies": { "runed": "^0.23.2", "svelte-toolbelt": "^0.7.1" }, "peerDependencies": { "svelte": "^5.0.0" } }, "sha512-7zN7Bi3dFQixvvbUJY9uGDe7Ws/dGZeBQR2pXdXmzQiakjrxBvWo0QrmsX3HK+VH+SZOltz378cmgmCS9f9rSg=="], + "vault-demo": ["vault-demo@workspace:apps/vault-demo"], + "vfile": ["vfile@6.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile-message": "^4.0.0" } }, "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q=="], "vfile-location": ["vfile-location@5.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile": "^6.0.0" } }, "sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg=="], @@ -2553,23 +2440,17 @@ "vscode-uri": ["vscode-uri@3.1.0", "", {}, 
"sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ=="], - "w3c-xmlserializer": ["w3c-xmlserializer@5.0.0", "", { "dependencies": { "xml-name-validator": "^5.0.0" } }, "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA=="], - "web-namespaces": ["web-namespaces@2.0.1", "", {}, "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ=="], "web-streams-polyfill": ["web-streams-polyfill@4.0.0-beta.3", "", {}, "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug=="], "web-vitals": ["web-vitals@4.2.4", "", {}, "sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw=="], - "webidl-conversions": ["webidl-conversions@7.0.0", "", {}, "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="], + "webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="], "wellcrafted": ["wellcrafted@0.22.0", "", {}, "sha512-qlAZ8NYJf+kOpqu+LHW3byMuy80yDex/B0fe4GoI5ou6v+20Jaj0R1jrvCF77edrHVgoXaK1774vni3ZPxH8HA=="], - "whatwg-encoding": ["whatwg-encoding@3.1.1", "", { "dependencies": { "iconv-lite": "0.6.3" } }, "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ=="], - - "whatwg-mimetype": ["whatwg-mimetype@4.0.0", "", {}, "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg=="], - - "whatwg-url": ["whatwg-url@14.2.0", "", { "dependencies": { "tr46": "^5.1.0", "webidl-conversions": "^7.0.0" } }, "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw=="], + "whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], @@ -2581,8 +2462,6 @@ "word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="], - "wordwrap": ["wordwrap@1.0.0", "", {}, "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q=="], - "workerd": ["workerd@1.20250816.0", "", { "optionalDependencies": { "@cloudflare/workerd-darwin-64": "1.20250816.0", "@cloudflare/workerd-darwin-arm64": "1.20250816.0", "@cloudflare/workerd-linux-64": "1.20250816.0", "@cloudflare/workerd-linux-arm64": "1.20250816.0", "@cloudflare/workerd-windows-64": "1.20250816.0" }, "bin": { "workerd": "bin/workerd" } }, "sha512-5gIvHPE/3QVlQR1Sc1NdBkWmqWj/TSgIbY/f/qs9lhiLBw/Da+HbNBTVYGjvwYqEb3NQ+XQM4gAm5b2+JJaUJg=="], "wrangler": ["wrangler@4.31.0", "", { "dependencies": { "@cloudflare/kv-asset-handler": "0.4.0", "@cloudflare/unenv-preset": "2.6.2", "blake3-wasm": "2.1.5", "esbuild": "0.25.4", "miniflare": "4.20250816.0", "path-to-regexp": "6.3.0", "unenv": "2.0.0-rc.19", "workerd": "1.20250816.0" }, "optionalDependencies": { "fsevents": "~2.3.2" }, "peerDependencies": { "@cloudflare/workers-types": "^4.20250816.0" }, "optionalPeers": ["@cloudflare/workers-types"], "bin": { "wrangler": "bin/wrangler.js", "wrangler2": "bin/wrangler.js" } }, 
"sha512-blb8NfA4BGscvSzvLm2mEQRuUTmaMCiglkqHiR3EIque78UXG39xxVtFXlKhK32qaVvGI7ejdM//HC9plVPO3w=="], @@ -2595,10 +2474,6 @@ "xdg-basedir": ["xdg-basedir@5.1.0", "", {}, "sha512-GCPAHLvrIH13+c0SuacwvRYj2SxJXQ4kaVTT5xgL3kPrz56XxkF21IGhjSE1+W0aw7gpBWRGXLCPnPby6lSpmQ=="], - "xml-name-validator": ["xml-name-validator@5.0.0", "", {}, "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg=="], - - "xmlchars": ["xmlchars@2.2.0", "", {}, "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw=="], - "xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="], "xxhash-wasm": ["xxhash-wasm@1.1.0", "", {}, "sha512-147y/6YNh+tlp6nd/2pWq38i9h6mz/EuQ6njIrmW8D1BS5nCqs0P6DG+m6zTGnNz5I+uhZ0SHxBs9BsPrwcKDA=="], @@ -2607,7 +2482,7 @@ "yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], - "yaml": ["yaml@1.10.2", "", {}, "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg=="], + "yaml": ["yaml@2.8.1", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw=="], "yaml-language-server": ["yaml-language-server@1.15.0", "", { "dependencies": { "ajv": "^8.11.0", "lodash": "4.17.21", "request-light": "^0.5.7", "vscode-json-languageservice": "4.1.8", "vscode-languageserver": "^7.0.0", "vscode-languageserver-textdocument": "^1.0.1", "vscode-languageserver-types": "^3.16.0", "vscode-nls": "^5.0.0", "vscode-uri": "^3.0.2", "yaml": "2.2.2" }, "optionalDependencies": { "prettier": "2.8.7" }, "bin": { "yaml-language-server": "bin/yaml-language-server" } }, "sha512-N47AqBDCMQmh6mBLmI6oqxryHRzi33aPFPsJhYy3VTUGCdLHYjGh4FZzpUjRlphaADBBkDmnkM/++KNIOHi5Rw=="], @@ -2651,8 +2526,6 @@ "@astrojs/svelte/vite": ["vite@6.3.5", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ=="], - "@astrojs/yaml2ts/yaml": ["yaml@2.8.1", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw=="], - "@cspotcode/source-map-support/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.9", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ=="], "@epicenter/opencode/hono": ["hono@4.7.10", "", {}, "sha512-QkACju9MiN59CKSY5JsGZCYmPZkA6sIW6OFCUp7qDjZu6S6KHtJHhAc9Uy9mV9F8PJ1/HQ3ybZF2yjCa/73fvQ=="], @@ -2673,8 +2546,6 @@ "@eslint/eslintrc/js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, 
"sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="], - "@hey-api/json-schema-ref-parser/js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="], - "@humanfs/node/@humanwhocodes/retry": ["@humanwhocodes/retry@0.3.1", "", {}, "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="], "@libsql/hrana-client/node-fetch": ["node-fetch@3.3.2", "", { "dependencies": { "data-uri-to-buffer": "^4.0.0", "fetch-blob": "^3.1.4", "formdata-polyfill": "^4.0.10" } }, "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA=="], @@ -2723,10 +2594,12 @@ "@repo/constants/@types/node": ["@types/node@20.19.11", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-uug3FEEGv0r+jrecvUUpbY8lLisvIjg6AAic6a2bSP5OEOLeJsDSnvhCDov7ipFFMXS3orMpzlmi0ZcuGkBbow=="], - "@repo/svelte-utils/svelte": ["svelte@5.14.4", "", { "dependencies": { "@ampproject/remapping": "^2.3.0", "@jridgewell/sourcemap-codec": "^1.5.0", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "acorn-typescript": "^1.4.13", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "esm-env": "^1.2.1", "esrap": "^1.3.1", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-2iR/UHHA2Dsldo4JdXDcdqT+spueuh+uNYw1FoTKBbpnFEECVISeqSo0uubPS4AfBE0xI6u7DGHxcdq3DTDmoQ=="], - "@repo/ui/@lucide/svelte": ["@lucide/svelte@0.525.0", "", { "peerDependencies": { "svelte": "^5" } }, "sha512-dyUxkXzepagLUzL8jHQNdeH286nC66ClLACsg+Neu/bjkRJWPWMzkT+H0DKlE70QdkicGCfs1ZGmXCc351hmZA=="], + "@repo/ui/svelte": ["svelte@5.38.6", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-ltBPlkvqk3bgCK7/N323atUpP3O3Y+DrGV4dcULrsSn4fZaaNnOmdplNznwfdWclAgvSr5rxjtzn/zJhRm6TKg=="], + + "@repo/whispering/svelte": ["svelte@5.38.6", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-ltBPlkvqk3bgCK7/N323atUpP3O3Y+DrGV4dcULrsSn4fZaaNnOmdplNznwfdWclAgvSr5rxjtzn/zJhRm6TKg=="], + "@rollup/plugin-inject/estree-walker": ["estree-walker@2.0.2", "", {}, "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w=="], "@rollup/pluginutils/estree-walker": ["estree-walker@2.0.2", "", {}, "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w=="], @@ -2747,6 +2620,10 @@ "@tailwindcss/typography/postcss-selector-parser": ["postcss-selector-parser@6.0.10", "", { "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w=="], + "@tanstack/svelte-query/svelte": 
["svelte@5.38.6", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-ltBPlkvqk3bgCK7/N323atUpP3O3Y+DrGV4dcULrsSn4fZaaNnOmdplNznwfdWclAgvSr5rxjtzn/zJhRm6TKg=="], + + "@tanstack/svelte-query-devtools/svelte": ["svelte@5.38.6", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-ltBPlkvqk3bgCK7/N323atUpP3O3Y+DrGV4dcULrsSn4fZaaNnOmdplNznwfdWclAgvSr5rxjtzn/zJhRm6TKg=="], + "@types/fontkit/@types/node": ["@types/node@22.17.2", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-gL6z5N9Jm9mhY+U2KXZpteb+09zyffliRkZyZOHODGATyC5B1Jt/7TzuuiLkFsSUMLbS1OLmlj/E+/3KF4Q/4w=="], "@types/node-fetch/@types/node": ["@types/node@22.17.2", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-gL6z5N9Jm9mhY+U2KXZpteb+09zyffliRkZyZOHODGATyC5B1Jt/7TzuuiLkFsSUMLbS1OLmlj/E+/3KF4Q/4w=="], @@ -2795,6 +2672,8 @@ "bits-ui/runed": ["runed@0.29.2", "", { "dependencies": { "esm-env": "^1.0.0" }, "peerDependencies": { "svelte": "^5.7.0" } }, "sha512-0cq6cA6sYGZwl/FvVqjx9YN+1xEBu9sDDyuWdDW1yWX7JF2wmvmVKfH+hVCZs+csW+P3ARH92MjI3H9QTagOQA=="], + "bits-ui/svelte": ["svelte@5.38.6", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-ltBPlkvqk3bgCK7/N323atUpP3O3Y+DrGV4dcULrsSn4fZaaNnOmdplNznwfdWclAgvSr5rxjtzn/zJhRm6TKg=="], + "bits-ui/svelte-toolbelt": ["svelte-toolbelt@0.9.3", "", { "dependencies": { "clsx": "^2.1.1", "runed": "^0.29.0", "style-to-object": "^1.0.8" }, "peerDependencies": { "svelte": "^5.30.2" } }, "sha512-HCSWxCtVmv+c6g1ACb8LTwHVbDqLKJvHpo6J8TaqwUme2hj9ATJCpjCPNISR1OCq2Q4U1KT41if9ON0isINQZw=="], "boxen/chalk": ["chalk@5.6.0", "", {}, "sha512-46QrSQFyVSEyYAgQ22hQ+zDa60YHA4fBstHmtSApj1Y5vKtG27fWowW03jCk5KcbXEWPZUIR894aARCA/G1kfQ=="], @@ -2805,10 +2684,6 @@ "browserify-sign/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="], - "c12/ohash": ["ohash@1.1.6", "", {}, "sha512-TBu7PtV8YkAZn0tSxobKY2n2aAQva936lhRrj6957aDaCf9IEtqsKbgMzXE/F/sjqYOwmrukeORHNLe5glk7Cg=="], - - "c12/pathe": ["pathe@1.1.2", "", {}, "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ=="], - "chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "cipher-base/to-buffer": ["to-buffer@1.2.2", "", { "dependencies": { "isarray": "^2.0.5", "safe-buffer": "^5.2.1", "typed-array-buffer": "^1.0.3" } }, "sha512-db0E3UJjcFhpDhAF4tLo03oli3pwl3dbnzXOUIlRKrp+ldk/VUxzpWYZENsw2SZiuBjHAk7DfB0VU7NKdpb6sw=="], @@ -2819,6 +2694,8 @@ "elliptic/bn.js": ["bn.js@4.12.2", "", {}, "sha512-n4DSx829VRTRByMRGdjQ9iqsN0Bh4OolPsFnaZBLcbi8iXcB+kJ9s7EnRt4wILZNV3kPLHkRVfOc/HvhC3ovDw=="], + "epicenter/svelte": ["svelte@5.38.6", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-ltBPlkvqk3bgCK7/N323atUpP3O3Y+DrGV4dcULrsSn4fZaaNnOmdplNznwfdWclAgvSr5rxjtzn/zJhRm6TKg=="], + "eslint-plugin-svelte/svelte-eslint-parser": ["svelte-eslint-parser@1.3.1", "", { "dependencies": { "eslint-scope": "^8.2.0", "eslint-visitor-keys": "^4.0.0", "espree": "^10.0.0", "postcss": "^8.4.49", "postcss-scss": "^4.0.9", "postcss-selector-parser": "^7.0.0" }, "peerDependencies": { "svelte": "^3.37.0 || ^4.0.0 || ^5.0.0" }, "optionalPeers": ["svelte"] }, "sha512-0Iztj5vcOVOVkhy1pbo5uA9r+d3yaVoE5XPc9eABIWDOSJZ2mOsZ4D+t45rphWCOr0uMw3jtSG2fh2e7GvKnPg=="], "express/cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="], @@ -2829,10 +2706,6 @@ "fetch-blob/web-streams-polyfill": ["web-streams-polyfill@3.3.3", "", {}, "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw=="], - "fs-minipass/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - - "giget/tar": ["tar@6.2.1", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A=="], - "groq-sdk/@types/node": ["@types/node@18.19.123", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-K7DIaHnh0mzVxreCR9qwgNxp3MH9dltPNIEddW9MYUlcKAzm+3grKNSTe2vCJHI1FaLpvpL5JGJrz1UZDKYvDg=="], "groq-sdk/form-data-encoder": ["form-data-encoder@1.7.2", "", {}, "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A=="], @@ -2845,10 +2718,6 @@ "isomorphic-git/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], - "jsdom/decimal.js": ["decimal.js@10.6.0", "", {}, "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg=="], - - "jsdom/ws": ["ws@8.18.3", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="], - "lightningcss/detect-libc": ["detect-libc@2.0.4", "", {}, 
"sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="], "md5.js/hash-base": ["hash-base@3.1.2", "", { "dependencies": { "inherits": "^2.0.4", "readable-stream": "^2.3.8", "safe-buffer": "^5.2.1", "to-buffer": "^1.2.1" } }, "sha512-Bb33KbowVTIj5s7Ked1OsqHUeCpz//tPwR+E2zJgJKo9Z5XolZ9b6bdUgjmYlwnWhoOQKoTd1TYToZGn5mAYOg=="], @@ -2865,7 +2734,7 @@ "mode-watcher/runed": ["runed@0.25.0", "", { "dependencies": { "esm-env": "^1.0.0" }, "peerDependencies": { "svelte": "^5.7.0" } }, "sha512-7+ma4AG9FT2sWQEA0Egf6mb7PBT2vHyuHail1ie8ropfSjvZGtEAx8YTmUjv/APCsdRRxEVvArNjALk9zFSOrg=="], - "node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], + "mode-watcher/svelte": ["svelte@5.38.6", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-ltBPlkvqk3bgCK7/N323atUpP3O3Y+DrGV4dcULrsSn4fZaaNnOmdplNznwfdWclAgvSr5rxjtzn/zJhRm6TKg=="], "node-stdlib-browser/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], @@ -2873,10 +2742,16 @@ "paneforge/runed": ["runed@0.23.4", "", { "dependencies": { "esm-env": "^1.0.0" }, "peerDependencies": { "svelte": "^5.7.0" } }, "sha512-9q8oUiBYeXIDLWNK5DfCWlkL0EW3oGbk845VdKlPeia28l751VpfesaB/+7pI6rnbx1I6rqoZ2fZxptOJLxILA=="], + "paneforge/svelte": ["svelte@5.38.6", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-ltBPlkvqk3bgCK7/N323atUpP3O3Y+DrGV4dcULrsSn4fZaaNnOmdplNznwfdWclAgvSr5rxjtzn/zJhRm6TKg=="], + "paneforge/svelte-toolbelt": ["svelte-toolbelt@0.9.3", "", { "dependencies": { "clsx": "^2.1.1", "runed": "^0.29.0", "style-to-object": "^1.0.8" }, "peerDependencies": { "svelte": "^5.30.2" } }, "sha512-HCSWxCtVmv+c6g1ACb8LTwHVbDqLKJvHpo6J8TaqwUme2hj9ATJCpjCPNISR1OCq2Q4U1KT41if9ON0isINQZw=="], "postcss/nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="], + "postcss-load-config/yaml": ["yaml@1.10.2", "", {}, "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg=="], + + "posthog-js/fflate": ["fflate@0.4.8", "", {}, "sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA=="], + "prompts/kleur": ["kleur@3.0.3", "", {}, "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w=="], "protobufjs/@types/node": ["@types/node@22.17.2", "", { "dependencies": { "undici-types": "~6.21.0" } }, 
"sha512-gL6z5N9Jm9mhY+U2KXZpteb+09zyffliRkZyZOHODGATyC5B1Jt/7TzuuiLkFsSUMLbS1OLmlj/E+/3KF4Q/4w=="], @@ -2905,8 +2780,6 @@ "tauri-plugin-macos-permissions-api/@tauri-apps/api": ["@tauri-apps/api@2.8.0", "", {}, "sha512-ga7zdhbS2GXOMTIZRT0mYjKJtR9fivsXzsyq5U3vjDL0s6DTMwYRm0UHNjzTY5dh4+LSC68Sm/7WEiimbQNYlw=="], - "tr46/punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], - "type-is/mime-types": ["mime-types@3.0.1", "", { "dependencies": { "mime-db": "^1.54.0" } }, "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA=="], "typescript-eslint/@typescript-eslint/parser": ["@typescript-eslint/parser@8.40.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.40.0", "@typescript-eslint/types": "8.40.0", "@typescript-eslint/typescript-estree": "8.40.0", "@typescript-eslint/visitor-keys": "8.40.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-jCNyAuXx8dr5KJMkecGmZ8KI61KBUhkCob+SD+C+I5+Y1FWI2Y3QmY4/cxMCC5WAsZqoEtEETVhUiUMIGCf6Bw=="], @@ -2919,6 +2792,8 @@ "vaul-svelte/runed": ["runed@0.23.4", "", { "dependencies": { "esm-env": "^1.0.0" }, "peerDependencies": { "svelte": "^5.7.0" } }, "sha512-9q8oUiBYeXIDLWNK5DfCWlkL0EW3oGbk845VdKlPeia28l751VpfesaB/+7pI6rnbx1I6rqoZ2fZxptOJLxILA=="], + "vault-demo/svelte": ["svelte@5.38.6", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "esm-env": "^1.2.1", "esrap": "^2.1.0", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-ltBPlkvqk3bgCK7/N323atUpP3O3Y+DrGV4dcULrsSn4fZaaNnOmdplNznwfdWclAgvSr5rxjtzn/zJhRm6TKg=="], + "vscode-languageserver-protocol/vscode-jsonrpc": ["vscode-jsonrpc@8.2.0", "", {}, "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA=="], "widest-line/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], @@ -2993,8 +2868,6 @@ "@eslint/eslintrc/js-yaml/argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="], - "@hey-api/json-schema-ref-parser/js-yaml/argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="], - "@octokit/core/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="], "@octokit/endpoint/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="], @@ -3027,7 +2900,13 @@ "@octokit/rest/@octokit/core/before-after-hook": ["before-after-hook@4.0.0", "", {}, "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ=="], - "@repo/svelte-utils/svelte/esrap": ["esrap@1.4.9", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, 
"sha512-3OMlcd0a03UGuZpPeUC1HxR3nA23l+HEyCiZw3b3FumJIN9KphoGzDJKMXI1S72jVS1dsenDyQC0kJlO1U9E1g=="], + "@repo/ui/svelte/esrap": ["esrap@2.1.0", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA=="], + + "@repo/whispering/svelte/esrap": ["esrap@2.1.0", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA=="], + + "@tanstack/svelte-query-devtools/svelte/esrap": ["esrap@2.1.0", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA=="], + + "@tanstack/svelte-query/svelte/esrap": ["esrap@2.1.0", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA=="], "@typescript-eslint/eslint-plugin/@typescript-eslint/scope-manager/@typescript-eslint/types": ["@typescript-eslint/types@8.40.0", "", {}, "sha512-ETdbFlgbAmXHyFPwqUIYrfc12ArvpBhEVgGAxVYSwli26dn8Ko+lIo4Su9vI9ykTZdJn+vJprs/0eZU0YMAEQg=="], @@ -3059,6 +2938,8 @@ "astro/js-yaml/argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="], + "bits-ui/svelte/esrap": ["esrap@2.1.0", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA=="], + "boxen/string-width/emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], "boxen/string-width/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], @@ -3073,19 +2954,9 @@ "browserify-sign/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="], - "express/mime-types/mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="], - - "fs-minipass/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], - - "giget/tar/chownr": ["chownr@2.0.0", "", {}, "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ=="], - - "giget/tar/minipass": ["minipass@5.0.0", "", {}, "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="], + "epicenter/svelte/esrap": ["esrap@2.1.0", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA=="], - "giget/tar/minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="], - - "giget/tar/mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="], - - "giget/tar/yallist": ["yallist@4.0.0", "", {}, 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "express/mime-types/mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="], "groq-sdk/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="], @@ -3093,12 +2964,12 @@ "md5.js/hash-base/to-buffer": ["to-buffer@1.2.2", "", { "dependencies": { "isarray": "^2.0.5", "safe-buffer": "^5.2.1", "typed-array-buffer": "^1.0.3" } }, "sha512-db0E3UJjcFhpDhAF4tLo03oli3pwl3dbnzXOUIlRKrp+ldk/VUxzpWYZENsw2SZiuBjHAk7DfB0VU7NKdpb6sw=="], - "node-fetch/whatwg-url/tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], - - "node-fetch/whatwg-url/webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="], + "mode-watcher/svelte/esrap": ["esrap@2.1.0", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA=="], "p-locate/p-limit/yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], + "paneforge/svelte/esrap": ["esrap@2.1.0", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA=="], + "paneforge/svelte-toolbelt/runed": ["runed@0.29.2", "", { "dependencies": { "esm-env": "^1.0.0" }, "peerDependencies": { "svelte": "^5.7.0" } }, "sha512-0cq6cA6sYGZwl/FvVqjx9YN+1xEBu9sDDyuWdDW1yWX7JF2wmvmVKfH+hVCZs+csW+P3ARH92MjI3H9QTagOQA=="], "ripemd160/hash-base/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="], @@ -3125,6 +2996,8 @@ "typescript-eslint/@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], + "vault-demo/svelte/esrap": ["esrap@2.1.0", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA=="], + "widest-line/string-width/emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], "widest-line/string-width/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], @@ -3153,8 +3026,6 @@ "boxen/wrap-ansi/strip-ansi/ansi-regex": ["ansi-regex@6.2.0", "", {}, "sha512-TKY5pyBkHyADOPYlRT9Lx6F544mPl0vS5Ew7BJ45hA08Q+t3GjbueLliBWN3sMICk6+y7HdyxSzC4bWS8baBdg=="], - "giget/tar/minizlib/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - 
"md5.js/hash-base/readable-stream/isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="], "md5.js/hash-base/readable-stream/safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="], diff --git a/docs/specs/20250819T155643-vault-architecture.md b/docs/specs/20250819T155643-vault-architecture.md new file mode 100644 index 0000000000..d6289ee4f6 --- /dev/null +++ b/docs/specs/20250819T155643-vault-architecture.md @@ -0,0 +1,84 @@ +# Vault Architecture Overview + +## Discussion + +After a long discussion, we settled on loose requirements for the vault architectures. There was extensive discussion over the user of nano-ids, document-based storage, and having multiple data ingest layers. + +### Key Requirements + +After careful consideration, the following were established as key requirements. + +- **Portability**: Data needs to be serializable/deserializable at the filesystem level. +- **Transparency**: The user can easily inspect the data and its structure. +- **Synchronization**: Data should be syncronizable across devices using a reliable mechanism. + +Some conclusions on the above: + +#### Portability + +Concrete requirements for portability are slightly vague for now. We verbally settled on a "push pull" model, wherein data can be externally reset or pushed to a remote source. See [synchronization](#synchronization) for more details. + +Due to changing requirements, a composable strategy should be adopted, in order to remain flexible and minimize churn in the future. + +#### Transparency + +While SQLite is **not** a proprietary storage format, it was deemed "too opaque" for our needs. We want to ensure that users can easily understand and interact with the data, and synchronize it via a CLI tool. + +#### Synchronization + +One of our maintainers informed us of a few software-based solutions that could be used for synchronization, but expressed their concerns about sustainability. Our primary target for synchronization is Git, while attempting to avoid the possibility of merge conflicts. + +### Strategy + +Main discussion revolved around these two concepts: + +- SQLite-first +- Document-first + +While a document-first approach significantly increases complexity through side effects, it was decided that this would be the target approach moving forward, due to the aforementioned requirements. + +### Considerations + +It is worth noting that our current plan is considered naive. + +- Many problems still unaccounted for. +- Previous preferences/tooling choices may need to be revisited. +- Implementation details are still vague. +- User preferences are not yet fully understood. + +## Architecture + +The `vault-core` package separates responsibilities into: + +- VaultClient: Runs in the app (web/desktop). Holds Adapters for schema/metadata only. Uses RPC to talk to VaultService for operations. +- VaultService: Runs on a server or sidecar process. Holds Importers which implement parse/validate/upsert and own DB/migrations. + +### Key concepts + +- Adapter: Schema-only (drizzle schema and drizzleConfig). Optionally metadata for column descriptions. +- Importer: Encapsulates one data source workflow: id, name, adapter, validator, parse(blob), upsert(db, parsed), and optional views. +- Service DB: The service owns the database connection and migration function. 
+ +### Suggested RPC pattern (to be implemented by host app) + +Define a minimal protocol to connect client and service. The exact transport is undetermined and left to the host app. A basic shape could include: + +- importers.list -> returns available importers (id, name) +- importers.migrate { id } -> triggers service-side migration for a single importer +- import.importBlob { id, file } -> uploads a blob and triggers parse+upsert on the service +- schema.describe { id } -> returns human-readable schema info for an adapter (optional; client can compute this locally from Adapter if bundled) + +### Lifecycle + +- Client constructs VaultClient with a list of Adapters for type-safe UI and schema introspection. +- Service constructs VaultService with Importers and a DB connection. On startup, it runs migrations for all importers (or selectively). +- Client submits requests via RPC. Service parses with the selected importer, validates, executes, and responds. +- Synchronization needs to occur via an interface, configured by the host. + +### Synchronization + +WIP + +### Others + +Details like transport, auth, streaming, retries, and backpressure are left TBD. diff --git a/docs/specs/20251003T220750 vault-core-minimal-overview.md b/docs/specs/20251003T220750 vault-core-minimal-overview.md new file mode 100644 index 0000000000..e9b0b032c8 --- /dev/null +++ b/docs/specs/20251003T220750 vault-core-minimal-overview.md @@ -0,0 +1,117 @@ +# Vault Core Minimal Overview + +- Date: 2025-10-03 +- Updated: 2025-10-20 +- Status: Draft +- Owner: Vault core maintainers + +## Architecture Snapshot + +- Adapters expose prefixed Drizzle schema plus: + - versions: ordered tuple array of { tag: '0000', sql: string[] } + - transforms: registry keyed by non-baseline tags + - validator: Standard Schema parser for adapter payloads + - ingestors (optional): file parsers returning validator-ready payloads + - metadata: table and column descriptions (typed via AdapterMetadata) +- Vault orchestrator wires adapters into import, ingest, and export flows without owning IO. It performs multi-adapter import by auto-detecting adapters from file paths. See [packages/vault-core/src/core/vault.ts](packages/vault-core/src/core/vault.ts). +- Adapters remain independent; cross-adapter relationships are composed by hosts using the query interface. + +## Migration Workflow (Plan A) + +1. Adapter authors generate SQL locally (for example with Drizzle) and copy statements into the adapter migrations file. Example: [packages/vault-core/src/adapters/reddit/migrations/versions.ts](packages/vault-core/src/adapters/reddit/migrations/versions.ts). +2. Before touching tables, the vault runs per-adapter SQL migrations; ledger tables are managed automatically. See [packages/vault-core/src/core/migrations.ts](packages/vault-core/src/core/migrations.ts). +3. The export pipeline writes a migration metadata file alongside data files. See [packages/vault-core/src/core/import/migrationMetadata.ts](packages/vault-core/src/core/import/migrationMetadata.ts). + +## Import Path (Files ➜ DB) + +1. Host collects adapter files + codec (`ImportOptions`). +2. `createVault.importData`: + - Runs migrations + - Rehydrates dataset from codec files (skipping metadata directory) + - Detects source tag from metadata when available +3. `runImportPipeline` selects effective versions/transforms, runs `transformAndValidate`, and accepts optional overrides for tests. +4. A **required** `dataValidator` (drizzle-arktype) morphs + validates the transformed dataset. +5.
`replaceAdapterTables` truncates and inserts into each adapter table. + +## Ingest Path (File ➜ DB) + +1. `createVault.ingestData` picks the matching `Ingestor.matches`. +2. `Ingestor.parse` returns the payload. +3. The adapter's Standard Schema `validator` is mandatory; it morphs the value via `runValidation`. +4. `replaceAdapterTables` writes rows (same helper as import). + +## Export Path (DB ➜ Files) + +1. Each adapter table is read via Drizzle. +2. Codec transforms rows (`normalize` / `denormalize`) and writes deterministic file paths using adapter conventions. +3. Migration metadata file is added to the export bundle. + +## Host Responsibilities + +- Supply a Drizzle DB instance (core manages migration ledger tables automatically). +- Pass adapter list to vault (`UniqueAdapterIds` enforces unique IDs). +- Provide codecs for import/export and drizzle-arktype validators for import. +- Offer UI/CLI to run adapter transforms or ingestion pipelines as needed. + +## Key Entry Points + +- [`packages/vault-core/src/core/vault.ts`](packages/vault-core/src/core/vault.ts) +- [`packages/vault-core/src/core/migrations.ts`](packages/vault-core/src/core/migrations.ts) +- [`packages/vault-core/src/core/import/importPipeline.ts`](packages/vault-core/src/core/import/importPipeline.ts) +- [`packages/vault-core/src/core/adapter.ts`](packages/vault-core/src/core/adapter.ts) + +This document is the minimal reference for contributors implementing adapters or host integrations going forward. + +> Plan B (inline diff using Drizzle internals) remains documented in comments in [packages/vault-core/src/core/migrations.ts](packages/vault-core/src/core/migrations.ts) for future exploration. + +## Import Path (multi-adapter: files to DB) + +1. Host collects a bundle as a map of file paths to File objects and selects a codec. The codec determines the file extension and normalize/denormalize behavior. See [packages/vault-core/src/core/codec.ts](packages/vault-core/src/core/codec.ts). +2. The vault import groups files by detected adapter ID using the path convention adapterId/tableName/pk.json. Unknown adapters are skipped. +3. For each detected adapter, the vault: + - runs SQL migrations for that adapter + - parses each file, enforces the codec file extension, and skips the migration metadata directory + - denormalizes records with the codec and filters to actual table columns + - applies the adapter versions and transforms during the import pipeline. See [packages/vault-core/src/core/import/importPipeline.ts](packages/vault-core/src/core/import/importPipeline.ts). + - validates the transformed dataset using `drizzle-arktype` (NOT THE ADAPTER VALIDATOR) + - replaces adapter tables atomically by truncating and inserting rows +4. When present, the migration tag is detected from the metadata file and used for transform selection. + +### Edge cases and errors + +- Unknown adapter in a bundle: skipped +- Unknown table for a detected adapter: error with explicit message +- Wrong codec extension: error for the specific file +- No adapter validator: error for that adapter + +## Ingest Path (single file to DB) + +1. The vault selects the matching ingestor based on adapter ingestors metadata. +2. The ingestor parses the file and returns a payload in the adapter's expected shape. +3. The adapter's Standard Schema validator morphs and validates the payload. +4. The vault replaces the adapter tables using the same helper as the import path. + +## Export Path (DB to files) + +1. Each adapter table is read via Drizzle from the host-supplied database.
+2. The codec normalizes rows and writes deterministic file paths using adapter conventions. +3. A migration metadata file is added to the export bundle. + +## Host Responsibilities + +- Supply a Drizzle-compatible database; vault-core manages migration ledger tables automatically. +- Pass the adapter list into the vault; adapter IDs must be unique. +- Provide a codec for import and export; callers do not pass validators or transform overrides. +- Execute vault operations in an environment that supports DDL (for example a server runtime backed by SQLite) to allow migrations to run. +- Offer UI or CLI to trigger import, export, and ingest operations as appropriate for your app. + +## Key Entry Points + +- [packages/vault-core/src/core/vault.ts](packages/vault-core/src/core/vault.ts) +- [packages/vault-core/src/core/migrations.ts](packages/vault-core/src/core/migrations.ts) +- [packages/vault-core/src/core/import/importPipeline.ts](packages/vault-core/src/core/import/importPipeline.ts) +- [packages/vault-core/src/core/adapter.ts](packages/vault-core/src/core/adapter.ts) +- [packages/vault-core/src/core/import/migrationMetadata.ts](packages/vault-core/src/core/import/migrationMetadata.ts) +- [packages/vault-core/src/core/codec.ts](packages/vault-core/src/core/codec.ts) + +This document is a minimal reference for contributors implementing adapters or host integrations. diff --git a/docs/vault-core-diagram.md b/docs/vault-core-diagram.md index 8f4066f7fa..fe82430204 100644 --- a/docs/vault-core-diagram.md +++ b/docs/vault-core-diagram.md @@ -1,8 +1,8 @@ -# Vault Core: architecture and adapter relationships +# Vault Core: architecture and adapter relationships (new) -Purpose: explain core functionality, how adapters plug in, and how data flows at runtime. This is a snapshot of the current code to help coworkers review what is exposed and how it connects. +Purpose: explain core functionality, how adapters plug in, and how data flows at runtime. Snapshot of the current VaultService-centric design. -Quick links to top level barrels: +Quick links - [src/index.ts](/packages/vault-core/src/index.ts) - [src/core/index.ts](/packages/vault-core/src/core/index.ts) @@ -10,138 +10,128 @@ Quick links to top level barrels: Core surfaces -- Vault container - - Class: [Vault()](/packages/vault-core/src/core/vault.ts) - - Init and migrations: [migrate()](/packages/vault-core/src/core/vault.ts) - - Import flow: [importBlob()](/packages/vault-core/src/core/vault.ts) - - Current layout exposure: [getCurrentLayout()](/packages/vault-core/src/core/vault.ts) - - Row counter used in summary: [countRecords()](/packages/vault-core/src/core/vault.ts) -- Vault configuration contract - - Interface: [VaultConfig()](/packages/vault-core/src/core/config.ts) - - Caller supplies: - - Database: any Drizzle SQLite database compatible with adapter schemas; see [CompatibleDB()](/packages/vault-core/src/core/adapter.ts) - - Platform migrate function: e.g. 
drizzle-orm migrator for libsql or better; passed to Vault -- Adapter contract and helper - - Contract: [Adapter()](/packages/vault-core/src/core/adapter.ts) - - Factory helper: [defineAdapter()](/packages/vault-core/src/core/adapter.ts) -- Schema readability utility - - Human readable schema and metadata merge: [readableSchemaInfo()](/packages/vault-core/src/core/strip.ts) +- VaultService orchestrator + - Class: [VaultService](/packages/vault-core/src/core/vault-service.ts) + - Blob import: `importBlob(blob, importerId)` (ArkType validation ON) + - Filesystem export/import: `export(importerId, store)`, `import(importerId, store)` (no ArkType) + - Migration orchestration: `migrateImportMigrate(importerId, store, { targetTag })` + - Optional Git helpers via SyncEngine: `gitPull()`, `gitCommit(msg)`, `gitPush()` +- Vault configuration + - Interface: [VaultServiceConfig](/packages/vault-core/src/core/config.ts) + - Caller supplies: Drizzle DB ([CompatibleDB](/packages/vault-core/src/core/adapter.ts)), `migrateFunc`, optional `syncEngine`, a single Markdown `codec`, and a convention profile +- Conventions & Codec + - Convention is part of [codec.ts](/packages/vault-core/src/core/codec.ts) via `defaultConvention()` + - Codec: [markdown](/packages/vault-core/src/codecs/markdown.ts) +- Sync Engine + - Interface: [SyncEngine](/packages/vault-core/src/core/sync.ts) + - Git implementation: [GitSyncEngine](/packages/vault-core/src/sync/git.ts) + - File store: [LocalFileStore](/packages/vault-core/src/fs/local-file-store.ts) +- Migrations utilities + - Module: [migrations.ts](/packages/vault-core/src/core/migrations.ts) + - Reads drizzle journal, plans SQL, drops adapter tables, applies steps (DB-specific TODOs called out) + Concrete adapter example: Reddit - Adapter factory: [redditAdapter()](/packages/vault-core/src/adapters/reddit/src/index.ts) -- Drizzle schema tables: [adapters/reddit/src/schema.ts](/packages/vault-core/src/adapters/reddit/src/schema.ts) -- Natural language column metadata: [metadata()](/packages/vault-core/src/adapters/reddit/src/metadata.ts) -- ArkType validation schema: [parseSchema()](/packages/vault-core/src/adapters/reddit/src/validation.ts) -- Parser (ZIP containing CSV files): [parseRedditExport()](/packages/vault-core/src/adapters/reddit/src/parse.ts) -- Upsert logic (transactional onConflictDoUpdate): [upsertRedditData()](/packages/vault-core/src/adapters/reddit/src/upsert.ts) -- Drizzle adapter config and migrations path: [adapters/reddit/src/drizzle.config.ts](/packages/vault-core/src/adapters/reddit/src/drizzle.config.ts) -- Config placeholder type: [adapters/reddit/src/config.ts](/packages/vault-core/src/adapters/reddit/src/config.ts) +- Drizzle schema tables: [schema.ts](/packages/vault-core/src/adapters/reddit/src/schema.ts) +- Column metadata: [metadata.ts](/packages/vault-core/src/adapters/reddit/src/metadata.ts) +- ArkType validation: [validation.ts](/packages/vault-core/src/adapters/reddit/src/validation.ts) +- Parser (ZIP of CSVs): [parse.ts](/packages/vault-core/src/adapters/reddit/src/parse.ts) +- Upsert logic: [upsert.ts](/packages/vault-core/src/adapters/reddit/src/upsert.ts) +- Drizzle config & migrations: [drizzle.config.ts](/packages/vault-core/src/adapters/reddit/src/drizzle.config.ts) Mermaid diagram: components and data flow -Arrow convention: arrows point from source to consumer (adapter-owned schema and metadata flow into core utilities; external DB flows into Vault and then into adapter upsert). +Arrows point from source to consumer. 
Adapters own schema and migrations; VaultService owns orchestration. ```mermaid flowchart LR subgraph Core - C1[Vault class] - C2[Vault config] - C3[Readable schema util] - C4[Adapter contract] - C5[Adapter factory helper] + VS[VaultService] + CFG[VaultConfig] + MIG[Migrations utils] end - subgraph Adapter_Reddit + subgraph Conventions_and_Codecs + CNV[Convention profile] + MDM[Markdown codec] + end + + subgraph Sync + SYN[SyncEngine interface] + GSE[GitSyncEngine] + FSI[FileStore interface] + LFS[LocalFileStore] + end + + subgraph Adapters_and_Importers + I1[Importer] A1[Adapter factory] A2[Drizzle schema] A3[Column metadata] A4[ArkType validator] - A5[ZIP CSV parser] + A5[Parser] A6[Upsert logic] A7[Drizzle config] end subgraph External - E1[Migrate function from caller] - E2[SQLite DB] - E3[Adapters barrel export] + MGR[Migrate function] + DB[SQLite DB] + GIT[Git repo] end - %% External injection - E2 -->|database instance| C1 - - %% Initialization path - C1 -->|init| C1M[Migrate step] - C1M -->|per adapter| A7 - A7 -->|adapter migrations folder| E1 - C1M -->|calls| E1 - E1 -->|uses adapter migrations folder| E2 - - %% Import path - U1[import blob] --> C1 - C1 -->|select adapter by id| A1 - A1 --> A5 - A5 --> A4 - A4 -->|asserts| C1U[valid parsed object] - C1U --> A6 - C1 -.->|db instance| A6 - A2 -->|table objects| A6 - A6 -->|insert or update| E2 - - %% Schema exposure - C1 -->|list layout| C3 - A2 -->|provides schema| C3 - A3 -->|provides descriptions| C3 - - %% Exports - X1[Package root export] -.-> C1 - X2[Adapters index export] -.-> A1 + %% Config and injection + DB -->|database instance| VS + CFG --> VS + VS --> SYN + SYN -.-> GSE + SYN --> FSI + GSE --> FSI + FSI --> LFS + LFS -.-> GIT + + %% Initialization & migrations + VS -->|migrate to head| MGR + MGR -->|uses| A7 + A7 -->|migrations folder| MGR + VS -->|use| MIG + + %% Blob import + U1[importBlob] --> VS + VS -->|select importer| I1 + I1 --> A5 --> A4 --> V1[validated data] + V1 --> A6 -->|insert or update| DB + A2 --> A6 + + %% Filesystem export/import + VS -->|export| MDM + VS -->|read and write| FSI + FSI --> LFS + LFS -->|files| GIT + VS -->|import no ArkType| MDM + MDM -->|parse, de/normalize| VS + VS -->|upsert via importer| A6 + + %% Migrate → Import → Migrate + VS -->|drop tables| MIG + VS -->|plan to tag| MIG + VS -->|apply plan| MIG + VS -->|import vault| LFS + VS -->|migrate to head| MGR %% Styling linkStyle default curve: basis ``` -Legend with exact code references - -- Core - - C1 Vault class → [Vault()](/packages/vault-core/src/core/vault.ts) - - C1M migrate step → [migrate()](/packages/vault-core/src/core/vault.ts) - - C1U validated parsed object produced inside → [importBlob()](/packages/vault-core/src/core/vault.ts) - - C2 Vault config → [VaultConfig()](/packages/vault-core/src/core/config.ts) - - C3 Readable schema util → [readableSchemaInfo()](/packages/vault-core/src/core/strip.ts) - - C4 Adapter contract → [Adapter()](/packages/vault-core/src/core/adapter.ts) - - C5 Adapter factory helper → [defineAdapter()](/packages/vault-core/src/core/adapter.ts) - - X1 Package root export → [src/index.ts](/packages/vault-core/src/index.ts) -- Adapter Reddit - - A1 Adapter factory → [redditAdapter()](/packages/vault-core/src/adapters/reddit/src/index.ts) - - A2 Drizzle schema module → [adapters/reddit/src/schema.ts](/packages/vault-core/src/adapters/reddit/src/schema.ts) - - A3 Column metadata → [metadata()](/packages/vault-core/src/adapters/reddit/src/metadata.ts) - - A4 ArkType validator → 
[parseSchema()](/packages/vault-core/src/adapters/reddit/src/validation.ts) - - A5 ZIP CSV parser → [parseRedditExport()](/packages/vault-core/src/adapters/reddit/src/parse.ts) - - A6 Upsert logic → [upsertRedditData()](/packages/vault-core/src/adapters/reddit/src/upsert.ts) - - A7 Drizzle config → [adapters/reddit/src/drizzle.config.ts](/packages/vault-core/src/adapters/reddit/src/drizzle.config.ts) - - E3 Adapters barrel export → [adapters/index.ts](/packages/vault-core/src/adapters/index.ts) - -Runtime relationships in prose - -- Ownership and boundaries - - SQLite DB instance: constructed externally by the caller and injected via [VaultConfig()](/packages/vault-core/src/core/config.ts) database. [Vault()](/packages/vault-core/src/core/vault.ts) stores this instance and passes it into the adapter’s upsert per the [Adapter()](/packages/vault-core/src/core/adapter.ts) contract. Core does not create or own the DB connection. - - Drizzle schema: defined entirely inside adapters (for example [adapters/reddit/src/schema.ts](/packages/vault-core/src/adapters/reddit/src/schema.ts)). Core never defines tables; it only reads adapter.schema for layout exposure via [readableSchemaInfo()](/packages/vault-core/src/core/strip.ts) and uses adapter.drizzleConfig for migrations during [migrate()](/packages/vault-core/src/core/vault.ts). - -- Initialization and migrations - - A caller prepares adapters and a migrate function via [VaultConfig()](/packages/vault-core/src/core/config.ts), then constructs or calls [Vault.create()](/packages/vault-core/src/core/vault.ts). - - Vault resolves the adapters barrel and scans for a factory whose id matches the selected adapter; for each match, Vault calls the caller’s migrate function with paths from adapter.drizzleConfig. Migrations folders are adapter-local to each adapter (for Reddit see [adapters/reddit/src/drizzle.config.ts](/packages/vault-core/src/adapters/reddit/src/drizzle.config.ts)), executed during [migrate()](/packages/vault-core/src/core/vault.ts). -- Import flow - - The caller invokes [importBlob()](/packages/vault-core/src/core/vault.ts) with a specific adapter id. - - Vault locates the adapter by id and performs parse, validate, upsert in sequence: - - Parse: adapter.parse; Reddit uses [parseRedditExport()](/packages/vault-core/src/adapters/reddit/src/parse.ts) - - Validate: adapter.validator.assert; Reddit uses [parseSchema()](/packages/vault-core/src/adapters/reddit/src/validation.ts) - - Upsert: adapter.upsert; Reddit uses [upsertRedditData()](/packages/vault-core/src/adapters/reddit/src/upsert.ts) - - Vault returns a summary including counts computed by [countRecords()](/packages/vault-core/src/core/vault.ts). -- Schema and metadata exposure - - Callers use [getCurrentLayout()](/packages/vault-core/src/core/vault.ts) to obtain a human readable view of tables and columns, derived from adapter.schema and adapter.metadata via [readableSchemaInfo()](/packages/vault-core/src/core/strip.ts). - -Notes and open edges - -- Adapter discovery during migrations currently uses a dynamic import and id matching inside [migrate()](/packages/vault-core/src/core/vault.ts). Vite or code-based-migration-definitions required to avoid this further. -- Views are supported by the contract via Adapter.views, but the Reddit adapter does not define any yet. Adding one or two examples would clarify patterns for future adapters. 
+Legend and notes + +- VS VaultService → [vault-service.ts](/packages/vault-core/src/core/vault-service.ts) +- MIG Migrations utils → [migrations.ts](/packages/vault-core/src/core/migrations.ts) +- CFG VaultConfig → [config.ts](/packages/vault-core/src/core/config.ts) +- CNV ConventionProfile → [codec.ts](/packages/vault-core/src/core/codec.ts) +- Codec → [markdown.ts](/packages/vault-core/src/codecs/markdown.ts) +- Sync surfaces → [sync.ts](/packages/vault-core/src/core/sync.ts) (SyncEngine), [git.ts](/packages/vault-core/src/sync/git.ts) +- File surfaces → [fs.ts](/packages/vault-core/src/core/fs.ts) (FileStore interface), [local-file-store.ts](/packages/vault-core/src/fs/local-file-store.ts) +- DB is external; core does not create or own the connection +- ArkType validation is only for blob imports; FS imports rely on DB constraints and upsert shaping diff --git a/packages/shared/src/index.ts b/packages/shared/src/index.ts index 4079da7b56..a9c4c30766 100644 --- a/packages/shared/src/index.ts +++ b/packages/shared/src/index.ts @@ -1 +1 @@ -export { safeLookup } from './object'; +export { safeLookup } from './object.js'; diff --git a/packages/vault-core/README.md b/packages/vault-core/README.md index 88dcb2d1ce..28086c7bb8 100644 --- a/packages/vault-core/README.md +++ b/packages/vault-core/README.md @@ -1,42 +1,172 @@ -# Vault Core +--- -This package houses the interfaces & supporting code for Epicenter's upcoming adapter ecosystem. +# vault-core -> This spec is in alpha, and will likely change significantly in the near future. Breaking changes will occur up until the 1.0 release. +> This represents a very early, proof-of-concept version of Vault Core. The API, features, architecture, applications, and more are all subject to significant change. +> Take this as a sneak-peek into ongoing work, not a stable library. -## Goal +A small, adapter-driven data vault. Each adapter owns its schema, validation, migrations, and ingest rules; the vault orchestrates import, export, and ingest, without coupling adapters together. Apps compose multiple adapters at runtime to build cross-adapter UX. -The goal of the adapter system is two-fold. +Highlights -1. Create a modular, extensible, centralized hub for your exported third-party data -2. Expose available tables/features/metadata for access via SQLite explorers, LLMs, MCP, and other tools +- Independent adapters: schemas are table-prefixed and migration-scoped per adapter +- Deterministic import/export shapes with codec-normalized files +- Per-adapter validation (Standard Schema; arktype-backed) enforced at import/ingest +- Migrations applied automatically before writes +- Multi-adapter import: one call processes a mixed bundle by auto-detecting adapters from file paths +- Runtime traversal: get a Drizzle-compatible db and adapter tables map for app-layer joins -## Summary +Quick links -Adapters are build on [Drizzle ORM](https://orm.drizzle.team/) and [ArkType](https://arktype.dev/). 
They expose: +- Vault constructor: [`createVault()`](packages/vault-core/src/core/vault.ts:31) +- Import (multi-adapter): [`importData()`](packages/vault-core/src/core/vault.ts:176) +- Export: [`exportData()`](packages/vault-core/src/core/vault.ts:116) +- Ingest (adapter-owned parsers): [`ingestData()`](packages/vault-core/src/core/vault.ts:284) +- Runtime traversal: [`getQueryInterface()`](packages/vault-core/src/core/vault.ts:317) +- Adapter definition: [`defineAdapter`](packages/vault-core/src/core/adapter.ts:137) -- SQLite table schema for persisting data - - Natural language mappings for tables and columns -- ArkType schema for data parsing -- Supporting parse and upsert functions +## Core concepts -> Formal specs for standardizing adapter behavior and capabilities are forthcoming. +Adapters -## Lifecycle +- An adapter bundles: + - Drizzle schema with table names prefixed by adapter id (e.g., `example_notes_items`) + - Versions and transforms for migrations + - A Standard Schema validator (arktype-backed) for parsed dataset shapes + - Optional ingestors for raw file formats +- Table prefixing and primary keys are compile-time checked; see [`core/adapter.ts`](packages/vault-core/src/core/adapter.ts:86). +- Adapters remain independent; the vault never couples their storage. -> This may change as drastically as we determine our requirements. +Validation -A core concept of adapters is modularity. Since we are relying on Drizzle for our schemas, we can easily add/remove/migrate tables at runtime, allowing for greater flexibility and adaptability to changing data requirements. +- During import and ingest, the vault runs a Standard Schema validator: + - Import uses the adapter’s validator to validate the parsed dataset before table replacement + - Ingest uses the adapter’s validator on the ingestor output +- Failed validation aborts the operation with detailed path messages; see error formatting in [`runValidation`](packages/vault-core/src/core/vault.ts:44). -> It hasn't been decided yet how adapters can be added/removed, but we are considering options such as configuration files, admin interfaces, or programmatic APIs. +Migrations -## Future Concerns +- Before touching an adapter’s tables, the vault ensures its SQL migrations are applied via [`runStartupSqlMigrations`](packages/vault-core/src/core/vault.ts:37). +- The export flow writes a per-adapter migration metadata file; import detects this metadata and records its tag. -- Virtual tables via Drizzle -- Type-safety to prevent table-name collisions +Codecs and format -## Status +- A codec defines parse/stringify and normalization/denormalization rules; JSON is the default via [`jsonFormat`](packages/vault-core/src/codecs/json.ts:3). +- Paths follow a deterministic convention (adapterId/tableName/pk.json) computed with the default convention used by export. -- [x] Primitive interfaces -- [ ] Supporting code -- [ ] Finalized interfaces +Compatible DB + +- The vault expects a Drizzle-compatible, SQLite-compatible DB. Use a server environment for DDL-backed features. Tests may make use of `bun:sqlite` in-memory DB. + +## API overview + +Construct + +- Create a vault bound to a DB instance and a set of adapters: + - [`createVault(options)`](packages/vault-core/src/core/vault.ts:31) where options include `database` (Drizzle-compatible) and `adapters` (array of adapter instances). 
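+
+For example, constructing a vault might look like this (a minimal sketch, not verified against the current API; it assumes `createVault` is re-exported from the package root and `redditAdapter` from the adapters subpath shown in the quick links):
+
+```ts
+import { Database } from 'bun:sqlite';
+import { drizzle } from 'drizzle-orm/bun-sqlite';
+import { createVault } from '@repo/vault-core';
+import { redditAdapter } from '@repo/vault-core/adapters/reddit';
+
+// In-memory DB for illustration; real hosts should use a file-backed DB
+// so migrations and imported data persist.
+const db = drizzle(new Database(':memory:'));
+
+const vault = createVault({
+  database: db,
+  adapters: [redditAdapter()],
+});
+```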
+ +Export + +- Export adapter data to a codec-normalized file bundle: + - [`exportData({ codec })`](packages/vault-core/src/core/vault.ts:116) returns a `Map` of `path -> File`. + - Exports all registered adapters by default; per-adapter migration metadata is included. + +Import (multi-adapter) + +- Import a mixed bundle of files; the vault auto-detects adapters based on path and processes each adapter independently: + - [`importData({ files, codec })`](packages/vault-core/src/core/vault.ts:176) + - `files`: Map of `path -> File` where path segments encode `adapterId/tableName/...` + - `codec`: the codec used for decode/denormalize (e.g., `jsonFormat`) + - For each detected adapter: + - Ensure migrations + - Parse files into a dataset keyed by de-prefixed table names + - Run the import pipeline (versions/transforms) + - Validate using the adapter’s Standard Schema validator + - Replace the adapter’s tables atomically + +Ingest + +- Run adapter-owned parsers on raw files: + - [`ingestData({ adapter, file })`](packages/vault-core/src/core/vault.ts:284) + - The vault selects the first ingestor that matches and validates the parsed dataset before replacement. + +Runtime traversal + +- Query at runtime and compose cross-adapter views in the app: + - [`getQueryInterface()`](packages/vault-core/src/core/vault.ts:317) returns `{ db, tables }` + - `db`: Drizzle-compatible DB + - `tables`: map of `adapterId -> adapter.schema`, suitable for joins + +## Import/export formats + +Exported paths + +- Paths follow `adapterId/tableName/pk.json`, for example: + - `reddit/reddit_posts/t3_abc123.json` + - `entity_index/entity_index_entities/entity:subreddit:sveltejs.json` + +Record content + +- JSON records include only table columns (normalized by codec). Primary key values are encoded in the path, not the JSON body. + +Import bundle rules + +- A single bundle can contain files for multiple adapters; `importData` will: + - Skip unknown adapter paths + - Throw on wrong file extensions + - Throw on unknown tables in a known adapter + - Replace tables for each adapter it successfully processes + +## Adapter authoring + +Minimal shape (TypeScript) + +- Use [`defineAdapter`](packages/vault-core/src/core/adapter.ts:137) to declare: + - `id` (string), `schema` (prefixed Drizzle tables), `versions`, `transforms`, + - Optional `metadata` for documentation/UI + - Optional `ingestors` for external inputs + - A Standard Schema `validator` for ingest data +- Prefixing: table names must begin with `adapterId_` (enforced at the type level); e.g., `example_notes_items` + +Validation shape + +- The parsed dataset shape is a de-prefixed object keyed by table names: + - Example: `{ items: Array, note_links?: Array }` +- Standard Schema (arktype) validators should accept this parsed shape and return the same shape; the vault serializes/denormalizes for storage as needed.
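+
+Putting the two subsections above together, a hypothetical single-table adapter could look like the following (a sketch under assumptions: `defineAdapter` re-exported from the package root and option names matching the bullets above; the real signature may differ):
+
+```ts
+import { sqliteTable, text } from 'drizzle-orm/sqlite-core';
+import { type } from 'arktype';
+import { defineAdapter } from '@repo/vault-core';
+
+// Table name carries the `example_notes_` prefix required by the adapter id.
+const items = sqliteTable('example_notes_items', {
+  id: text('id').primaryKey(),
+  body: text('body').notNull(),
+});
+
+export const exampleNotesAdapter = defineAdapter({
+  id: 'example_notes',
+  schema: { example_notes_items: items },
+  // Single baseline version; SQL is inlined so migrations stay environment-agnostic.
+  versions: [
+    {
+      tag: '0000',
+      sql: [
+        'CREATE TABLE IF NOT EXISTS example_notes_items (id text PRIMARY KEY NOT NULL, body text NOT NULL);',
+      ],
+    },
+  ],
+  transforms: {},
+  // The validator sees the de-prefixed dataset shape: { items: [...] }.
+  validator: type({
+    items: type({ id: 'string', body: 'string' }).array(),
+  }),
+});
+```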
+ +## Server-backed ingestion + +Migrations require DDL, so run vault operations server-side with a DB like Bun SQLite + Drizzle: + +- For a reference implementation, see: + - Vault service singleton: [`apps/vault-demo/src/lib/server/vaultService.ts`](apps/vault-demo/src/lib/server/vaultService.ts + - Endpoints (SvelteKit +server.ts): + - Ingest: [`apps/vault-demo/src/routes/api/vault/ingest/+server.ts`](apps/vault-demo/src/routes/api/vault/ingest/+server.ts + - Import (multi-adapter): [`apps/vault-demo/src/routes/api/vault/import/+server.ts`](apps/vault-demo/src/routes/api/vault/import/+server.ts + - Export: [`apps/vault-demo/src/routes/api/vault/export/+server.ts`](apps/vault-demo/src/routes/api/vault/export/+server.ts + - Counts: [`apps/vault-demo/src/routes/api/vault/tables/+server.ts`](apps/vault-demo/src/routes/api/vault/tables/+server.ts + +## Demo app + +A minimal SvelteKit demo shows: + +- Import/export page calling `importData`/`exportData` via server endpoints: + - [`apps/vault-demo/src/routes/import-export/+page.svelte`](apps/vault-demo/src/routes/import-export/+page.svelte:1) +- Reddit GDPR ingest + entity suggestions → user-curated Entity Index import + - [`apps/vault-demo/src/routes/reddit-upload/+page.svelte`](apps/vault-demo/src/routes/reddit-upload/+page.svelte:1) + - Heuristics for subreddits, users, domains: + - [`apps/vault-demo/src/lib/extract/redditEntities.ts`](apps/vault-demo/src/lib/extract/redditEntities.ts +- Runtime cross-adapter UI (Dashboard, Entities, Notes) using `getQueryInterface()` + +## Notes on the new multi-adapter import + +- One-call, multi-adapter import is now the default +- Import replaces (not merges) the target adapter’s tables +- Unknown adapters and migration metadata files are skipped +- Strict validation is enforced per adapter; failed validation aborts that adapter’s import + +## Limitations and tips + +- Ensure your DB supports DDL; client-only mocks are not compatible with migrations +- The vault’s path convention is authoritative for identifying adapters/tables during import +- Use the adapter’s Standard Schema validator for dataset shapes; do not rely on caller-provided validators diff --git a/packages/vault-core/package.json b/packages/vault-core/package.json index 9ba0e63d53..dca94455ce 100644 --- a/packages/vault-core/package.json +++ b/packages/vault-core/package.json @@ -5,20 +5,26 @@ "type": "module", "exports": { ".": "./src/index.ts", - "./adapters/*": "./src/adapters/index.ts" + "./codecs": "./src/codecs/index.ts", + "./adapters/*": "./src/adapters/*/index.ts", + "./utils/*": "./src/utils/*/index.ts" + }, + "scripts": { + "check": "tsc --noEmit" }, "devDependencies": { + "@standard-schema/spec": "^1.0.0", + "bun-types": "^1.3.0", + "drizzle-kit": "catalog:", + "tsx": "^4.20.6", "typescript": "catalog:" }, - "scripts": { - "format": "echo 'skip format'", - "format:check": "echo 'skip format check'", - "lint": "echo 'skip lint'", - "check": "echo \"TODO add this in a later commit\"" - }, "dependencies": { "arktype": "catalog:", - "drizzle-kit": "catalog:", - "drizzle-orm": "catalog:" + "drizzle-arktype": "catalog:", + "drizzle-orm": "catalog:", + "fflate": "^0.8.2", + "toml": "^3.0.0", + "yaml": "^2.8.1" } } diff --git a/packages/vault-core/scripts/migrations-add.ts b/packages/vault-core/scripts/migrations-add.ts new file mode 100644 index 0000000000..aaadd32865 --- /dev/null +++ b/packages/vault-core/scripts/migrations-add.ts @@ -0,0 +1,407 @@ +#!/usr/bin/env node +/** + * POC migrations add script - moved to 
packages/vault-core/scripts
+ * Runs relative to CWD and supports copying drizzle migration files if present.
+ */
+
+import { promises as fs } from 'node:fs';
+import path from 'node:path';
+
+type Args = {
+  adapter?: string | undefined;
+  tag?: string | undefined;
+  sqlId?: string | undefined;
+  title?: string | undefined;
+  noTransform?: boolean | undefined;
+  cwd: string;
+};
+
+function printUsage() {
+  console.log(
+    `
+POC: migrations add
+
+Required:
+  --adapter, -a    Adapter id (e.g., reddit)
+  --tag, -t        4-digit version tag (e.g., 0002)
+
+Optional:
+  --sql-id, -s     Override SQL source id when auto-detection is undesirable
+  --title          Comment header for migration.sql
+  --no-transform   Skip creating versions/<tag>/transform.ts
+
+Examples:
+  # Print-only (recommended): generates migration.sql and prints a ready-to-paste block
+  bun run packages/vault-core/scripts/migrations-add.poc.ts -a reddit -t 0002 -s 0002_forward_baseline
+`.trim(),
+  );
+}
+
+function parseArgs(argv: string[], cwd: string): Args {
+  const args: Args = { cwd };
+  const rest = [...argv];
+  for (let i = 0; i < rest.length; i++) {
+    const cur = rest[i];
+    if (cur === undefined) throw new Error('unexpected undefined arg');
+
+    if (cur === '--adapter' || cur === '-a') {
+      args.adapter = rest[i + 1];
+      i++;
+    } else if (cur.startsWith('--adapter=')) {
+      args.adapter = cur.split('=')[1];
+    } else if (cur === '--tag' || cur === '-t') {
+      args.tag = rest[i + 1];
+      i++;
+    } else if (cur.startsWith('--tag=')) {
+      args.tag = cur.split('=')[1];
+    } else if (cur === '--sql-id' || cur === '-s') {
+      args.sqlId = rest[i + 1];
+      i++;
+    } else if (cur.startsWith('--sql-id=')) {
+      args.sqlId = cur.split('=')[1];
+    } else if (cur === '--title') {
+      args.title = rest[i + 1];
+      i++;
+    } else if (cur.startsWith('--title=')) {
+      args.title = cur.split('=')[1];
+    } else if (cur === '--no-transform') {
+      args.noTransform = true;
+    } else {
+      // ignore unknown args for POC
+    }
+  }
+  return args;
+}
+
+function assertValid(args: Args) {
+  const errors: string[] = [];
+  if (!args.adapter) errors.push('missing --adapter');
+  if (!args.tag) errors.push('missing --tag');
+
+  const tag = args.tag ?? '';
+  const sqlId = args.sqlId ?? '';
+
+  if (!/^\d{4}$/.test(tag)) {
+    errors.push(`invalid tag '${tag}', expected 4 digits like 0002`);
+  }
+  if (sqlId && !/^[A-Za-z0-9_-]+$/.test(sqlId)) {
+    errors.push(
+      `invalid sql-id '${sqlId}', allowed [A-Za-z0-9_\\-], e.g. 0002_add_foo`,
+    );
+  }
+
+  if (errors.length) {
+    for (const e of errors) console.error('•', e);
+    console.error();
+    printUsage();
+    process.exit(1);
+  }
+}
+
+async function pathExists(p: string) {
+  try {
+    await fs.access(p);
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+async function ensureDir(p: string) {
+  await fs.mkdir(p, { recursive: true });
+}
+
+async function writeFileIfAbsent(p: string, content: string) {
+  if (await pathExists(p)) return false;
+  await fs.writeFile(p, content, 'utf8');
+  return true;
+}
+
+function sqlHeader(
+  adapter: string,
+  tag: string,
+  sqlId: string,
+  title?: string,
+) {
+  const lines = [
+    `-- adapter: ${adapter}`,
+    `-- tag: ${tag}`,
+    `-- sqlId: ${sqlId}`,
+    ...(title ? [`-- title: ${title}`] : []),
+    '--',
+    '-- Forward-only migration generated by POC scaffolder.',
+    '-- Add your DDL statements below.',
+    '',
+  ];
+  return lines.join('\n');
+}
+
+function transformStub(tag: string) {
+  return `import type { DataTransform } from '../../../core/migrations';
+
+export const transform_${tag}: DataTransform = async (input) => {
+  // TODO: transform data from previous version to ${tag}
+  return input;
+};
+`;
+}
+
+/**
+ * Split SQL text into executable statements.
+ * Mirrors core split logic (drizzle '--> statement-breakpoint' or semicolons).
+ */
+function splitSqlText(text: string): string[] {
+  if (text.includes('--> statement-breakpoint')) {
+    return text
+      .split(/-->\s*statement-breakpoint\s*/g)
+      .map((s) => s.trim())
+      .filter((s) => s.length > 0);
+  }
+  return text
+    .split(/;\s*(?:\r?\n|$)/g)
+    .map((s) => s.trim())
+    .filter((s) => s.length > 0)
+    .map((s) => (s.endsWith(';') ? s : `${s};`));
+}
+
+/**
+ * Format a versions.ts entry block for copy-paste into an adapter's versions tuple.
+ * Mirrors the style used in existing adapters (backticked SQL strings, indented).
+ */
+function formatVersionBlock(tag: string, sqlStatements: string[]): string {
+  const body = sqlStatements
+    .map((stmt) => {
+      // escape backticks and indent multi-line for readability
+      const escaped = stmt.replace(/`/g, '\\`').replace(/\r?\n/g, '\n\t\t\t');
+      return `\t\t\t\`${escaped}\``;
+    })
+    .join(',\n');
+
+  return [
+    '\t{',
+    `\t\ttag: '${tag}',`,
+    '\t\t// Generated by migrations-add script',
+    '\t\tsql: [',
+    body,
+    '\t\t],',
+    '\t},',
+  ].join('\n');
+}
+
+/**
+ * Try to copy drizzle-produced migration files from common locations under cwd.
+ * This is a best-effort convenience: hosts may produce drizzle migration outputs in different places.
+ */
+async function copyDrizzleMigrationFiles(
+  cwd: string,
+  tag: string,
+  destDir: string,
+  sqlPath: string,
+) {
+  const candidates = [
+    'migrations',
+    'drizzle/migrations',
+    'migrations/drizzle',
+    'drizzle',
+  ];
+  for (const c of candidates) {
+    const full = path.join(cwd, c);
+    if (!(await pathExists(full))) continue;
+    const files = await fs.readdir(full);
+    const matched: string[] = [];
+    for (const f of files) {
+      if (f.includes(tag)) matched.push(f);
+    }
+    // Keep scanning the remaining candidate dirs when this one has no match.
+    if (matched.length === 0) continue;
+
+    for (const f of matched) {
+      const src = path.join(full, f);
+      const dst = path.join(destDir, f);
+      try {
+        await fs.copyFile(src, dst);
+        console.log(`  ✓ copied drizzle file ${src} -> ${dst}`);
+      } catch (err) {
+        console.warn(`  ! failed copying ${src} -> ${dst}: ${err}`);
+      }
+
+      // If .sql, append its contents into migration.sql
+      if (path.extname(f).toLowerCase() === '.sql') {
+        try {
+          const sqlText = await fs.readFile(src, 'utf8');
+          await fs.appendFile(
+            sqlPath,
+            `\n-- == Imported from ${src} ==\n${sqlText}\n`,
+          );
+          console.log(`  ✓ appended SQL from ${src} into ${sqlPath}`);
+        } catch (err) {
+          console.warn(
+            `  ! failed appending SQL from ${src} into ${sqlPath}: ${err}`,
+          );
+        }
+      }
+    }
+
+    // First candidate dir with matches wins.
+    return;
+  }
+}
+
+async function main() {
+  const argv = process.argv.slice(2);
+  if (argv.length === 0 || argv.includes('--help') || argv.includes('-h')) {
+    printUsage();
+    process.exit(0);
+  }
+
+  const args = parseArgs(argv, process.cwd());
+  assertValid(args);
+
+  const adapter = args.adapter as string;
+  const tag = args.tag as string;
+  let sqlId = args.sqlId;
+  const title = args.title;
+
+  // Resolve key paths (operate on CWD)
+  const root = args.cwd;
+  const adapterDir = path.join(
+    root,
+    'packages',
+    'vault-core',
+    'src',
+    'adapters',
+    adapter,
+  );
+  const versionsFilePath = path.join(adapterDir, 'migrations', 'versions.ts');
+  const versionsDir = path.join(adapterDir, 'migrations', 'versions', tag);
+  const sqlPath = path.join(versionsDir, 'migration.sql');
+  const transformPath = path.join(versionsDir, 'transform.ts');
+  const migrationsDir = path.join(adapterDir, 'migrations');
+
+  // Validate adapter and manifest presence
+  if (!(await pathExists(adapterDir))) {
+    console.error(`❌ adapter directory not found: ${adapterDir}`);
+    process.exit(1);
+  }
+  if (!(await pathExists(versionsFilePath))) {
+    console.error(
+      `❌ versions.ts not found for adapter '${adapter}': ${versionsFilePath}`,
+    );
+    process.exit(1);
+  }
+
+  let sqlSourcePath: string | undefined;
+  if (!sqlId) {
+    if (await pathExists(migrationsDir)) {
+      const files = await fs.readdir(migrationsDir);
+      const matches = files.filter(
+        (f) => f.startsWith(`${tag}_`) && f.toLowerCase().endsWith('.sql'),
+      );
+      if (matches.length === 1) {
+        const match = matches[0];
+        if (match === undefined) throw new Error('unexpected undefined match');
+
+        sqlId = path.parse(match).name;
+        sqlSourcePath = path.join(migrationsDir, match);
+        console.log(
+          `  ✓ detected sql-id '${sqlId}' from ${path.relative(root, sqlSourcePath)}`,
+        );
+      } else if (matches.length > 1) {
+        console.error(
+          `❌ multiple SQL files matched tag '${tag}'. Provide --sql-id to disambiguate:`,
+        );
+        for (const match of matches) {
+          console.error(
+            `  ${path.relative(root, path.join(migrationsDir, match))}`,
+          );
+        }
+        process.exit(1);
+      }
+    }
+    if (!sqlId) {
+      console.error(
+        `❌ unable to detect SQL source for tag '${tag}'. Provide --sql-id explicitly.`,
+      );
+      process.exit(1);
+    }
+  } else {
+    const candidate = path.join(migrationsDir, `${sqlId}.sql`);
+    if (await pathExists(candidate)) {
+      sqlSourcePath = candidate;
+    }
+  }
+
+  const resolvedSqlId = sqlId;
+  if (!resolvedSqlId) {
+    throw new Error('failed to resolve sqlId after detection');
+  }
+
+  // Create dirs and files
+  await ensureDir(versionsDir);
+
+  const sqlCreated = await writeFileIfAbsent(
+    sqlPath,
+    sqlHeader(adapter, tag, resolvedSqlId, title),
+  );
+  if (sqlCreated) {
+    console.log(`  ✓ created ${sqlPath}`);
+  } else {
+    console.log(`  • exists ${sqlPath}`);
+  }
+
+  if (sqlCreated && sqlSourcePath) {
+    const sqlText = await fs.readFile(sqlSourcePath, 'utf8');
+    const normalized = sqlText.endsWith('\n') ? sqlText : `${sqlText}\n`;
+    await fs.appendFile(
+      sqlPath,
+      `\n-- == Imported from ${path.relative(root, sqlSourcePath)} ==\n${normalized}`,
+    );
+    console.log(
+      `  ✓ copied ${path.relative(root, sqlSourcePath)} into ${sqlPath}`,
+    );
+  } else if (sqlCreated && !sqlSourcePath) {
+    console.log(
+      `  • no existing SQL artifact found for tag '${tag}' in ${path.relative(root, migrationsDir)}`,
+    );
+  }
+
+  if (!args.noTransform) {
+    const tCreated = await writeFileIfAbsent(transformPath, transformStub(tag));
+    if (tCreated) {
+      console.log(`  ✓ created ${transformPath}`);
+    } else {
+      console.log(`  • exists ${transformPath}`);
+    }
+  } else {
+    console.log('  • skipped transform stub (--no-transform)');
+  }
+
+  // Try to copy drizzle migration files (best-effort) and append .sql to migration.sql
+  await copyDrizzleMigrationFiles(root, tag, versionsDir, sqlPath);
+
+  // Read migration.sql, split into statements
+  const finalSqlText = await fs.readFile(sqlPath, 'utf8');
+  const statements = splitSqlText(finalSqlText);
+
+  // Always print a ready-to-paste block for developer convenience
+  console.log(
+    '\n--- Copy & paste into your adapter migrations/versions.ts ---\n',
+  );
+  console.log(formatVersionBlock(tag, statements));
+  console.log('\n--- end block ---\n');
+
+  // Script operates in print-only mode; developer must manually update migrations/versions.ts
+  console.log(
+    'Print-only mode (no file edits). Manually update migrations/versions.ts with the block above.',
+  );
+
+  console.log('\nDone.');
+}
+
+main().catch((err) => {
+  console.error('❌ Error:', err?.message ?? err);
+  process.exit(1);
+});
diff --git a/packages/vault-core/src/adapters/entity-index/index.ts b/packages/vault-core/src/adapters/entity-index/index.ts
new file mode 100644
index 0000000000..18cb7d8ea5
--- /dev/null
+++ b/packages/vault-core/src/adapters/entity-index/index.ts
@@ -0,0 +1 @@
+export { entityIndexAdapter } from './src/adapter';
diff --git a/packages/vault-core/src/adapters/entity-index/migrations/transforms.ts b/packages/vault-core/src/adapters/entity-index/migrations/transforms.ts
new file mode 100644
index 0000000000..44d1f72340
--- /dev/null
+++ b/packages/vault-core/src/adapters/entity-index/migrations/transforms.ts
@@ -0,0 +1,3 @@
+import { defineTransformRegistry } from '../../../core/migrations';
+
+export const entityIndexTransforms = defineTransformRegistry({});
diff --git a/packages/vault-core/src/adapters/entity-index/migrations/versions.ts b/packages/vault-core/src/adapters/entity-index/migrations/versions.ts
new file mode 100644
index 0000000000..4b824bb9c5
--- /dev/null
+++ b/packages/vault-core/src/adapters/entity-index/migrations/versions.ts
@@ -0,0 +1,28 @@
+/**
+ * Entity Index adapter migration versions.
+ * Single baseline version derived from the Drizzle schema in src/adapter.ts.
+ * SQL is inlined to keep migrations environment-agnostic (mirrors Reddit/Notes).
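+ *
+ * A host can replay this baseline at startup with the same core helper the
+ * Reddit adapter README describes (sketch only; assumes the identical call
+ * shape applies to this adapter):
+ *
+ *   await runStartupSqlMigrations(adapter.id, adapter.versions, db, reporter);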
+ */
+import { defineVersions } from '../../../core/migrations';
+
+export const entityIndexVersions = defineVersions({
+  tag: '0000',
+  sql: [
+    `CREATE TABLE \`entity_index_entities\` (
+  \`id\` text PRIMARY KEY NOT NULL,
+  \`name\` text,
+  \`type\` text,
+  \`description\` text,
+  \`public_id\` text,
+  \`created_at\` integer
+);`,
+    `CREATE TABLE \`entity_index_occurrences\` (
+  \`id\` text PRIMARY KEY NOT NULL,
+  \`entity_id\` text,
+  \`source_adapter_id\` text,
+  \`source_table_name\` text,
+  \`source_pk_json\` text,
+  \`discovered_at\` integer
+);`,
+  ],
+});
diff --git a/packages/vault-core/src/adapters/entity-index/src/adapter.ts b/packages/vault-core/src/adapters/entity-index/src/adapter.ts
new file mode 100644
index 0000000000..11b5934eff
--- /dev/null
+++ b/packages/vault-core/src/adapters/entity-index/src/adapter.ts
@@ -0,0 +1,62 @@
+import { defineAdapter } from '@repo/vault-core';
+import { integer, sqliteTable, text } from 'drizzle-orm/sqlite-core';
+import type { AdapterMetadata } from '../../../core/adapter';
+import { entityIndexTransforms } from '../migrations/transforms';
+import { entityIndexVersions } from '../migrations/versions';
+
+/**
+ * Drizzle schema for the entity_index adapter.
+ * Table names must be prefixed with the adapter id.
+ */
+export const schema = {
+  entity_index_entities: sqliteTable('entity_index_entities', {
+    id: text('id').primaryKey(),
+    name: text('name'),
+    type: text('type'),
+    description: text('description'),
+    public_id: text('public_id'),
+    // Unix epoch seconds as integer (Drizzle's 'timestamp' mode maps Date <-> seconds)
+    created_at: integer('created_at', { mode: 'timestamp' }),
+  }),
+  entity_index_occurrences: sqliteTable('entity_index_occurrences', {
+    id: text('id').primaryKey(),
+    entity_id: text('entity_id'),
+    source_adapter_id: text('source_adapter_id'),
+    source_table_name: text('source_table_name'),
+    // JSON string (canonical) of the primary key from the source table
+    source_pk_json: text('source_pk_json'),
+    // Unix epoch seconds as integer (Drizzle's 'timestamp' mode maps Date <-> seconds)
+    discovered_at: integer('discovered_at', { mode: 'timestamp' }),
+  }),
+} as const;
+
+/** Human-friendly column descriptions. */
+export const metadata: AdapterMetadata = {
+  entity_index_entities: {
+    id: 'Primary key',
+    name: 'Entity name',
+    type: 'Entity type/category',
+    description: 'Optional description of the entity',
+    public_id: 'Optional stable public id for cross-adapter linking',
+    created_at: 'Creation time as unix epoch seconds (stored as INTEGER)',
+  },
+  entity_index_occurrences: {
+    id: 'Primary key',
+    entity_id:
+      'Logical reference to entity_index_entities.id (no FK enforcement)',
+    source_adapter_id: 'Adapter id where this occurrence was discovered',
+    source_table_name: 'Source table name within the adapter',
+    source_pk_json:
+      'Canonical JSON string of the source primary key (e.g., {"id":"t3_abc"})',
+    discovered_at: 'Discovery time as unix epoch seconds (stored as INTEGER)',
+  },
+};
+
+/** Unified adapter export, no ingestors required for this adapter.
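+ *
+ * Example linkage (values illustrative, not from a real export): an occurrence row
+ *   { entity_id: 'e1', source_adapter_id: 'reddit',
+ *     source_table_name: 'reddit_posts', source_pk_json: '{"id":"t3_abc"}' }
+ * points at exactly one reddit_posts row, with no FK enforcement by design.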
+ */
+export const entityIndexAdapter = defineAdapter(() => ({
+  id: 'entity_index',
+  schema,
+  metadata,
+  versions: entityIndexVersions,
+  transforms: entityIndexTransforms,
+}));
diff --git a/packages/vault-core/src/adapters/example-notes/index.ts b/packages/vault-core/src/adapters/example-notes/index.ts
new file mode 100644
index 0000000000..d89b18e19a
--- /dev/null
+++ b/packages/vault-core/src/adapters/example-notes/index.ts
@@ -0,0 +1 @@
+export { exampleNotesAdapter } from './src/adapter';
diff --git a/packages/vault-core/src/adapters/example-notes/migrations/transforms.ts b/packages/vault-core/src/adapters/example-notes/migrations/transforms.ts
new file mode 100644
index 0000000000..c32e662393
--- /dev/null
+++ b/packages/vault-core/src/adapters/example-notes/migrations/transforms.ts
@@ -0,0 +1,11 @@
+/**
+ * Transform registry for example_notes adapter.
+ *
+ * With versions ['0000', '0001'], provide a no-op transform for target tag '0001'
+ * to keep registry length aligned with versions (all tags except the first/baseline).
+ */
+import { defineTransformRegistry } from '../../../core/migrations';
+
+export const exampleNotesTransforms = defineTransformRegistry({
+  '0001': (input) => input,
+});
diff --git a/packages/vault-core/src/adapters/example-notes/migrations/versions.ts b/packages/vault-core/src/adapters/example-notes/migrations/versions.ts
new file mode 100644
index 0000000000..e4c1008025
--- /dev/null
+++ b/packages/vault-core/src/adapters/example-notes/migrations/versions.ts
@@ -0,0 +1,30 @@
+/**
+ * Migration versions for the example_notes adapter.
+ *
+ * A baseline version ('0000') plus a forward step ('0001') that adds entity_links,
+ * mirroring the Drizzle-generated SQL for the schema defined in src/adapter.ts. This
+ * follows the Reddit adapter pattern of inlining SQL for environment-agnostic startup migrations.
+ */
+import { defineVersions } from '../../../core/migrations';
+
+export const exampleNotesVersions = defineVersions(
+  {
+    tag: '0000',
+    sql: [
+      `CREATE TABLE \`example_notes_items\` (
+  \`id\` text PRIMARY KEY NOT NULL,
+  \`title\` text,
+  \`body\` text,
+  \`tags\` text DEFAULT '[]',
+  \`created_at\` integer,
+  \`public_id\` text
+);`,
+    ],
+  },
+  {
+    tag: '0001',
+    sql: [
+      `ALTER TABLE example_notes_items ADD COLUMN entity_links text NOT NULL DEFAULT '[]';`,
+    ],
+  },
+);
diff --git a/packages/vault-core/src/adapters/example-notes/src/adapter.ts b/packages/vault-core/src/adapters/example-notes/src/adapter.ts
new file mode 100644
index 0000000000..07b63ef6a7
--- /dev/null
+++ b/packages/vault-core/src/adapters/example-notes/src/adapter.ts
@@ -0,0 +1,61 @@
+/**
+ * Example Notes adapter refined to mirror Reddit adapter patterns.
+ *
+ * - Uses Drizzle schema helpers with intuitive types
+ * - created_at is an integer (unix epoch seconds) via Drizzle integer timestamp column (typed as Date)
+ * - tags is a TEXT column storing a JSON array string, default "[]"
+ * - No validator is wired in this minimal example
+ * - Keeps table prefix: example_notes_items
+ */
+
+import { defineAdapter } from '@repo/vault-core';
+import { integer, sqliteTable, text } from 'drizzle-orm/sqlite-core';
+import type { AdapterMetadata } from '../../../core/adapter';
+import { exampleNotesTransforms } from '../migrations/transforms';
+import { exampleNotesVersions } from '../migrations/versions';
+
+/**
+ * Drizzle schema for the example_notes adapter.
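+ * tags and entity_links hold JSON arrays serialized as TEXT, so a reader is
+ * expected to round-trip them manually, e.g. (illustrative only):
+ *   const links = JSON.parse(row.entity_links ?? '[]') as string[];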
+ * Table names are prefixed with the adapter id: `example_notes_items`
+ */
+export const schema = {
+  example_notes_items: sqliteTable('example_notes_items', {
+    id: text('id').primaryKey(),
+    title: text('title'),
+    body: text('body'),
+    // JSON array string (canonical format) with default []
+    tags: text('tags').default('[]').$type<string>(),
+    // JSON array string (canonical format) of Entity IDs, default []
+    entity_links: text('entity_links').notNull().default('[]'),
+    // Unix epoch seconds as integer; typed as Date in TS
+    created_at: integer('created_at', { mode: 'timestamp' }),
+    public_id: text('public_id'),
+  }),
+} as const;
+
+/**
+ * Human-friendly metadata for adapter tables/columns.
+ */
+export const metadata = {
+  example_notes_items: {
+    id: 'Primary key',
+    title: 'Note title',
+    body: 'Note body',
+    tags: 'JSON array stored as TEXT (default "[]")',
+    entity_links:
+      'JSON array (string[]) of Entity IDs from entity_index_entities, stored as TEXT JSON (default "[]")',
+    created_at: 'Creation time as unix epoch seconds (SQLite integer)',
+    public_id: 'Optional stable public id for cross-adapter linking',
+  },
+} satisfies AdapterMetadata;
+
+/**
+ * Export the adapter definition. No ingestors for this example.
+ */
+export const exampleNotesAdapter = defineAdapter(() => ({
+  id: 'example_notes',
+  schema,
+  metadata,
+  versions: exampleNotesVersions,
+  transforms: exampleNotesTransforms,
+}));
diff --git a/packages/vault-core/src/adapters/index.ts b/packages/vault-core/src/adapters/index.ts
deleted file mode 100644
index 4db245e24f..0000000000
--- a/packages/vault-core/src/adapters/index.ts
+++ /dev/null
@@ -1 +0,0 @@
-export * from './reddit/index';
diff --git a/packages/vault-core/src/adapters/reddit/README.md b/packages/vault-core/src/adapters/reddit/README.md
index 43a211f16c..c29ffc68a6 100644
--- a/packages/vault-core/src/adapters/reddit/README.md
+++ b/packages/vault-core/src/adapters/reddit/README.md
@@ -18,3 +18,36 @@ To migrate:
 
 - `cd` project directory
 - `bun run migrate`
+
+## Migrations (Plan A)
+
+This adapter ships two kinds of migrations:
+
+1. SQL schema migrations (forward-only; embedded inline)
+
+- Embedded directly in ./migrations/manifest.ts via redditVersions using "sql" (string[]) or "sqlText" (string)
+- No node:fs required; core will split "sqlText" on drizzle "--> statement-breakpoint" markers or on semicolons as a fallback
+- Note: legacy .sql files may exist in ./migrations/ for reference, but the manifest's inline SQL is the source of truth
+
+2. JS data transforms (version-to-version)
+
+- Location: ./migrations/transforms.ts
+- A TransformRegistry keyed by the target tag; each function converts data from the previous version into the current target version
+- Typed with defineTransformRegistry and RequiredTransformTags so every forward step is covered
+
+How hosts run this (no node: imports required in core):
+
+- Startup SQL migrations (schema)
+  - Call `runStartupSqlMigrations(adapter.id, adapter.versions, db, reporter)` from `@repo/vault-core`
+  - Pass the same Drizzle DB that Vault uses; core ensures the ledger tables exist and replays the embedded SQL
+
+- Data transforms + validation (content)
+  - Use transformAndValidate(manifest, transforms, dataset, sourceTag, validator?)
+ - transforms is the TransformRegistry that converts dataset from sourceTag up to manifest.currentTag + - validator is optional; when provided, it should return the morphed value or throw on failure (see redditDataValidator) + +Quick reference + +- Manifest: ./migrations/manifest.ts +- Transforms: ./migrations/transforms.ts +- SQL artifacts: ./migrations/\*.sql diff --git a/packages/vault-core/src/adapters/reddit/drizzle.config.ts b/packages/vault-core/src/adapters/reddit/drizzle.config.ts new file mode 100644 index 0000000000..d86cf3b743 --- /dev/null +++ b/packages/vault-core/src/adapters/reddit/drizzle.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from 'drizzle-kit'; + +export default defineConfig({ + dialect: 'sqlite', + schema: './src/schema.ts', + out: './migrations', +}); diff --git a/packages/vault-core/src/adapters/reddit/index.ts b/packages/vault-core/src/adapters/reddit/index.ts index 2cfd0f1e64..511d919a21 100644 --- a/packages/vault-core/src/adapters/reddit/index.ts +++ b/packages/vault-core/src/adapters/reddit/index.ts @@ -1 +1 @@ -export { redditAdapter as reddit } from './src'; +export { redditAdapter } from './src/adapter'; diff --git a/packages/vault-core/src/adapters/reddit/migrations/0000_polite_magdalene.sql b/packages/vault-core/src/adapters/reddit/migrations/0000_silent_zaran.sql similarity index 88% rename from packages/vault-core/src/adapters/reddit/migrations/0000_polite_magdalene.sql rename to packages/vault-core/src/adapters/reddit/migrations/0000_silent_zaran.sql index 871a7ed0ed..16516567eb 100644 --- a/packages/vault-core/src/adapters/reddit/migrations/0000_polite_magdalene.sql +++ b/packages/vault-core/src/adapters/reddit/migrations/0000_silent_zaran.sql @@ -65,12 +65,10 @@ CREATE TABLE `reddit_comments` ( `id` text PRIMARY KEY NOT NULL, `permalink` text NOT NULL, `date` integer NOT NULL, - `created_utc` integer NOT NULL, `ip` text, `subreddit` text NOT NULL, `gildings` integer, `link` text NOT NULL, - `post_id` text, `parent` text, `body` text, `media` text @@ -99,38 +97,34 @@ CREATE TABLE `reddit_friends` ( ); --> statement-breakpoint CREATE TABLE `reddit_gilded_content` ( - `content_link` text, + `content_link` text PRIMARY KEY NOT NULL, `award` text, `amount` text, `date` integer ); --> statement-breakpoint -CREATE UNIQUE INDEX `reddit_gilded_content_content_award_date_uq` ON `reddit_gilded_content` (`content_link`,`award`,`date`);--> statement-breakpoint CREATE TABLE `reddit_gold_received` ( - `content_link` text, + `content_link` text PRIMARY KEY NOT NULL, `gold_received` text, `gilder_username` text, `date` integer ); --> statement-breakpoint -CREATE UNIQUE INDEX `reddit_gold_received_content_date_uq` ON `reddit_gold_received` (`content_link`,`date`);--> statement-breakpoint CREATE TABLE `reddit_hidden_posts` ( `id` text PRIMARY KEY NOT NULL, `permalink` text NOT NULL ); --> statement-breakpoint CREATE TABLE `reddit_ip_logs` ( - `date` integer, + `date` integer PRIMARY KEY NOT NULL, `ip` text ); --> statement-breakpoint -CREATE UNIQUE INDEX `reddit_ip_logs_date_ip_uq` ON `reddit_ip_logs` (`date`,`ip`);--> statement-breakpoint CREATE TABLE `reddit_linked_identities` ( - `issuer_id` text, - `subject_id` text + `issuer_id` text PRIMARY KEY NOT NULL, + `subject_id` text NOT NULL ); --> statement-breakpoint -CREATE UNIQUE INDEX `reddit_linked_identities_issuer_subject_uq` ON `reddit_linked_identities` (`issuer_id`,`subject_id`);--> statement-breakpoint CREATE TABLE `reddit_linked_phone_number` ( `phone_number` text PRIMARY KEY NOT NULL ); @@ -198,7 
+192,7 @@ CREATE TABLE `reddit_multireddits` ( --> statement-breakpoint CREATE TABLE `reddit_payouts` ( `payout_amount_usd` text, - `date` integer, + `date` integer PRIMARY KEY NOT NULL, `payout_id` text ); --> statement-breakpoint @@ -208,7 +202,7 @@ CREATE TABLE `reddit_persona` ( ); --> statement-breakpoint CREATE TABLE `reddit_poll_votes` ( - `post_id` text, + `post_id` text PRIMARY KEY NOT NULL, `user_selection` text, `text` text, `image_url` text, @@ -216,7 +210,6 @@ CREATE TABLE `reddit_poll_votes` ( `stake_amount` text ); --> statement-breakpoint -CREATE UNIQUE INDEX `reddit_poll_votes_post_user_uq` ON `reddit_poll_votes` (`post_id`,`user_selection`);--> statement-breakpoint CREATE TABLE `reddit_post_headers` ( `id` text PRIMARY KEY NOT NULL, `permalink` text NOT NULL, @@ -237,7 +230,6 @@ CREATE TABLE `reddit_posts` ( `id` text PRIMARY KEY NOT NULL, `permalink` text NOT NULL, `date` integer NOT NULL, - `created_utc` integer NOT NULL, `ip` text, `subreddit` text NOT NULL, `gildings` integer, diff --git a/packages/vault-core/src/adapters/reddit/migrations/meta/0000_snapshot.json b/packages/vault-core/src/adapters/reddit/migrations/meta/0000_snapshot.json index a0e97dc616..7723658b8b 100644 --- a/packages/vault-core/src/adapters/reddit/migrations/meta/0000_snapshot.json +++ b/packages/vault-core/src/adapters/reddit/migrations/meta/0000_snapshot.json @@ -1,7 +1,7 @@ { "version": "6", "dialect": "sqlite", - "id": "582cf3d6-8a6e-4223-b2bf-faa81e65cd39", + "id": "fcd987c1-fe5c-40ee-8df4-ad2cbdb10ed3", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "reddit_account_gender": { @@ -383,13 +383,6 @@ "notNull": true, "autoincrement": false }, - "created_utc": { - "name": "created_utc", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, "ip": { "name": "ip", "type": "text", @@ -418,13 +411,6 @@ "notNull": true, "autoincrement": false }, - "post_id": { - "name": "post_id", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, "parent": { "name": "parent", "type": "text", @@ -591,8 +577,8 @@ "content_link": { "name": "content_link", "type": "text", - "primaryKey": false, - "notNull": false, + "primaryKey": true, + "notNull": true, "autoincrement": false }, "award": { @@ -617,13 +603,7 @@ "autoincrement": false } }, - "indexes": { - "reddit_gilded_content_content_award_date_uq": { - "name": "reddit_gilded_content_content_award_date_uq", - "columns": ["content_link", "award", "date"], - "isUnique": true - } - }, + "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {}, "uniqueConstraints": {}, @@ -635,8 +615,8 @@ "content_link": { "name": "content_link", "type": "text", - "primaryKey": false, - "notNull": false, + "primaryKey": true, + "notNull": true, "autoincrement": false }, "gold_received": { @@ -661,13 +641,7 @@ "autoincrement": false } }, - "indexes": { - "reddit_gold_received_content_date_uq": { - "name": "reddit_gold_received_content_date_uq", - "columns": ["content_link", "date"], - "isUnique": true - } - }, + "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {}, "uniqueConstraints": {}, @@ -703,8 +677,8 @@ "date": { "name": "date", "type": "integer", - "primaryKey": false, - "notNull": false, + "primaryKey": true, + "notNull": true, "autoincrement": false }, "ip": { @@ -715,13 +689,7 @@ "autoincrement": false } }, - "indexes": { - "reddit_ip_logs_date_ip_uq": { - "name": "reddit_ip_logs_date_ip_uq", - "columns": ["date", "ip"], - "isUnique": true - } - }, + "indexes": {}, 
"foreignKeys": {}, "compositePrimaryKeys": {}, "uniqueConstraints": {}, @@ -733,25 +701,19 @@ "issuer_id": { "name": "issuer_id", "type": "text", - "primaryKey": false, - "notNull": false, + "primaryKey": true, + "notNull": true, "autoincrement": false }, "subject_id": { "name": "subject_id", "type": "text", "primaryKey": false, - "notNull": false, + "notNull": true, "autoincrement": false } }, - "indexes": { - "reddit_linked_identities_issuer_subject_uq": { - "name": "reddit_linked_identities_issuer_subject_uq", - "columns": ["issuer_id", "subject_id"], - "isUnique": true - } - }, + "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {}, "uniqueConstraints": {}, @@ -1148,8 +1110,8 @@ "date": { "name": "date", "type": "integer", - "primaryKey": false, - "notNull": false, + "primaryKey": true, + "notNull": true, "autoincrement": false }, "payout_id": { @@ -1195,8 +1157,8 @@ "post_id": { "name": "post_id", "type": "text", - "primaryKey": false, - "notNull": false, + "primaryKey": true, + "notNull": true, "autoincrement": false }, "user_selection": { @@ -1235,13 +1197,7 @@ "autoincrement": false } }, - "indexes": { - "reddit_poll_votes_post_user_uq": { - "name": "reddit_poll_votes_post_user_uq", - "columns": ["post_id", "user_selection"], - "isUnique": true - } - }, + "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {}, "uniqueConstraints": {}, @@ -1361,13 +1317,6 @@ "notNull": true, "autoincrement": false }, - "created_utc": { - "name": "created_utc", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, "ip": { "name": "ip", "type": "text", diff --git a/packages/vault-core/src/adapters/reddit/migrations/meta/_journal.json b/packages/vault-core/src/adapters/reddit/migrations/meta/_journal.json index 8bafe11bda..667719c66f 100644 --- a/packages/vault-core/src/adapters/reddit/migrations/meta/_journal.json +++ b/packages/vault-core/src/adapters/reddit/migrations/meta/_journal.json @@ -5,8 +5,8 @@ { "idx": 0, "version": "6", - "when": 1755190101550, - "tag": "0000_polite_magdalene", + "when": 1760567304216, + "tag": "0000_silent_zaran", "breakpoints": true } ] diff --git a/packages/vault-core/src/adapters/reddit/migrations/transforms.ts b/packages/vault-core/src/adapters/reddit/migrations/transforms.ts new file mode 100644 index 0000000000..205d7d0c08 --- /dev/null +++ b/packages/vault-core/src/adapters/reddit/migrations/transforms.ts @@ -0,0 +1,9 @@ +import { defineTransformRegistry } from '../../../core/migrations'; + +/** + * Reddit transform registry: keyed by target tag. + * 0001: baseline forward step; currently a no-op. Replace with real transforms as schema evolves. + * + * Note: we pass the required tag union to enforce compile-time coverage. + */ +export const redditTransforms = defineTransformRegistry({}); diff --git a/packages/vault-core/src/adapters/reddit/migrations/versions.ts b/packages/vault-core/src/adapters/reddit/migrations/versions.ts new file mode 100644 index 0000000000..b04c9c03c9 --- /dev/null +++ b/packages/vault-core/src/adapters/reddit/migrations/versions.ts @@ -0,0 +1,284 @@ +import { defineVersions } from '../../../core/migrations'; + +/** + * Reddit adapter migration versions (Plan A baseline). + * - Integer version tags; sqlId maps to the SQL artifact name. + * - Owned resources enumerate adapter-owned DB objects for reset/export scoping. + * + * We export 'redditVersions' as a readonly tuple so transform registries can derive + * required keys at compile-time. 
+ */ +export const redditVersions = defineVersions({ + tag: '0000', + // Generated by migrations-add script + sql: [ + `CREATE TABLE \`reddit_account_gender\` ( + \`id\` text PRIMARY KEY DEFAULT 'singleton' NOT NULL, + \`account_gender\` text + );`, + `CREATE TABLE \`reddit_announcements\` ( + \`announcement_id\` text PRIMARY KEY NOT NULL, + \`sent_at\` integer, + \`read_at\` integer, + \`from_id\` text, + \`from_username\` text, + \`subject\` text, + \`body\` text, + \`url\` text + );`, + `CREATE TABLE \`reddit_approved_submitter_subreddits\` ( + \`subreddit\` text PRIMARY KEY NOT NULL + );`, + `CREATE TABLE \`reddit_birthdate\` ( + \`id\` text PRIMARY KEY DEFAULT 'singleton' NOT NULL, + \`birthdate\` integer, + \`verified_birthdate\` integer, + \`verification_state\` text, + \`verification_method\` text + );`, + `CREATE TABLE \`reddit_chat_history\` ( + \`message_id\` text PRIMARY KEY NOT NULL, + \`created_at\` integer, + \`updated_at\` integer, + \`username\` text, + \`message\` text, + \`thread_parent_message_id\` text, + \`channel_url\` text, + \`subreddit\` text, + \`channel_name\` text, + \`conversation_type\` text + );`, + `CREATE TABLE \`reddit_checkfile\` ( + \`filename\` text PRIMARY KEY NOT NULL, + \`sha256\` text + );`, + `CREATE TABLE \`reddit_comment_headers\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL, + \`date\` integer NOT NULL, + \`ip\` text, + \`subreddit\` text NOT NULL, + \`gildings\` integer, + \`link\` text NOT NULL, + \`parent\` text + );`, + `CREATE TABLE \`reddit_comment_votes\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL, + \`direction\` text NOT NULL + );`, + `CREATE TABLE \`reddit_comments\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL, + \`date\` integer NOT NULL, + \`ip\` text, + \`subreddit\` text NOT NULL, + \`gildings\` integer, + \`link\` text NOT NULL, + \`parent\` text, + \`body\` text, + \`media\` text + );`, + `CREATE TABLE \`reddit_drafts\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`title\` text, + \`body\` text, + \`kind\` text, + \`created\` integer, + \`spoiler\` text, + \`nsfw\` text, + \`original_content\` text, + \`content_category\` text, + \`flair_id\` text, + \`flair_text\` text, + \`send_replies\` text, + \`subreddit\` text, + \`is_public_link\` text + );`, + `CREATE TABLE \`reddit_friends\` ( + \`username\` text PRIMARY KEY NOT NULL, + \`note\` text + );`, + `CREATE TABLE \`reddit_gilded_content\` ( + \`content_link\` text PRIMARY KEY NOT NULL, + \`award\` text, + \`amount\` text, + \`date\` integer + );`, + `CREATE TABLE \`reddit_gold_received\` ( + \`content_link\` text PRIMARY KEY NOT NULL, + \`gold_received\` text, + \`gilder_username\` text, + \`date\` integer + );`, + `CREATE TABLE \`reddit_hidden_posts\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL + );`, + `CREATE TABLE \`reddit_ip_logs\` ( + \`date\` integer PRIMARY KEY NOT NULL, + \`ip\` text + );`, + `CREATE TABLE \`reddit_linked_identities\` ( + \`issuer_id\` text PRIMARY KEY NOT NULL, + \`subject_id\` text NOT NULL + );`, + `CREATE TABLE \`reddit_linked_phone_number\` ( + \`phone_number\` text PRIMARY KEY NOT NULL + );`, + `CREATE TABLE \`reddit_message_headers\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL, + \`thread_id\` text, + \`date\` integer, + \`ip\` text, + \`from\` text, + \`to\` text + );`, + `CREATE TABLE \`reddit_messages\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL, + \`thread_id\` text, + \`date\` integer, + \`ip\` text, 
+ \`from\` text, + \`to\` text, + \`subject\` text, + \`body\` text + );`, + `CREATE TABLE \`reddit_messages_archive\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL, + \`thread_id\` text, + \`date\` integer, + \`ip\` text, + \`from\` text, + \`to\` text, + \`subject\` text, + \`body\` text + );`, + `CREATE TABLE \`reddit_messages_archive_headers\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL, + \`thread_id\` text, + \`date\` integer, + \`ip\` text, + \`from\` text, + \`to\` text + );`, + `CREATE TABLE \`reddit_moderated_subreddits\` ( + \`subreddit\` text PRIMARY KEY NOT NULL + );`, + `CREATE TABLE \`reddit_multireddits\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`display_name\` text, + \`date\` integer, + \`description\` text, + \`privacy\` text, + \`subreddits\` text, + \`image_url\` text, + \`is_owner\` text, + \`favorited\` text, + \`followers\` text + );`, + `CREATE TABLE \`reddit_payouts\` ( + \`payout_amount_usd\` text, + \`date\` integer PRIMARY KEY NOT NULL, + \`payout_id\` text + );`, + 'CREATE UNIQUE INDEX `reddit_payouts_payout_date_uq` ON `reddit_payouts` (`payout_id`,`date`);', + `CREATE TABLE \`reddit_persona\` ( + \`persona_inquiry_id\` text PRIMARY KEY NOT NULL + );`, + `CREATE TABLE \`reddit_poll_votes\` ( + \`post_id\` text PRIMARY KEY NOT NULL, + \`user_selection\` text, + \`text\` text, + \`image_url\` text, + \`is_prediction\` text, + \`stake_amount\` text + );`, + `CREATE TABLE \`reddit_post_headers\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL, + \`date\` integer NOT NULL, + \`ip\` text, + \`subreddit\` text NOT NULL, + \`gildings\` integer, + \`url\` text + );`, + `CREATE TABLE \`reddit_post_votes\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL, + \`direction\` text NOT NULL + );`, + `CREATE TABLE \`reddit_posts\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL, + \`date\` integer NOT NULL, + \`ip\` text, + \`subreddit\` text NOT NULL, + \`gildings\` integer, + \`title\` text, + \`url\` text, + \`body\` text + );`, + `CREATE TABLE \`reddit_purchases\` ( + \`processor\` text, + \`transaction_id\` text PRIMARY KEY NOT NULL, + \`product\` text, + \`date\` integer, + \`cost\` text, + \`currency\` text, + \`status\` text + );`, + `CREATE TABLE \`reddit_saved_comments\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL + );`, + `CREATE TABLE \`reddit_saved_posts\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`permalink\` text NOT NULL + );`, + `CREATE TABLE \`reddit_scheduled_posts\` ( + \`scheduled_post_id\` text PRIMARY KEY NOT NULL, + \`subreddit\` text, + \`title\` text, + \`body\` text, + \`url\` text, + \`submission_time\` integer, + \`recurrence\` text + );`, + `CREATE TABLE \`reddit_sensitive_ads_preferences\` ( + \`type\` text PRIMARY KEY NOT NULL, + \`preference\` text + );`, + `CREATE TABLE \`reddit_statistics\` ( + \`statistic\` text PRIMARY KEY NOT NULL, + \`value\` text + );`, + `CREATE TABLE \`reddit_stripe\` ( + \`stripe_account_id\` text PRIMARY KEY NOT NULL + );`, + `CREATE TABLE \`reddit_subscribed_subreddits\` ( + \`subreddit\` text PRIMARY KEY NOT NULL + );`, + `CREATE TABLE \`reddit_subscriptions\` ( + \`processor\` text, + \`subscription_id\` text PRIMARY KEY NOT NULL, + \`product\` text, + \`product_id\` text, + \`product_name\` text, + \`status\` text, + \`start_date\` integer, + \`end_date\` integer + );`, + `CREATE TABLE \`reddit_twitter\` ( + \`username\` text PRIMARY KEY NOT NULL + );`, + `CREATE TABLE 
\`reddit_user_preferences\` ( + \`preference\` text PRIMARY KEY NOT NULL, + \`value\` text + );`, + ], +}); diff --git a/packages/vault-core/src/adapters/reddit/src/adapter.ts b/packages/vault-core/src/adapters/reddit/src/adapter.ts new file mode 100644 index 0000000000..417e3e40aa --- /dev/null +++ b/packages/vault-core/src/adapters/reddit/src/adapter.ts @@ -0,0 +1,20 @@ +import { defineAdapter } from '@repo/vault-core'; +import { redditTransforms } from '../migrations/transforms'; +import { redditVersions } from '../migrations/versions'; +import type { RedditAdapterConfig } from './config'; +import { redditZipIngestor } from './ingestor'; +import { metadata } from './metadata'; +import * as schema from './schema'; +import { parseSchema } from './validation'; + +// Unified Reddit adapter wired for core-orchestrated validation and ingestion. +// Tag alignment between versions and transforms is enforced by core defineAdapter typing. +export const redditAdapter = defineAdapter((_?: RedditAdapterConfig) => ({ + id: 'reddit', + schema, + metadata, + validator: parseSchema, + ingestors: [redditZipIngestor], + versions: redditVersions, + transforms: redditTransforms, +})); diff --git a/packages/vault-core/src/adapters/reddit/src/csv-parse.d.ts b/packages/vault-core/src/adapters/reddit/src/csv-parse.d.ts deleted file mode 100644 index 5ca7dae8a5..0000000000 --- a/packages/vault-core/src/adapters/reddit/src/csv-parse.d.ts +++ /dev/null @@ -1,18 +0,0 @@ -declare module 'csv-parse' { - export type Options = { - columns?: boolean | string[]; - bom?: boolean; - skip_empty_lines?: boolean; - relax_column_count?: boolean; - trim?: boolean; - }; - - export function parse( - input: string, - options: Options, - callback: ( - err: unknown | null, - records: Record[], - ) => void, - ): void; -} diff --git a/packages/vault-core/src/adapters/reddit/src/drizzle.config.ts b/packages/vault-core/src/adapters/reddit/src/drizzle.config.ts deleted file mode 100644 index 417e465508..0000000000 --- a/packages/vault-core/src/adapters/reddit/src/drizzle.config.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { fileURLToPath } from 'node:url'; -import { defineConfig } from 'drizzle-kit'; - -// Resolve paths relative to this module so they work regardless of process CWD -// Migrations live at the adapter root (../migrations), not inside src/ -// TODO custom migration format/process that doesn't rely on node:fs (??) 
-const out = fileURLToPath(new URL('../migrations', import.meta.url)); -const schema = fileURLToPath(new URL('./schema.ts', import.meta.url)) as string; - -export default defineConfig({ - // Using sqlite dialect; schema is in this package - dialect: 'sqlite', - casing: 'snake_case', - strict: true, - out, - - // Use absolute schema path for CLI compatibility as well - schema, - - // Every adapter *must* have a unique migrations table name, in order for everything to play nicely with other adapters - migrations: { - table: 'reddit_migrations', - }, -}); diff --git a/packages/vault-core/src/adapters/reddit/src/index.ts b/packages/vault-core/src/adapters/reddit/src/index.ts index cb7df709dc..8d3edf26a3 100644 --- a/packages/vault-core/src/adapters/reddit/src/index.ts +++ b/packages/vault-core/src/adapters/reddit/src/index.ts @@ -1,38 +1 @@ -import { defineAdapter } from '@repo/vault-core'; -import type { RedditAdapterConfig } from './config'; -import drizzleConfig from './drizzle.config'; -import { metadata } from './metadata'; -import { parseRedditExport } from './parse'; -import * as tables from './schema'; -import { upsertRedditData } from './upsert'; -import { parseSchema } from './validation'; - -// Expose all tables from schema module (runtime values only; TS types are erased) -export const schema = tables; -// ArkType infers array schemas like `[ { ... } ]` as a tuple type with one element. -// Convert any such tuple properties into standard `T[]` arrays for our parser/upsert. -type Arrayify = T extends readonly [infer E] ? E[] : T; -type Inferred = (typeof parseSchema)['infer']; -export type ParsedRedditExport = { - [K in keyof Inferred]: Arrayify; -}; -// Back-compat for consumers still importing ParseResult from this module -export type ParseResult = ParsedRedditExport; - -// Adapter export -export const redditAdapter = defineAdapter((args: RedditAdapterConfig) => { - args; // TODO - - const adapter = { - id: 'reddit', - name: 'Reddit Adapter', - schema, - metadata, - validator: parseSchema, - drizzleConfig, - parse: parseRedditExport, - upsert: upsertRedditData, - }; - - return adapter; -}); +export { redditAdapter } from './adapter'; diff --git a/packages/vault-core/src/adapters/reddit/src/ingestor.ts b/packages/vault-core/src/adapters/reddit/src/ingestor.ts new file mode 100644 index 0000000000..e47e8ae6d0 --- /dev/null +++ b/packages/vault-core/src/adapters/reddit/src/ingestor.ts @@ -0,0 +1,15 @@ +import { defineIngestor } from '@repo/vault-core'; +import { parseRedditExport as parse } from './parse'; + +/** + * ZIP ingestor for Reddit GDPR export. + * - Matches a single .zip File + * - Parses via existing parseRedditExport (Blob-compatible) + * - Returns normalized payload ready for validation/upsert + */ +export const redditZipIngestor = defineIngestor({ + matches(file) { + return /\.zip$/i.test(file.name ?? ''); + }, + parse, +}); diff --git a/packages/vault-core/src/adapters/reddit/src/metadata.ts b/packages/vault-core/src/adapters/reddit/src/metadata.ts index a6410209eb..952c30f58b 100644 --- a/packages/vault-core/src/adapters/reddit/src/metadata.ts +++ b/packages/vault-core/src/adapters/reddit/src/metadata.ts @@ -1,9 +1,11 @@ +import type { AdapterMetadata } from '@repo/vault-core'; +import type * as schema from './schema'; + export const metadata = { reddit_posts: { id: 'Reddit post id (base36)', permalink: 'Full permalink URL to the post', - date: 'Original timestamp string from export (e.g. 
2025-05-18 04:35:32 UTC)', - created_utc: 'Unix epoch seconds derived from date', + date: 'Timestamp (UTC). Coerced to Date from export string/epoch', ip: 'Recorded IP address associated with the post event, if present', subreddit: 'Subreddit name where the post was made (e.g. sveltejs)', gildings: 'Number of gildings on the post (integer)', @@ -14,17 +16,14 @@ export const metadata = { reddit_comments: { id: 'Reddit comment id (base36)', permalink: 'Full permalink URL to the comment', - date: 'Original timestamp string from export (e.g. 2025-05-18 04:35:32 UTC)', - created_utc: 'Unix epoch seconds derived from date', + date: 'Timestamp (UTC). Coerced to Date from export string/epoch', ip: 'Recorded IP address associated with the comment event, if present', subreddit: 'Subreddit name where the comment was made', gildings: 'Number of gildings on the comment (integer)', link: 'Permalink URL to the parent post of this comment (CSV “link” field)', - post_id: - 'Derived base36 id of the parent post when extractable from link/permalink; NULL otherwise', parent: 'CSV “parent” field; thing id of parent post or comment when present', body: 'Comment body text', media: 'Media info field from CSV when present', }, -} as const; +} satisfies AdapterMetadata; diff --git a/packages/vault-core/src/adapters/reddit/src/parse.ts b/packages/vault-core/src/adapters/reddit/src/parse.ts index 328fa5f12e..78803ac6c5 100644 --- a/packages/vault-core/src/adapters/reddit/src/parse.ts +++ b/packages/vault-core/src/adapters/reddit/src/parse.ts @@ -1,552 +1,67 @@ -import { parse as csvParse } from 'csv-parse'; -import { unzipSync } from 'fflate'; -import type { ParsedRedditExport } from './index'; +import { ZIP } from '../../../utils/archive/zip'; +import { CSV } from '../../../utils/format/csv'; -/** - * CSV record shape directly from export files - * We keep fields loose (string | undefined) and coerce downstream. - */ -type RawRecord = Record; - -// Result type comes from ArkType schema in index.ts -// We return a ParsedRedditExport object, matching parseSchema. - -export async function parseRedditExport( - file: Blob, -): Promise { +export async function parseRedditExport(file: Blob) { // Read entire zip as Uint8Array const ab = await file.arrayBuffer(); - const zipMap = unzipSync(new Uint8Array(ab)); // { [filename]: Uint8Array } + const zipMap = await ZIP.unpack(new Uint8Array(ab)); // { [filename]: Uint8Array } // Read+parse helpers const decode = (bytes: Uint8Array) => new TextDecoder('utf-8', { fatal: false, ignoreBOM: true }).decode(bytes); - const readCsvText = (name: string): string => { + const readCsv = async (name: string) => { const bytes = zipMap[name]; - return bytes ? decode(bytes) : ''; + const csvText = bytes ? 
decode(bytes) : ''; + return CSV.parse(csvText); }; - const readCsv = async (name: string): Promise => - parseCsv(readCsvText(name)); - - // Parse CSVs used today - const postsRecords = await readCsv('posts.csv'); - const commentsRecords = await readCsv('comments.csv'); - - // Map to normalized shapes with coercions/derivations - const posts = postsRecords - .map((r) => mapPost(r)) - .filter((p) => p !== undefined); - - const comments = commentsRecords - .map((r) => mapComment(r)) - .filter((c) => c !== undefined); - - // Lightly mapped datasets (string fields only; optional everywhere) - const post_headers = (await readCsv('post_headers.csv')).map((r) => ({ - id: u(r.id), - permalink: u(r.permalink), - date: toDate(u(r.date)), - ip: u(r.ip), - subreddit: u(r.subreddit), - gildings: numOrUndefined(r.gildings), - url: u(r.url), - })); - - const comment_headers = (await readCsv('comment_headers.csv')).map((r) => ({ - id: u(r.id), - permalink: u(r.permalink), - date: toDate(u(r.date)), - ip: u(r.ip), - subreddit: u(r.subreddit), - gildings: numOrUndefined(r.gildings), - link: u(r.link), - parent: u(r.parent), - })); - - const post_votes = (await readCsv('post_votes.csv')).map((r) => ({ - id: u(r.id), - permalink: u(r.permalink), - direction: u(r.direction), - })); - - const comment_votes = (await readCsv('comment_votes.csv')).map((r) => ({ - id: u(r.id), - permalink: u(r.permalink), - direction: u(r.direction), - })); - - const saved_posts = (await readCsv('saved_posts.csv')).map((r) => ({ - id: u(r.id), - permalink: u(r.permalink), - })); - - const saved_comments = (await readCsv('saved_comments.csv')).map((r) => ({ - id: u(r.id), - permalink: u(r.permalink), - })); - - const hidden_posts = (await readCsv('hidden_posts.csv')).map((r) => ({ - id: u(r.id), - permalink: u(r.permalink), - })); - - const message_headers = (await readCsv('message_headers.csv')).map((r) => ({ - id: u(r.id), - permalink: u(r.permalink), - thread_id: u(r.thread_id), - date: toDate(u(r.date)), - ip: u(r.ip), - from: u(r.from), - to: u(r.to), - })); - - const messages = (await readCsv('messages.csv')).map((r) => ({ - id: u(r.id), - permalink: u(r.permalink), - thread_id: u(r.thread_id), - date: toDate(u(r.date)), - ip: u(r.ip), - from: u(r.from), - to: u(r.to), - subject: u(r.subject), - body: u(r.body), - })); - - const messages_archive_headers = ( - await readCsv('messages_archive_headers.csv') - ).map((r) => ({ - id: u(r.id), - permalink: u(r.permalink), - thread_id: u(r.thread_id), - date: toDate(u(r.date)), - ip: u(r.ip), - from: u(r.from), - to: u(r.to), - })); - - const messages_archive = (await readCsv('messages_archive.csv')).map((r) => ({ - id: u(r.id), - permalink: u(r.permalink), - thread_id: u(r.thread_id), - date: toDate(u(r.date)), - ip: u(r.ip), - from: u(r.from), - to: u(r.to), - subject: u(r.subject), - body: u(r.body), - })); - - const chat_history = (await readCsv('chat_history.csv')).map((r) => ({ - message_id: u(r.message_id), - created_at: toDate(u(r.created_at)), - updated_at: toDate(u(r.updated_at)), - username: u(r.username), - message: u(r.message), - thread_parent_message_id: u(r.thread_parent_message_id), - channel_url: u(r.channel_url), - subreddit: u(r.subreddit), - channel_name: u(r.channel_name), - conversation_type: u(r.conversation_type), - })); - - const account_gender = (await readCsv('account_gender.csv')).map((r) => ({ - account_gender: u(r.account_gender), - })); - - const sensitive_ads_preferences = ( - await readCsv('sensitive_ads_preferences.csv') - ).map((r) => ({ - type: 
u(r.type), - preference: u(r.preference), - })); - - const birthdate = (await readCsv('birthdate.csv')).map((r) => ({ - birthdate: toDate(u(r.birthdate)), - verified_birthdate: toDate(u(r.verified_birthdate)), - verification_state: u(r.verification_state), - verification_method: u(r.verification_method), - })); - - const user_preferences = (await readCsv('user_preferences.csv')).map((r) => ({ - preference: u(r.preference), - value: u(r.value), - })); - - const linked_identities = (await readCsv('linked_identities.csv')).map( - (r) => ({ - issuer_id: u(r.issuer_id), - subject_id: u(r.subject_id), - }), - ); - - const linked_phone_number = (await readCsv('linked_phone_number.csv')).map( - (r) => ({ - phone_number: u(r.phone_number), - }), - ); - - const twitter = (await readCsv('twitter.csv')).map((r) => ({ - username: u(r.username), - })); - - const approved_submitter_subreddits = ( - await readCsv('approved_submitter_subreddits.csv') - ).map((r) => ({ - subreddit: u(r.subreddit), - })); - - const moderated_subreddits = (await readCsv('moderated_subreddits.csv')).map( - (r) => ({ - subreddit: u(r.subreddit), - }), + const files = [ + 'posts', + 'comments', + 'post_headers', + 'comment_headers', + 'post_votes', + 'comment_votes', + 'saved_posts', + 'saved_comments', + 'hidden_posts', + 'message_headers', + 'messages', + 'messages_archive_headers', + 'messages_archive', + 'chat_history', + 'account_gender', + 'sensitive_ads_preferences', + 'birthdate', + 'user_preferences', + 'linked_identities', + 'linked_phone_number', + 'twitter', + 'approved_submitter_subreddits', + 'moderated_subreddits', + 'subscribed_subreddits', + 'multireddits', + 'purchases', + 'subscriptions', + 'payouts', + 'stripe', + 'announcements', + 'drafts', + 'friends', + 'gilded_content', + 'gold_received', + 'ip_logs', + 'persona', + 'poll_votes', + 'scheduled_posts', + 'statistics', + 'checkfile', + ] as const; + + return Object.fromEntries[]>( + await Promise.all( + files.map(async (f) => [f, await readCsv(`${f}.csv`)] as const), + ), ); - - const subscribed_subreddits = ( - await readCsv('subscribed_subreddits.csv') - ).map((r) => ({ - subreddit: u(r.subreddit), - })); - - const multireddits = (await readCsv('multireddits.csv')).map((r) => ({ - id: u(r.id), - display_name: u(r.display_name), - date: toDate(u(r.date)), - description: u(r.description), - privacy: u(r.privacy), - subreddits: u(r.subreddits), - image_url: u(r.image_url), - is_owner: u(r.is_owner), - favorited: u(r.favorited), - followers: u(r.followers), - })); - - const purchases = (await readCsv('purchases.csv')).map((r) => ({ - processor: u(r.processor), - transaction_id: u(r.transaction_id), - product: u(r.product), - date: toDate(u(r.date)), - cost: u(r.cost), - currency: u(r.currency), - status: u(r.status), - })); - - const subscriptions = (await readCsv('subscriptions.csv')).map((r) => ({ - processor: u(r.processor), - subscription_id: u(r.subscription_id), - product: u(r.product), - product_id: u(r.product_id), - product_name: u(r.product_name), - status: u(r.status), - start_date: toDate(u(r.start_date)), - end_date: toDate(u(r.end_date)), - })); - - const payouts = (await readCsv('payouts.csv')).map((r) => ({ - payout_amount_usd: u(r.payout_amount_usd), - date: toDate(u(r.date)), - payout_id: u(r.payout_id), - })); - - const stripe = (await readCsv('stripe.csv')).map((r) => ({ - stripe_account_id: u(r.stripe_account_id), - })); - - const announcements = (await readCsv('announcements.csv')).map((r) => ({ - announcement_id: u(r.announcement_id), - 
sent_at: toDate(u(r.sent_at)), - read_at: toDate(u(r.read_at)), - from_id: u(r.from_id), - from_username: u(r.from_username), - subject: u(r.subject), - body: u(r.body), - url: u(r.url), - })); - - const drafts = (await readCsv('drafts.csv')).map((r) => ({ - id: u(r.id), - title: u(r.title), - body: u(r.body), - kind: u(r.kind), - created: toDate(u(r.created)), - spoiler: u(r.spoiler), - nsfw: u(r.nsfw), - original_content: u(r.original_content), - content_category: u(r.content_category), - flair_id: u(r.flair_id), - flair_text: u(r.flair_text), - send_replies: u(r.send_replies), - subreddit: u(r.subreddit), - is_public_link: u(r.is_public_link), - })); - - const friends = (await readCsv('friends.csv')).map((r) => ({ - username: u(r.username), - note: u(r.note), - })); - - const gilded_content = (await readCsv('gilded_content.csv')).map((r) => ({ - content_link: u(r.content_link), - award: u(r.award), - amount: u(r.amount), - date: toDate(u(r.date)), - })); - - const gold_received = (await readCsv('gold_received.csv')).map((r) => ({ - content_link: u(r.content_link), - gold_received: u(r.gold_received), - gilder_username: u(r.gilder_username), - date: toDate(u(r.date)), - })); - - const ip_logs = (await readCsv('ip_logs.csv')).map((r) => ({ - date: toDate(u(r.date)), - ip: u(r.ip), - })); - - const persona = (await readCsv('persona.csv')).map((r) => ({ - persona_inquiry_id: u(r.persona_inquiry_id), - })); - - const poll_votes = (await readCsv('poll_votes.csv')).map((r) => ({ - post_id: u(r.post_id), - user_selection: u(r.user_selection), - text: u(r.text), - image_url: u(r.image_url), - is_prediction: u(r.is_prediction), - stake_amount: u(r.stake_amount), - })); - - const scheduled_posts = (await readCsv('scheduled_posts.csv')).map((r) => ({ - scheduled_post_id: u(r.scheduled_post_id), - subreddit: u(r.subreddit), - title: u(r.title), - body: u(r.body), - url: u(r.url), - submission_time: toDate(u(r.submission_time)), - recurrence: u(r.recurrence), - })); - - const statistics = (await readCsv('statistics.csv')).map((r) => ({ - statistic: u(r.statistic), - value: u(r.value), - })); - - const checkfile = (await readCsv('checkfile.csv')).map((r) => ({ - filename: u(r.filename), - sha256: u(r.sha256), - })); - - return { - // Core content - posts: dropEmptyRows(posts), - post_headers: dropEmptyRows(post_headers), - comments: dropEmptyRows(comments), - comment_headers: dropEmptyRows(comment_headers), - - // Votes / visibility / saves - post_votes: dropEmptyRows(post_votes), - comment_votes: dropEmptyRows(comment_votes), - saved_posts: dropEmptyRows(saved_posts), - saved_comments: dropEmptyRows(saved_comments), - hidden_posts: dropEmptyRows(hidden_posts), - - // Messaging - message_headers: dropEmptyRows(message_headers), - messages: dropEmptyRows(messages), - messages_archive_headers: dropEmptyRows(messages_archive_headers), - messages_archive: dropEmptyRows(messages_archive), - - // Chat - chat_history: dropEmptyRows(chat_history), - - // Account and preferences - account_gender: dropEmptyRows(account_gender), - sensitive_ads_preferences: dropEmptyRows(sensitive_ads_preferences), - birthdate: dropEmptyRows(birthdate), - user_preferences: dropEmptyRows(user_preferences), - linked_identities: dropEmptyRows(linked_identities), - linked_phone_number: dropEmptyRows(linked_phone_number), - twitter: dropEmptyRows(twitter), - - // Moderation / subscriptions / subreddits - approved_submitter_subreddits: dropEmptyRows(approved_submitter_subreddits), - moderated_subreddits: 
dropEmptyRows(moderated_subreddits), - subscribed_subreddits: dropEmptyRows(subscribed_subreddits), - multireddits: dropEmptyRows(multireddits), - - // Commerce and payouts - purchases: dropEmptyRows(purchases), - subscriptions: dropEmptyRows(subscriptions), - payouts: dropEmptyRows(payouts), - stripe: dropEmptyRows(stripe), - - // Misc - announcements: dropEmptyRows(announcements), - drafts: dropEmptyRows(drafts), - friends: dropEmptyRows(friends), - gilded_content: dropEmptyRows(gilded_content), - gold_received: dropEmptyRows(gold_received), - ip_logs: dropEmptyRows(ip_logs), - persona: dropEmptyRows(persona), - poll_votes: dropEmptyRows(poll_votes), - scheduled_posts: dropEmptyRows(scheduled_posts), - statistics: dropEmptyRows(statistics), - checkfile: dropEmptyRows(checkfile), - }; -} - -/** - * csv-parse promise wrapper for convenience - */ -function parseCsv(input: string): Promise { - return new Promise((resolve, reject) => { - if (!input || input.trim().length === 0) return resolve([]); - csvParse( - input, - { - columns: true, - bom: true, - skip_empty_lines: true, - relax_column_count: true, - trim: true, - }, - (err: unknown | null, records: RawRecord[]) => { - if (err) reject(err); - else resolve(records); - }, - ); - }); -} - -/** - * Coercion helpers - */ -function u(v: string | undefined): string | undefined { - return blankToUndefined(v); -} -function blankToUndefined(v: string | undefined): string | undefined { - if (v == null) return undefined; - const t = v.trim(); - return t === '' ? undefined : t; -} -function numOrUndefined(v: string | undefined): number | undefined { - const t = blankToUndefined(v); - if (t == null) return undefined; - const n = Number(t); - return Number.isFinite(n) ? n : undefined; -} -function toDate(dateStr: string | undefined): Date | undefined { - const s = blankToUndefined(dateStr); - if (!s) return undefined; - const num = Number(s); - if (Number.isFinite(num)) { - const ms = num < 1e12 ? num * 1000 : num; - const d = new Date(ms); - return Number.isNaN(d.getTime()) ? undefined : d; - } - const d = new Date(s); - return Number.isNaN(d.getTime()) ? undefined : d; -} -function extractPostIdFromUrl(urlStr: string | undefined): string | undefined { - if (!urlStr) return undefined; - try { - const u = new URL(urlStr); - // Example: /r/sveltejs/comments/1kp9tv3/transitions.../mswmz2d/ - const parts = u.pathname.split('/').filter(Boolean); - const idx = parts.indexOf('comments'); - if (idx >= 0 && parts.length > idx + 1) { - const candidate = parts[idx + 1]?.trim(); - if (candidate) return candidate; - } - } catch { - // non-URL or malformed; ignore - } - return undefined; -} - -/** - * Filter helpers to remove rows that are entirely empty after coercion - * (i.e., every property is undefined or null). This protects downstream upserts - * from generating "No values to set" when a CSV contains a single blank row. 
- */ -function isNonEmptyRow(obj: Record): boolean { - for (const v of Object.values(obj)) { - if (v !== undefined && v !== null) return true; - } - return false; -} -function dropEmptyRows>(rows: T[]): T[] { - return rows.filter((r) => isNonEmptyRow(r as Record)); -} - -/** - * Mapping: posts.csv - * Headers: id,permalink,date,ip,subreddit,gildings,title,url,body - */ -function mapPost( - r: RawRecord, -): ParsedRedditExport['posts'][number] | undefined { - const id = u(r.id); - const permalink = u(r.permalink); - const dateStr = u(r.date); - const subreddit = u(r.subreddit); - - if (!id || !permalink || !dateStr || !subreddit) return undefined; - - const date = toDate(dateStr); - if (date == null) return undefined; - - const created_utc = date; - - return { - id, - permalink, - date, - created_utc, - ip: u(r.ip), - subreddit, - gildings: numOrUndefined(r.gildings), - title: u(r.title), - url: u(r.url), - body: u(r.body), - }; -} - -/** - * Mapping: comments.csv - * Headers: id,permalink,date,ip,subreddit,gildings,link,parent,body,media - */ -function mapComment( - r: RawRecord, -): ParsedRedditExport['comments'][number] | undefined { - const id = u(r.id); - const permalink = u(r.permalink); - const dateStr = u(r.date); - const subreddit = u(r.subreddit); - const link = u(r.link); - - if (!id || !permalink || !dateStr || !subreddit || !link) return undefined; - - const date = toDate(dateStr); - if (date == null) return undefined; - - const created_utc = date; - const post_id = extractPostIdFromUrl(link) ?? extractPostIdFromUrl(permalink); - - return { - id, - permalink, - date, - created_utc, - ip: u(r.ip), - subreddit, - gildings: numOrUndefined(r.gildings), - link, - post_id, - parent: u(r.parent), - body: u(r.body), - media: u(r.media), - }; } diff --git a/packages/vault-core/src/adapters/reddit/src/schema.ts b/packages/vault-core/src/adapters/reddit/src/schema.ts index 91b8c199b2..d18f45eb26 100644 --- a/packages/vault-core/src/adapters/reddit/src/schema.ts +++ b/packages/vault-core/src/adapters/reddit/src/schema.ts @@ -9,10 +9,9 @@ import { * Core content tables */ export const reddit_posts = sqliteTable('reddit_posts', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), date: integer('date', { mode: 'timestamp' }).notNull(), - created_utc: integer('created_utc', { mode: 'timestamp' }).notNull(), ip: text('ip'), subreddit: text('subreddit').notNull(), gildings: integer('gildings'), @@ -22,7 +21,7 @@ export const reddit_posts = sqliteTable('reddit_posts', { }); export const reddit_post_headers = sqliteTable('reddit_post_headers', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), date: integer('date', { mode: 'timestamp' }).notNull(), ip: text('ip'), @@ -32,22 +31,20 @@ export const reddit_post_headers = sqliteTable('reddit_post_headers', { }); export const reddit_comments = sqliteTable('reddit_comments', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), date: integer('date', { mode: 'timestamp' }).notNull(), - created_utc: integer('created_utc', { mode: 'timestamp' }).notNull(), ip: text('ip'), subreddit: text('subreddit').notNull(), gildings: integer('gildings'), link: text('link').notNull(), - post_id: text('post_id'), parent: text('parent'), body: text('body'), media: text('media'), }); export const reddit_comment_headers = sqliteTable('reddit_comment_headers', { - id: text('id').primaryKey(), + 
id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), date: integer('date', { mode: 'timestamp' }).notNull(), ip: text('ip'), @@ -61,29 +58,29 @@ export const reddit_comment_headers = sqliteTable('reddit_comment_headers', { * Votes, saves, visibility */ export const reddit_post_votes = sqliteTable('reddit_post_votes', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), direction: text('direction').notNull(), // up/down/none }); export const reddit_comment_votes = sqliteTable('reddit_comment_votes', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), direction: text('direction').notNull(), }); export const reddit_saved_posts = sqliteTable('reddit_saved_posts', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), }); export const reddit_saved_comments = sqliteTable('reddit_saved_comments', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), }); export const reddit_hidden_posts = sqliteTable('reddit_hidden_posts', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), }); @@ -91,7 +88,7 @@ export const reddit_hidden_posts = sqliteTable('reddit_hidden_posts', { * Messaging */ export const reddit_message_headers = sqliteTable('reddit_message_headers', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), thread_id: text('thread_id'), date: integer('date', { mode: 'timestamp' }), @@ -101,7 +98,7 @@ export const reddit_message_headers = sqliteTable('reddit_message_headers', { }); export const reddit_messages = sqliteTable('reddit_messages', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), thread_id: text('thread_id'), date: integer('date', { mode: 'timestamp' }), @@ -115,7 +112,7 @@ export const reddit_messages = sqliteTable('reddit_messages', { export const reddit_messages_archive_headers = sqliteTable( 'reddit_messages_archive_headers', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), thread_id: text('thread_id'), date: integer('date', { mode: 'timestamp' }), @@ -126,7 +123,7 @@ export const reddit_messages_archive_headers = sqliteTable( ); export const reddit_messages_archive = sqliteTable('reddit_messages_archive', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), permalink: text('permalink').notNull(), thread_id: text('thread_id'), date: integer('date', { mode: 'timestamp' }), @@ -141,7 +138,7 @@ export const reddit_messages_archive = sqliteTable('reddit_messages_archive', { * Chat */ export const reddit_chat_history = sqliteTable('reddit_chat_history', { - message_id: text('message_id').primaryKey(), + message_id: text('message_id').primaryKey().notNull(), created_at: integer('created_at', { mode: 'timestamp' }), updated_at: integer('updated_at', { mode: 'timestamp' }), username: text('username'), @@ -179,32 +176,27 @@ export const reddit_birthdate = sqliteTable('reddit_birthdate', { }); export const reddit_user_preferences = sqliteTable('reddit_user_preferences', { - preference: text('preference').primaryKey(), + preference: text('preference').primaryKey().notNull(), value: text('value'), }); export const reddit_linked_identities = sqliteTable( 
'reddit_linked_identities', { - issuer_id: text('issuer_id'), - subject_id: text('subject_id'), + issuer_id: text('issuer_id').primaryKey().notNull(), + subject_id: text('subject_id').notNull(), }, - (t) => ({ - uq_issuer_subject: uniqueIndex( - 'reddit_linked_identities_issuer_subject_uq', - ).on(t.issuer_id, t.subject_id), - }), ); export const reddit_linked_phone_number = sqliteTable( 'reddit_linked_phone_number', { - phone_number: text('phone_number').primaryKey(), + phone_number: text('phone_number').primaryKey().notNull(), }, ); export const reddit_twitter = sqliteTable('reddit_twitter', { - username: text('username').primaryKey(), + username: text('username').primaryKey().notNull(), }); /** @@ -213,26 +205,26 @@ export const reddit_twitter = sqliteTable('reddit_twitter', { export const reddit_approved_submitter_subreddits = sqliteTable( 'reddit_approved_submitter_subreddits', { - subreddit: text('subreddit').primaryKey(), + subreddit: text('subreddit').primaryKey().notNull(), }, ); export const reddit_moderated_subreddits = sqliteTable( 'reddit_moderated_subreddits', { - subreddit: text('subreddit').primaryKey(), + subreddit: text('subreddit').primaryKey().notNull(), }, ); export const reddit_subscribed_subreddits = sqliteTable( 'reddit_subscribed_subreddits', { - subreddit: text('subreddit').primaryKey(), + subreddit: text('subreddit').primaryKey().notNull(), }, ); export const reddit_multireddits = sqliteTable('reddit_multireddits', { - id: text('id').primaryKey(), + id: text('id').primaryKey().notNull(), display_name: text('display_name'), date: integer('date', { mode: 'timestamp' }), description: text('description'), @@ -249,7 +241,7 @@ export const reddit_multireddits = sqliteTable('reddit_multireddits', { */ export const reddit_purchases = sqliteTable('reddit_purchases', { processor: text('processor'), - transaction_id: text('transaction_id').primaryKey(), + transaction_id: text('transaction_id').primaryKey().notNull(), product: text('product'), date: integer('date', { mode: 'timestamp' }), cost: text('cost'), @@ -259,7 +251,7 @@ export const reddit_purchases = sqliteTable('reddit_purchases', { export const reddit_subscriptions = sqliteTable('reddit_subscriptions', { processor: text('processor'), - subscription_id: text('subscription_id').primaryKey(), + subscription_id: text('subscription_id').primaryKey().notNull(), product: text('product'), product_id: text('product_id'), product_name: text('product_name'), @@ -272,7 +264,7 @@ export const reddit_payouts = sqliteTable( 'reddit_payouts', { payout_amount_usd: text('payout_amount_usd'), - date: integer('date', { mode: 'timestamp' }), + date: integer('date', { mode: 'timestamp' }).primaryKey().notNull(), payout_id: text('payout_id'), }, (t) => ({ @@ -284,14 +276,14 @@ export const reddit_payouts = sqliteTable( ); export const reddit_stripe = sqliteTable('reddit_stripe', { - stripe_account_id: text('stripe_account_id').primaryKey(), + stripe_account_id: text('stripe_account_id').primaryKey().notNull(), }); /** * Misc content and utility */ export const reddit_announcements = sqliteTable('reddit_announcements', { - announcement_id: text('announcement_id').primaryKey(), + announcement_id: text('announcement_id').primaryKey().notNull(), sent_at: integer('sent_at', { mode: 'timestamp' }), read_at: integer('read_at', { mode: 'timestamp' }), from_id: text('from_id'), @@ -302,7 +294,7 @@ export const reddit_announcements = sqliteTable('reddit_announcements', { }); export const reddit_drafts = sqliteTable('reddit_drafts', { - id: 
text('id').primaryKey(), + id: text('id').primaryKey().notNull(), title: text('title'), body: text('body'), kind: text('kind'), @@ -323,69 +315,37 @@ export const reddit_friends = sqliteTable('reddit_friends', { note: text('note'), }); -export const reddit_gilded_content = sqliteTable( - 'reddit_gilded_content', - { - content_link: text('content_link'), - award: text('award'), - amount: text('amount'), - date: integer('date', { mode: 'timestamp' }), - }, - (t) => ({ - uq_content_award_date: uniqueIndex( - 'reddit_gilded_content_content_award_date_uq', - ).on(t.content_link, t.award, t.date), - }), -); +export const reddit_gilded_content = sqliteTable('reddit_gilded_content', { + content_link: text('content_link').primaryKey().notNull(), + award: text('award'), + amount: text('amount'), + date: integer('date', { mode: 'timestamp' }), +}); -export const reddit_gold_received = sqliteTable( - 'reddit_gold_received', - { - content_link: text('content_link'), - gold_received: text('gold_received'), - gilder_username: text('gilder_username'), - date: integer('date', { mode: 'timestamp' }), - }, - (t) => ({ - uq_content_date: uniqueIndex('reddit_gold_received_content_date_uq').on( - t.content_link, - t.date, - ), - }), -); +export const reddit_gold_received = sqliteTable('reddit_gold_received', { + content_link: text('content_link').primaryKey().notNull(), + gold_received: text('gold_received'), + gilder_username: text('gilder_username'), + date: integer('date', { mode: 'timestamp' }), +}); -export const reddit_ip_logs = sqliteTable( - 'reddit_ip_logs', - { - date: integer('date', { mode: 'timestamp' }), - ip: text('ip'), - }, - (t) => ({ - uq_date_ip: uniqueIndex('reddit_ip_logs_date_ip_uq').on(t.date, t.ip), - }), -); +export const reddit_ip_logs = sqliteTable('reddit_ip_logs', { + date: integer('date', { mode: 'timestamp' }).primaryKey().notNull(), + ip: text('ip'), +}); export const reddit_persona = sqliteTable('reddit_persona', { persona_inquiry_id: text('persona_inquiry_id').primaryKey(), }); -export const reddit_poll_votes = sqliteTable( - 'reddit_poll_votes', - { - post_id: text('post_id'), - user_selection: text('user_selection'), - text: text('text'), - image_url: text('image_url'), - is_prediction: text('is_prediction'), - stake_amount: text('stake_amount'), - }, - (t) => ({ - uq_post_user: uniqueIndex('reddit_poll_votes_post_user_uq').on( - t.post_id, - t.user_selection, - ), - }), -); +export const reddit_poll_votes = sqliteTable('reddit_poll_votes', { + post_id: text('post_id').primaryKey().notNull(), + user_selection: text('user_selection'), + text: text('text'), + image_url: text('image_url'), + is_prediction: text('is_prediction'), + stake_amount: text('stake_amount'), +}); export const reddit_scheduled_posts = sqliteTable('reddit_scheduled_posts', { scheduled_post_id: text('scheduled_post_id').primaryKey(), diff --git a/packages/vault-core/src/adapters/reddit/src/upsert.ts b/packages/vault-core/src/adapters/reddit/src/upsert.ts deleted file mode 100644 index ddb7a2ed8f..0000000000 --- a/packages/vault-core/src/adapters/reddit/src/upsert.ts +++ /dev/null @@ -1,471 +0,0 @@ -import type { - AnySQLiteColumn, - BaseSQLiteDatabase, - SQLiteTable, -} from 'drizzle-orm/sqlite-core'; -import type { ParsedRedditExport } from '.'; -import { - reddit_account_gender, - reddit_announcements, - reddit_approved_submitter_subreddits, - reddit_birthdate, - reddit_chat_history, - reddit_checkfile, - reddit_comment_headers, - reddit_comment_votes, - reddit_comments, - reddit_drafts, - 
reddit_friends, - reddit_gilded_content, - reddit_gold_received, - reddit_hidden_posts, - reddit_ip_logs, - reddit_linked_identities, - reddit_linked_phone_number, - reddit_message_headers, - reddit_messages, - reddit_messages_archive, - reddit_messages_archive_headers, - reddit_moderated_subreddits, - reddit_multireddits, - reddit_payouts, - reddit_persona, - reddit_poll_votes, - reddit_post_headers, - reddit_post_votes, - reddit_posts, - reddit_purchases, - reddit_saved_comments, - reddit_saved_posts, - reddit_scheduled_posts, - reddit_sensitive_ads_preferences, - reddit_statistics, - reddit_stripe, - reddit_subscribed_subreddits, - reddit_subscriptions, - reddit_twitter, - reddit_user_preferences, -} from './schema'; - -// Parser now emits Date objects for all timestamp fields per parseSchema; no extra coercion needed here. - -/** - * Small utility to chunk arrays for batched inserts to keep statements reasonable. - */ -function _chunk(arr: T[], size: number): T[][] { - if (size <= 0) return [arr]; - const out: T[][] = []; - for (let i = 0; i < arr.length; i += size) out.push(arr.slice(i, i + size)); - return out; -} - -/** - * Remove undefined/null fields from a row. If nothing remains, it's an "empty" row. - */ -function pruneRow>(row: T): Partial { - const cleaned: Partial = {}; - for (const [k, v] of Object.entries(row)) { - if (v !== undefined && v !== null) { - (cleaned as Record)[k] = v; - } - } - return cleaned; -} - -/** - * Generic, safe onConflict upsert for a single row object. - * Falls back to per-row inserts to preserve idempotency and simplicity. - */ -async function upsertOne( - tx: unknown, - table: SQLiteTable, - row: T, - target: unknown, // allow column or composite array -): Promise { - // Guard against empty CSV rows that parse into all-undefined fields. - const cleaned = pruneRow( - row as unknown as Record, - ) as Partial; - if (Object.keys(cleaned).length === 0) { - // Nothing to insert/update; skip silently. - return; - } - const anyTx = tx as { - insert: (tbl: SQLiteTable) => { - values: (v: Partial) => { - onConflictDoUpdate: (args: { - target: unknown; - set: Partial; - }) => Promise; - }; - }; - }; - await anyTx.insert(table).values(cleaned).onConflictDoUpdate({ - target, - set: cleaned, - }); -} - -/** - * Upsert many rows by iterating records with conflict handling. - * Uses small per-row statements to stay simple and avoid driver limits. - */ -async function upsertMany( - tx: unknown, - table: SQLiteTable, - rows: T[], - target: AnySQLiteColumn | readonly AnySQLiteColumn[], -): Promise { - for (const r of rows) { - await upsertOne(tx, table, r, target); - } -} - -/** - * Implement the Adapter.upsert contract. - * - * Notes: - * - Upserts are performed in a single transaction. - * - We intentionally avoid adding FKs in v1 per export inconsistencies. - */ -export async function upsertRedditData( - db: BaseSQLiteDatabase<'sync' | 'async', Record>, - data: ParsedRedditExport, -): Promise { - const provider: - | BaseSQLiteDatabase<'sync' | 'async', Record> - | { transaction: (fn: (tx: unknown) => Promise) => Promise } = - typeof (db as unknown as { transaction?: unknown }).transaction === - 'function' - ? db - : { - transaction: async (fn: (tx: unknown) => Promise) => - fn(db as unknown), - }; - - await provider.transaction(async (tx: unknown) => { - // Parser already returns Date objects for timestamp fields. Use data as-is. - const posts = data.posts ?? []; - const post_headers = data.post_headers ?? []; - const comments = data.comments ?? 
[]; - const comment_headers = data.comment_headers ?? []; - const message_headers = data.message_headers ?? []; - const messages = data.messages ?? []; - const messages_archive_headers = data.messages_archive_headers ?? []; - const messages_archive = data.messages_archive ?? []; - const chat_history = data.chat_history ?? []; - const birthdate = data.birthdate ?? []; - const multireddits = data.multireddits ?? []; - const purchases = data.purchases ?? []; - const subscriptions = data.subscriptions ?? []; - const payouts = data.payouts ?? []; - const announcements = data.announcements ?? []; - const drafts = data.drafts ?? []; - const gilded_content = data.gilded_content ?? []; - const gold_received = data.gold_received ?? []; - const ip_logs = data.ip_logs ?? []; - const scheduled_posts = data.scheduled_posts ?? []; - // Core content - if (posts?.length) { - await upsertMany(tx, reddit_posts, posts, reddit_posts.id); - } - if (post_headers?.length) { - await upsertMany( - tx, - reddit_post_headers, - post_headers, - reddit_post_headers.id, - ); - } - if (comments?.length) { - await upsertMany(tx, reddit_comments, comments, reddit_comments.id); - } - if (comment_headers?.length) { - await upsertMany( - tx, - reddit_comment_headers, - comment_headers, - reddit_comment_headers.id, - ); - } - - // Votes / visibility / saves - if (data.post_votes?.length) { - // Use id as the conflict target to satisfy SQLite UNIQUE/PK requirements in v1 - await upsertMany( - tx, - reddit_post_votes, - data.post_votes, - reddit_post_votes.id, - ); - } - if (data.comment_votes?.length) { - // Use id as the conflict target to satisfy SQLite UNIQUE/PK requirements in v1 - await upsertMany( - tx, - reddit_comment_votes, - data.comment_votes, - reddit_comment_votes.id, - ); - } - if (data.saved_posts?.length) { - await upsertMany( - tx, - reddit_saved_posts, - data.saved_posts, - reddit_saved_posts.id, - ); - } - if (data.saved_comments?.length) { - await upsertMany( - tx, - reddit_saved_comments, - data.saved_comments, - reddit_saved_comments.id, - ); - } - if (data.hidden_posts?.length) { - await upsertMany( - tx, - reddit_hidden_posts, - data.hidden_posts, - reddit_hidden_posts.id, - ); - } - - // Messaging - if (message_headers?.length) { - await upsertMany( - tx, - reddit_message_headers, - message_headers, - reddit_message_headers.id, - ); - } - if (messages?.length) { - await upsertMany(tx, reddit_messages, messages, reddit_messages.id); - } - if (messages_archive_headers?.length) { - await upsertMany( - tx, - reddit_messages_archive_headers, - messages_archive_headers, - reddit_messages_archive_headers.id, - ); - } - if (messages_archive?.length) { - await upsertMany( - tx, - reddit_messages_archive, - messages_archive, - reddit_messages_archive.id, - ); - } - - // Chat - if (chat_history?.length) { - await upsertMany( - tx, - reddit_chat_history, - chat_history, - reddit_chat_history.message_id, - ); - } - - // Account & preferences - if (data.account_gender?.length) { - await upsertMany( - tx, - reddit_account_gender, - data.account_gender, - reddit_account_gender.id, // Single-row sentinel - ); - } - if (data.sensitive_ads_preferences?.length) { - await upsertMany( - tx, - reddit_sensitive_ads_preferences, - data.sensitive_ads_preferences, - reddit_sensitive_ads_preferences.type, - ); - } - if (birthdate?.length) { - // Single-row table; use sentinel primary key - await upsertMany(tx, reddit_birthdate, birthdate, reddit_birthdate.id); - } - if (data.user_preferences?.length) { - await upsertMany( - tx, - 
reddit_user_preferences, - data.user_preferences, - reddit_user_preferences.preference, - ); - } - if (data.linked_identities?.length) { - await upsertMany(tx, reddit_linked_identities, data.linked_identities, [ - reddit_linked_identities.issuer_id, - reddit_linked_identities.subject_id, - ]); - } - if (data.linked_phone_number?.length) { - await upsertMany( - tx, - reddit_linked_phone_number, - data.linked_phone_number, - reddit_linked_phone_number.phone_number, - ); - } - if (data.twitter?.length) { - await upsertMany( - tx, - reddit_twitter, - data.twitter, - reddit_twitter.username, - ); - } - - // Moderation & subs - if (data.approved_submitter_subreddits?.length) { - await upsertMany( - tx, - reddit_approved_submitter_subreddits, - data.approved_submitter_subreddits, - reddit_approved_submitter_subreddits.subreddit, - ); - } - if (data.moderated_subreddits?.length) { - await upsertMany( - tx, - reddit_moderated_subreddits, - data.moderated_subreddits, - reddit_moderated_subreddits.subreddit, - ); - } - if (data.subscribed_subreddits?.length) { - await upsertMany( - tx, - reddit_subscribed_subreddits, - data.subscribed_subreddits, - reddit_subscribed_subreddits.subreddit, - ); - } - if (multireddits?.length) { - await upsertMany( - tx, - reddit_multireddits, - multireddits, - reddit_multireddits.id, - ); - } - - // Commerce & payouts - if (purchases?.length) { - await upsertMany( - tx, - reddit_purchases, - purchases, - reddit_purchases.transaction_id, - ); - } - if (subscriptions?.length) { - await upsertMany( - tx, - reddit_subscriptions, - subscriptions, - reddit_subscriptions.subscription_id, - ); - } - if (payouts?.length) { - await upsertMany(tx, reddit_payouts, payouts, [ - reddit_payouts.payout_id, - reddit_payouts.date, - ]); - } - if (data.stripe?.length) { - await upsertMany( - tx, - reddit_stripe, - data.stripe, - reddit_stripe.stripe_account_id, - ); - } - - // Misc - if (announcements?.length) { - await upsertMany( - tx, - reddit_announcements, - announcements, - reddit_announcements.announcement_id, - ); - } - if (drafts?.length) { - await upsertMany(tx, reddit_drafts, drafts, reddit_drafts.id); - } - if (data.friends?.length) { - await upsertMany( - tx, - reddit_friends, - data.friends, - reddit_friends.username, - ); - } - if (gilded_content?.length) { - await upsertMany(tx, reddit_gilded_content, gilded_content, [ - reddit_gilded_content.content_link, - reddit_gilded_content.award, - reddit_gilded_content.date, - ]); - } - if (gold_received?.length) { - await upsertMany(tx, reddit_gold_received, gold_received, [ - reddit_gold_received.content_link, - reddit_gold_received.date, - ]); - } - if (ip_logs?.length) { - await upsertMany(tx, reddit_ip_logs, ip_logs, [ - reddit_ip_logs.date, - reddit_ip_logs.ip, - ]); - } - if (data.persona?.length) { - await upsertMany( - tx, - reddit_persona, - data.persona, - reddit_persona.persona_inquiry_id, - ); - } - if (data.poll_votes?.length) { - await upsertMany(tx, reddit_poll_votes, data.poll_votes, [ - reddit_poll_votes.post_id, - reddit_poll_votes.user_selection, - ]); - } - if (scheduled_posts?.length) { - await upsertMany( - tx, - reddit_scheduled_posts, - scheduled_posts, - reddit_scheduled_posts.scheduled_post_id, - ); - } - if (data.statistics?.length) { - await upsertMany( - tx, - reddit_statistics, - data.statistics, - reddit_statistics.statistic, - ); - } - if (data.checkfile?.length) { - await upsertMany( - tx, - reddit_checkfile, - data.checkfile, - reddit_checkfile.filename, - ); - } - }); -} diff --git 
a/packages/vault-core/src/adapters/reddit/src/validation.ts b/packages/vault-core/src/adapters/reddit/src/validation.ts index 2e215c28f0..e27112c077 100644 --- a/packages/vault-core/src/adapters/reddit/src/validation.ts +++ b/packages/vault-core/src/adapters/reddit/src/validation.ts @@ -1,5 +1,13 @@ import { type } from 'arktype'; +const date = type('string.date.parse'); +const dateOpt = type('string') + .pipe((v) => (v === '' ? undefined : v)) + .to('string.date.parse | undefined'); +const registrationDate = type('string') + .pipe((v) => (v === 'registration ip' ? undefined : v)) + .to('string.date.parse | undefined'); + // ArkType parse schema // explicit object-array schemas for all other datasets to avoid 'unknown'. export const parseSchema = type({ @@ -7,109 +15,107 @@ export const parseSchema = type({ posts: type({ id: 'string', permalink: 'string', - date: 'Date', - created_utc: 'Date', - ip: 'string | undefined', + date: date, + created_utc: date, + ip: 'string.ip', subreddit: 'string', - gildings: 'number | undefined', + gildings: 'string.numeric.parse', title: 'string | undefined', url: 'string | undefined', body: 'string | undefined', }).array(), post_headers: type({ - id: 'string | undefined', - permalink: 'string | undefined', - date: 'Date | undefined', - ip: 'string | undefined', - subreddit: 'string | undefined', - gildings: 'number | undefined', + id: 'string', + permalink: 'string', + date: date, + ip: 'string.ip', + subreddit: 'string', + gildings: 'string.numeric.parse', url: 'string | undefined', }).array(), comments: type({ id: 'string', permalink: 'string', - date: 'Date', - created_utc: 'Date', - ip: 'string | undefined', + date: date, + ip: 'string.ip', subreddit: 'string', - gildings: 'number | undefined', - link: 'string', - post_id: 'string | undefined', + gildings: 'string.numeric.parse', + link: 'string.url', parent: 'string | undefined', body: 'string | undefined', media: 'string | undefined', }).array(), comment_headers: type({ - id: 'string | undefined', - permalink: 'string | undefined', - date: 'Date | undefined', - ip: 'string | undefined', - subreddit: 'string | undefined', - gildings: 'number | undefined', - link: 'string | undefined', + id: 'string', + permalink: 'string', + date: date, + ip: 'string.ip', + subreddit: 'string', + gildings: 'string.numeric.parse', + link: 'string', parent: 'string | undefined', }).array(), // Votes / visibility / saves post_votes: type({ - id: 'string | undefined', - permalink: 'string | undefined', - direction: 'string | undefined', + id: 'string', + permalink: 'string', + direction: 'string', }).array(), comment_votes: type({ - id: 'string | undefined', - permalink: 'string | undefined', - direction: 'string | undefined', + id: 'string', + permalink: 'string', + direction: 'string', }).array(), saved_posts: type({ - id: 'string | undefined', - permalink: 'string | undefined', + id: 'string', + permalink: 'string', }).array(), saved_comments: type({ - id: 'string | undefined', - permalink: 'string | undefined', + id: 'string', + permalink: 'string', }).array(), hidden_posts: type({ - id: 'string | undefined', - permalink: 'string | undefined', + id: 'string', + permalink: 'string', }).array(), // Messaging message_headers: type({ - id: 'string | undefined', - permalink: 'string | undefined', + id: 'string', + permalink: 'string', thread_id: 'string | undefined', - date: 'Date | undefined', - ip: 'string | undefined', + date: dateOpt, + ip: 'string.ip', from: 'string | undefined', to: 'string | undefined', }).array(), 
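+    // Note: `dateOpt` (defined at the top of this file) maps empty CSV cells to
+    // `undefined` before date parsing, so optional timestamps validate cleanly.
+    // Sketch of the behavior:
+    //   dateOpt('')                          // -> undefined
+    //   dateOpt('2020-01-02T03:04:05.000Z')  // -> Date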
messages: type({ - id: 'string | undefined', - permalink: 'string | undefined', + id: 'string', + permalink: 'string', thread_id: 'string | undefined', - date: 'Date | undefined', - ip: 'string | undefined', + date: dateOpt, + ip: 'string.ip', from: 'string | undefined', to: 'string | undefined', subject: 'string | undefined', body: 'string | undefined', }).array(), messages_archive_headers: type({ - id: 'string | undefined', - permalink: 'string | undefined', + id: 'string', + permalink: 'string', thread_id: 'string | undefined', - date: 'Date | undefined', - ip: 'string | undefined', + date: dateOpt, + ip: 'string.ip', from: 'string | undefined', to: 'string | undefined', }).array(), messages_archive: type({ - id: 'string | undefined', - permalink: 'string | undefined', + id: 'string', + permalink: 'string', thread_id: 'string | undefined', - date: 'Date | undefined', - ip: 'string | undefined', + date: dateOpt, + ip: 'string.ip', from: 'string | undefined', to: 'string | undefined', subject: 'string | undefined', @@ -118,9 +124,9 @@ export const parseSchema = type({ // Chat chat_history: type({ - message_id: 'string | undefined', - created_at: 'Date | undefined', - updated_at: 'Date | undefined', + message_id: 'string', + created_at: dateOpt, + updated_at: dateOpt, username: 'string | undefined', message: 'string | undefined', thread_parent_message_id: 'string | undefined', @@ -131,38 +137,40 @@ export const parseSchema = type({ }).array(), // Account and preferences - account_gender: type({ account_gender: 'string | undefined' }).array(), + account_gender: type({ + account_gender: 'string | undefined', + }).array(), sensitive_ads_preferences: type({ - type: 'string | undefined', + type: 'string', preference: 'string | undefined', }).array(), birthdate: type({ - birthdate: 'Date | undefined', - verified_birthdate: 'Date | undefined', + birthdate: dateOpt, + verified_birthdate: dateOpt, verification_state: 'string | undefined', verification_method: 'string | undefined', }).array(), user_preferences: type({ - preference: 'string | undefined', + preference: 'string', value: 'string | undefined', }).array(), linked_identities: type({ - issuer_id: 'string | undefined', - subject_id: 'string | undefined', + issuer_id: 'string', + subject_id: 'string', }).array(), - linked_phone_number: type({ phone_number: 'string | undefined' }).array(), - twitter: type({ username: 'string | undefined' }).array(), + linked_phone_number: type({ phone_number: 'string' }).array(), + twitter: type({ username: 'string' }).array(), // Moderation / subscriptions / subreddits approved_submitter_subreddits: type({ - subreddit: 'string | undefined', + subreddit: 'string', }).array(), - moderated_subreddits: type({ subreddit: 'string | undefined' }).array(), - subscribed_subreddits: type({ subreddit: 'string | undefined' }).array(), + moderated_subreddits: type({ subreddit: 'string' }).array(), + subscribed_subreddits: type({ subreddit: 'string' }).array(), multireddits: type({ - id: 'string | undefined', + id: 'string', display_name: 'string | undefined', - date: 'Date | undefined', + date: dateOpt, description: 'string | undefined', privacy: 'string | undefined', subreddits: 'string | undefined', @@ -175,35 +183,35 @@ export const parseSchema = type({ // Commerce and payouts purchases: type({ processor: 'string | undefined', - transaction_id: 'string | undefined', + transaction_id: 'string', product: 'string | undefined', - date: 'Date | undefined', + date: dateOpt, cost: 'string | undefined', currency: 'string | 
undefined', status: 'string | undefined', }).array(), subscriptions: type({ processor: 'string | undefined', - subscription_id: 'string | undefined', + subscription_id: 'string', product: 'string | undefined', product_id: 'string | undefined', product_name: 'string | undefined', status: 'string | undefined', - start_date: 'Date | undefined', - end_date: 'Date | undefined', + start_date: dateOpt, + end_date: dateOpt, }).array(), payouts: type({ payout_amount_usd: 'string | undefined', - date: 'Date | undefined', + date: date, payout_id: 'string | undefined', }).array(), - stripe: type({ stripe_account_id: 'string | undefined' }).array(), + stripe: type({ stripe_account_id: 'string' }).array(), // Misc announcements: type({ - announcement_id: 'string | undefined', - sent_at: 'Date | undefined', - read_at: 'Date | undefined', + announcement_id: 'string', + sent_at: dateOpt, + read_at: dateOpt, from_id: 'string | undefined', from_username: 'string | undefined', subject: 'string | undefined', @@ -211,11 +219,11 @@ export const parseSchema = type({ url: 'string | undefined', }).array(), drafts: type({ - id: 'string | undefined', + id: 'string', title: 'string | undefined', body: 'string | undefined', kind: 'string | undefined', - created: 'Date | undefined', + created: dateOpt, spoiler: 'string | undefined', nsfw: 'string | undefined', original_content: 'string | undefined', @@ -227,25 +235,25 @@ export const parseSchema = type({ is_public_link: 'string | undefined', }).array(), friends: type({ - username: 'string | undefined', + username: 'string', note: 'string | undefined', }).array(), gilded_content: type({ - content_link: 'string | undefined', + content_link: 'string', award: 'string | undefined', amount: 'string | undefined', - date: 'Date | undefined', + date: dateOpt, }).array(), gold_received: type({ - content_link: 'string | undefined', + content_link: 'string', gold_received: 'string | undefined', gilder_username: 'string | undefined', - date: 'Date | undefined', + date: dateOpt, }).array(), - ip_logs: type({ date: 'Date | undefined', ip: 'string | undefined' }).array(), - persona: type({ persona_inquiry_id: 'string | undefined' }).array(), + ip_logs: type({ date: registrationDate, ip: 'string.ip' }).array(), + persona: type({ persona_inquiry_id: 'string' }).array(), poll_votes: type({ - post_id: 'string | undefined', + post_id: 'string', user_selection: 'string | undefined', text: 'string | undefined', image_url: 'string | undefined', @@ -253,20 +261,20 @@ export const parseSchema = type({ stake_amount: 'string | undefined', }).array(), scheduled_posts: type({ - scheduled_post_id: 'string | undefined', + scheduled_post_id: 'string', subreddit: 'string | undefined', title: 'string | undefined', body: 'string | undefined', url: 'string | undefined', - submission_time: 'Date | undefined', + submission_time: dateOpt, recurrence: 'string | undefined', }).array(), statistics: type({ - statistic: 'string | undefined', + statistic: 'string', value: 'string | undefined', }).array(), checkfile: type({ - filename: 'string | undefined', + filename: 'string', sha256: 'string | undefined', }).array(), }); diff --git a/packages/vault-core/src/codecs/index.ts b/packages/vault-core/src/codecs/index.ts new file mode 100644 index 0000000000..1433df45a0 --- /dev/null +++ b/packages/vault-core/src/codecs/index.ts @@ -0,0 +1,2 @@ +export * from './json'; +export * from './markdown'; diff --git a/packages/vault-core/src/codecs/json.spec.ts b/packages/vault-core/src/codecs/json.spec.ts new file mode 100644 
index 0000000000..3f091d0ce5
--- /dev/null
+++ b/packages/vault-core/src/codecs/json.spec.ts
@@ -0,0 +1,29 @@
+import { describe, test } from 'bun:test';
+import assert from 'node:assert/strict';
+import { jsonFormat } from './json';
+
+describe('JSON', () => {
+  test('stringify converts Date objects into {$date}', () => {
+    const input = { exportedAt: new Date('1970-01-01T00:00:00.000Z') };
+
+    const out = jsonFormat.stringify(input);
+    const parsed = JSON.parse(out);
+
+    // Validate structure and type
+    assert.ok(typeof parsed.exportedAt.$date === 'string');
+    assert.strictEqual(parsed.exportedAt.$date, '1970-01-01T00:00:00.000Z');
+  });
+
+  test('parse revives {$date} back to a real Date', () => {
+    const text = JSON.stringify({
+      exportedAt: { $date: '2020-01-02T03:04:05.000Z' },
+    });
+    const revived = jsonFormat.parse(text);
+
+    assert.ok(revived.exportedAt instanceof Date);
+    assert.strictEqual(
+      revived.exportedAt.toISOString(),
+      '2020-01-02T03:04:05.000Z',
+    );
+  });
+});
diff --git a/packages/vault-core/src/codecs/json.ts b/packages/vault-core/src/codecs/json.ts
new file mode 100644
index 0000000000..343e822c19
--- /dev/null
+++ b/packages/vault-core/src/codecs/json.ts
@@ -0,0 +1,50 @@
+import { defineCodec } from '../core/codec';
+
+export const jsonFormat = defineCodec({
+  id: 'json',
+  fileExtension: 'json',
+  mimeType: 'application/json',
+  parse(text) {
+    return JSON.parse(text, (_, value) => {
+      // Revive pseudo-date objects
+      if (isJsonDate(value)) {
+        return fromJSONDate(value);
+      }
+      return value;
+    });
+  },
+  stringify(rec) {
+    // Need to override `Date.toJSON` to get the desired format;
+    // otherwise we get ISO strings directly.
+    const originalDateStringifier = Date.prototype.toJSON;
+    Date.prototype.toJSON = function () {
+      return toJSONDate(this) as unknown as string;
+    };
+
+    try {
+      return JSON.stringify(rec, null, 2);
+    } finally {
+      Date.prototype.toJSON = originalDateStringifier;
+    }
+  },
+});
+
+// Pseudo-date because we can't serialize Date objects in JSON, unlike YAML.
+// The `$date` key follows MongoDB's Extended JSON convention.
+type JsonDate = { $date: string };
+
+function isJsonDate(v: unknown): v is JsonDate {
+  return (
+    typeof v === 'object' &&
+    v !== null &&
+    '$date' in v &&
+    typeof v.$date === 'string'
+  );
+}
+
+function toJSONDate(date: Date): JsonDate {
+  return { $date: date.toISOString() };
+}
+
+function fromJSONDate(jsonDate: JsonDate): Date {
+  return new Date(jsonDate.$date);
+}
diff --git a/packages/vault-core/src/codecs/markdown.ts b/packages/vault-core/src/codecs/markdown.ts
new file mode 100644
index 0000000000..0635b2f588
--- /dev/null
+++ b/packages/vault-core/src/codecs/markdown.ts
@@ -0,0 +1,15 @@
+import { defineCodec } from '../core/codec';
+import { YAML } from '../utils/format/yaml';
+
+// TODO figure out condition for body prop (name based??)
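+// One plausible direction (sketch, not implemented here): emit every key except
+// the reserved 'body' key as YAML frontmatter, then append the body after it,
+// per the Codec contract in core/codec.ts, e.g.
+//   ---
+//   id: abc123
+//   subreddit: typescript
+//   ---
+//   <free-form Markdown body>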
+export const markdownFormat = defineCodec({ + id: 'markdown', + fileExtension: 'md', + mimeType: 'text/markdown', + parse(text) { + return YAML.parse(text); + }, + stringify(rec) { + return YAML.stringify(rec); + }, +}); diff --git a/packages/vault-core/src/core/adapter.ts b/packages/vault-core/src/core/adapter.ts index a7ea29d4db..471a8b1c45 100644 --- a/packages/vault-core/src/core/adapter.ts +++ b/packages/vault-core/src/core/adapter.ts @@ -1,34 +1,37 @@ -import { type Type, type } from 'arktype'; -import type { defineConfig } from 'drizzle-kit'; -import type { ColumnsSelection } from 'drizzle-orm'; -import type { LibSQLDatabase } from 'drizzle-orm/libsql'; -import { - type BaseSQLiteDatabase, - integer, - type SQLiteTable, - type SubqueryWithSelection, - sqliteTable, - text, +import type { StandardSchemaV1 } from '@standard-schema/spec'; +import type { ColumnsSelection, InferSelectModel } from 'drizzle-orm'; +import type { + SQLiteTable, + SubqueryWithSelection, } from 'drizzle-orm/sqlite-core'; +import type { CompatibleDB } from './db'; +import type { Ingestor } from './ingestor'; +import type { + DataTransform, + RequiredTransformTags, + Tag4, + VersionDef, +} from './migrations'; -type ExtractedResult = T extends BaseSQLiteDatabase<'async', infer R> - ? R - : never; -type ResultSet = ExtractedResult; - -// Bootstrapped type to represent compatible Drizzle database types across the codebase -export type CompatibleDB> = - BaseSQLiteDatabase<'sync' | 'async', TSchema | ResultSet>; - -type DrizzleConfig = ReturnType; +/** Column-level metadata */ +export type ColumnInfo = string; +/** Per-table simple column descriptions or rich ColumnInfo for each column. */ export type ColumnDescriptions> = { [K in keyof T]: { - [C in keyof T[K]['_']['columns']]: string; + [C in keyof T[K]['_']['columns']]: ColumnInfo; }; }; -type View< +/** Table metadata in human readable format. */ +export type AdapterMetadata> = { + [K in keyof TSchema]?: { + [C in keyof TSchema[K]['_']['columns']]?: ColumnInfo; + }; +}; + +/** View helper used by adapters for predefined queries (optional). */ +export type View< T extends string, TSelection extends ColumnsSelection, TSchema extends Record, @@ -38,157 +41,301 @@ type View< definition: (db: TDatabase) => SubqueryWithSelection; }; +// TODO remove once https://github.com/drizzle-team/drizzle-orm/issues/2745 is resolved +/** Convert `null` properties in a type to `undefined` */ +type NullToUndefined = { + [K in keyof T]: T[K] extends null + ? undefined + : T[K] extends (infer U)[] + ? NullToUndefined[] + : Exclude | ([null] extends [T[K]] ? undefined : never); +}; + +type Simplify = { [K in keyof T]: T[K] } & {}; + +type ColumnHasDefaultValue = TColumn extends { + _: { + hasDefault: infer HasDefault; + hasRuntimeDefault: infer HasRuntimeDefault; + isAutoincrement: infer IsAutoincrement; + }; +} + ? HasDefault extends true + ? true + : HasRuntimeDefault extends true + ? true + : IsAutoincrement extends true + ? true + : false + : false; + +type ColumnKeys = Extract< + keyof TTable['_']['columns'], + string +>; + +type ColumnsWithDefaults = { + [K in ColumnKeys]: ColumnHasDefaultValue< + TTable['_']['columns'][K] + > extends true + ? 
K + : never; +}[ColumnKeys]; + +type ApplyDefaultableColumns< + TTable extends SQLiteTable, + TRow extends Record, +> = Simplify< + Omit, keyof TRow>> & { + [K in Extract, keyof TRow>]?: + | TRow[K] + | undefined; + } +>; + +// Allow server generated columns (defaults, runtime defaults, autoincrement IDs) to be omitted in validator payloads. +type TableRowShape = + NullToUndefined> extends infer Row + ? Row extends Record + ? ApplyDefaultableColumns + : Row + : never; + +/** + * Map a prefixed schema record to an object whose keys are the table names with the + * adapter prefix removed and whose values are arrays of the inferred row type. + * + * This represents the natural shape for bulk ingestion: each table produces many rows. + * + * @example `reddit` ID and `reddit_posts` table become `posts` + */ +export type SchemaMappedToObject< + TID extends string, + TObj extends Record, +> = { + [K in keyof TObj as K extends `${TID}_${infer Rest}` + ? Rest + : K]: TableRowShape[]; +}; + +type TransformAlignment[]> = { + [K in RequiredTransformTags]: DataTransform; +} & { + [K in Exclude>]?: never; +}; + +/** + * Unified Adapter: schema + parsing/upsert lifecycle. + */ export interface Adapter< TID extends string = string, - TSchema extends Record = Record, - TDatabase extends CompatibleDB = CompatibleDB, - TParserShape extends Type = Type, - TParsed = TParserShape['infer'], + TTableNames extends string = string, + TSchema extends Record = Record< + string, + SQLiteTable + >, + TVersions extends + readonly VersionDef[] = readonly VersionDef[], + TPreparsed = unknown, + TParsed = unknown, > { - /** - * Unique identifier for the adapter - * - * Should be lowercase, no spaces, alpha-numeric. - * @example "twitter" - */ + /** Unique identifier for the adapter (lowercase, no spaces, alphanumeric) */ id: TID; - /** - * User-facing name - * @example "Reddit Adapter" - */ - name: string; - - /** Database schema */ - schema: TSchema; + /** Drizzle schema object. */ + schema: TID extends string + ? TSchema + : EnsureAllTablesArePrefixedWith extends never + ? never + : EnsureSchemaHasPrimaryKeys; - /** Column descriptions for every table/column */ - metadata: ColumnDescriptions; + /** Adapter metadata for UI/help */ + metadata?: AdapterMetadata; - /** - * ArkType schema for parsing/validation - * - * This will be used by the MCP server to validate data returned from the `parse` method. - */ - validator: TParserShape; - - /** - * Predefined views/CTEs - * - * Should be used for common queries that a user will want to query for. This is especially helpful if the data storage format is complex/unintuitive. - * - * @example - * "recently_played": { - * description: "Recently played songs", - * definition: (db) => db.select().from(songs).where(...) - * } + /** Optional predefined views + * Note: to avoid function parameter variance issues in structural assignability, + * we use a base schema shape for the DB parameter rather than the adapter's TSchema. */ views?: { - [Alias in string]: View; + [Alias in string]: View< + Alias, + ColumnsSelection, + Record, + CompatibleDB> + >; }; + /** Pipelines for importing new data. Validation/morphing happens via `adapter.validator` */ + ingestors?: readonly Ingestor[]; + /** - * Drizzle config - * - * @example - * defineConfig({ - * dialect: 'sqlite', - * schema: './src/schema.ts', - * out: './migrations', - * migrations: { - * table: 'test_migrations', - * }, - * }) + * Optional Standard Schema validator for parsed payload (ingest pipeline). 
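+ * Any Standard Schema v1 implementation can be supplied here (ArkType, Zod,
+ * and Valibot all conform to the spec).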
*/ - drizzleConfig: DrizzleConfig; - - // Lifecycle hooks + validator?: StandardSchemaV1; /** - * Parse a blob into a parsed representation - * @example - * const text = await b.text(); - * return JSON.parse(text); + * Authoring-time versions tuple used for JS transform tag alignment checks + * and runtime transform planning. */ - parse: (file: Blob) => Promise; + versions: TVersions; - /** Upsert data into the database */ - upsert: (db: TDatabase, data: TParsed) => Promise; + /** + * Transform registry for forward data migrations. Alignment with versions is enforced + * when adapters are authored through `defineAdapter`. + */ + transforms: Partial>; } -// Note: If a generic only appears in a function parameter position, TS won't infer it and will fall back to the constraint (e.g. `object`). -// These overloads infer the full function type `F` instead, preserving the args type. +/** + * Define a new adapter where the validator's parsed output must match the schema's InferSelect shape, + * where all tables are prefixed and have primary keys, and (optionally) enforce JS transform tag alignment when 'versions' and 'transforms' are provided. + */ export function defineAdapter< - // biome-ignore lint/suspicious/noExplicitAny: Variance-friendly identity for adapter factories - F extends () => Adapter, ->(adapter: F): F; + const TID extends string, + TSchema extends Record, + TVersions extends readonly VersionDef[] = readonly VersionDef[], + TPreparsed = unknown, + TParsed extends SchemaMappedToObject = SchemaMappedToObject< + TID, + TSchema + >, +>( + adapter: () => PrefixedAdapter, +): () => PrefixedAdapter; export function defineAdapter< - // biome-ignore lint/suspicious/noExplicitAny: Variance-friendly identity for adapter factories - F extends (args: any) => Adapter, ->(adapter: F): F; -// Implementation signature can be broad; overloads provide strong typing to callers + const TID extends string, + TSchema extends Record, + TPreparsed, + TVersions extends readonly VersionDef[], + TParsed extends SchemaMappedToObject = SchemaMappedToObject< + TID, + TSchema + >, + TArgs extends unknown[] = [], +>( + adapter: ( + ...args: TArgs + ) => PrefixedAdapter, +): ( + ...args: TArgs +) => PrefixedAdapter; export function defineAdapter unknown>( adapter: F, ): F { - return adapter; + const wrapped = ((...args: Parameters) => { + const instance = adapter(...args); + validateTransformsAgainstVersions(instance as Adapter); + return instance; + }) as unknown as F; + return wrapped; } -// Example -// TODO remove - -const songs = sqliteTable('songs', { - id: integer('id').primaryKey(), - title: text('title'), - artist: text('artist'), - album: text('album'), - year: integer('year'), -}); - -const _testAdapter = defineAdapter(() => ({ - id: 'test', - name: 'Test Adapter', - validator: type({ - id: 'number', - title: 'string', - artist: 'string', - album: 'string', - year: 'number', - }), - schema: { - songs, - }, - drizzleConfig: { - dialect: 'sqlite', - schema: './src/schema.ts', - casing: 'snake_case', - strict: true, - out: './migrations', - migrations: { - table: 'test_migrations', - }, - }, - parse: (file) => file.text().then(JSON.parse), - upsert: (db, data) => - db - .insert(songs) - .values(data) - .onConflictDoUpdate({ - target: songs.id, - set: { - title: data.title, - artist: data.artist, - album: data.album, - year: data.year, - }, - }) - .then(() => undefined), - metadata: { - songs: { - id: 'Unique identifier for the song', - title: 'Title of the song', - artist: 'Artist of the song', - album: 
'Album of the song', - year: 'Year of release', - }, - }, -})); +/** + * Compile-time detection of whether a table has a primary key. + * Looks for any column with an internal `_.isPrimaryKey === true` flag. + * Produces `true` when at least one PK column exists; otherwise `false`. + */ +type TableHasPrimaryKey = TTable extends { + _: { columns: infer TCols extends Record }; +} + ? { + [K in keyof TCols]: TCols[K] extends { _: { isPrimaryKey: true } } + ? K + : never; + }[keyof TCols] extends never + ? false + : true + : false; +type EnsureSchemaHasPrimaryKeys> = { + [K in keyof S]: TableHasPrimaryKey extends false ? never : K & string; +}[keyof S]; + +type KeysOf = Extract; + +/** + * Compile time check for table name prefixing + */ +type PrefixedAdapter< + TID extends string, + Schema extends Record, + TVersions extends readonly VersionDef[], + TPreparsed, + TParsed, +> = Adapter, Schema, TVersions, TPreparsed, TParsed> & { + transforms: TransformAlignment; +} & (MissingPrefixedTables extends never + ? unknown + : { + __error__schema_table_prefix_mismatch__: `Expected all tables to start with "${TID}_"`; + }) & + (MissingPrimaryKeyTables extends never + ? unknown + : { + __error__missing_primary_keys__: MissingPrimaryKeyTables; + }); + +// Compute the set of schema keys that are NOT prefixed with `${TID}_` +type MissingPrefixedTables< + TID extends string, + TSchema extends Record, +> = Exclude, `${TID}_${string}`>; + +// Compute the set of tables that do not declare a primary key +type MissingPrimaryKeyTables> = { + [K in keyof S]: TableHasPrimaryKey extends false ? K & string : never; +}[keyof S]; + +type EnsureAllTablesArePrefixedWith< + TID extends string, + TSchema extends Record, +> = + Exclude, `${TID}_${string}`> extends never + ? TSchema + : never; +type SchemaTableNames> = Extract< + keyof TSchema, + string +>; + +/** + * Compile-time detection of duplicate adapter IDs in a tuple. + * Produces the first duplicate ID union if any; otherwise never. + */ +type NoDuplicateAdapter< + T extends readonly { id: string }[], + Seen extends string = never, +> = T extends readonly [infer H, ...infer R] + ? H extends { id: infer ID extends string } + ? ID extends Seen + ? ID | NoDuplicateAdapter, Seen> + : NoDuplicateAdapter, Seen | ID> + : never + : never; + +/** + * Enforce unique adapter IDs at the type level for tuple literals. + * Evaluates to T when no duplicates; else never (surfacing a type error). + */ +export type UniqueAdapters = + NoDuplicateAdapter extends never ? T : never; + +function validateTransformsAgainstVersions(adapter: Adapter) { + const versions = adapter.versions ?? []; + if (!versions.length) return; + + const declaredTags = versions.map((v) => v.tag); + const required = declaredTags.slice(1); + const transforms = adapter.transforms ?? {}; + const actual = Object.keys(transforms); + + const missing = required.filter((tag) => !actual.includes(tag)); + const extras = actual.filter((tag) => !required.includes(tag)); + + if (missing.length > 0 || extras.length > 0) { + throw new Error( + `defineAdapter: adapter '${adapter.id}' transforms do not match versions. 
` +
+      `required=[${required.join(',')}] actual=[${actual.join(',')}] ` +
+      `missing=[${missing.join(',')}] extras=[${extras.join(',')}]`,
+    );
+  }
+}
diff --git a/packages/vault-core/src/core/codec.ts b/packages/vault-core/src/core/codec.ts
new file mode 100644
index 0000000000..37b6205bd0
--- /dev/null
+++ b/packages/vault-core/src/core/codec.ts
@@ -0,0 +1,93 @@
+import type { SQLiteColumn, SQLiteTable } from 'drizzle-orm/sqlite-core';
+
+// Language-level codec (Markdown, JSON, TOML+body, etc.)
+export interface Codec<TID extends string = string, TExt extends string = string> {
+  /** Unique identifier (e.g., 'markdown', 'json', 'toml', 'yaml-md') */
+  id: TID;
+  /** Default file extension without dot
+   * @example 'md'
+   */
+  fileExtension: TExt;
+  /** MIME type for file exports (e.g., 'text/markdown', 'application/json') */
+  mimeType: string;
+  /**
+   * Parse file text into a flat record. If a free-form body is present,
+   * codecs should use the reserved key 'body' to carry it.
+   */
+  parse(text: string): Record<string, unknown>;
+  /**
+   * Stringify a flat record into file text. If a 'body' key is present,
+   * codecs that support bodies should place it appropriately (e.g., after
+   * frontmatter); others may serialize it as a normal field.
+   */
+  stringify(rec: Record<string, unknown>): string;
+}
+
+// Runtime view of a Drizzle table
+export type TableEntry = [name: string, table: SQLiteTable];
+export type ColumnEntry = [name: string, column: SQLiteColumn];
+
+// Per-codec convention profile that derives mapping decisions from schema + naming
+export interface ConventionProfile {
+  // compute relative path from table + pk values
+  pathFor(adapterId: string, tableName: string, pkValues: string[]): string;
+}
+
+// Helpers
+export function listTables(schema: Record<string, SQLiteTable>): TableEntry[] {
+  return Object.entries(schema) as TableEntry[];
+}
+
+export function listColumns(table: SQLiteTable): ColumnEntry[] {
+  return Object.entries(table) as ColumnEntry[];
+}
+
+/**
+ * Find primary key columns in a table.
+ *
+ * Due to type-safety in adapter.ts, *all* tables should have a primary key.
+ * @throws if no primary key found
+ */
+export function listPrimaryKeys(tableName: string, table: SQLiteTable) {
+  const cols = listColumns(table);
+  const pkCols = [];
+  for (const col of cols) {
+    const [, drizzleCol] = col;
+    if (drizzleCol.primary) pkCols.push(col);
+  }
+
+  if (pkCols.length === 0)
+    throw new Error(`Table ${tableName} has no primary key`);
+
+  return pkCols;
+}
+
+// Choose body column by common names, prefer notNull string-like columns named body/content/text
+// (Body selection moved to the codecs themselves.)
+
+// Default per-codec convention profile (opinionated)
+// Picks a body-capable format (prefer 'markdown') when body column exists; else 'json'.
+export function defaultConvention(): ConventionProfile {
+  return {
+    pathFor(adapterId, tableName, pkValues) {
+      // Join PK values with __, sorted for determinism
+      const parts = pkValues
+        .toSorted((a, b) => a.localeCompare(b))
+        .map((v) => String(v));
+      const fileId = parts.length > 0 ? parts.join('__') : 'row';
+      // extension decided by mode at callsite; we return a directory path root here
+      return `vault/${adapterId}/${tableName}/${fileId}`;
+    },
+  };
+}
+
+type NoDotPrefix<T extends string> = T extends `.${string}` ? never : T;
+
+/**
+ * defineCodec: identity helper for a single Codec (markdown, json, etc.).
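+ *
+ * @example
+ * // Sketch: a hypothetical plain-text codec using the reserved 'body' key.
+ * const txt = defineCodec({
+ *   id: 'txt',
+ *   fileExtension: 'txt',
+ *   mimeType: 'text/plain',
+ *   parse: (text) => ({ body: text }),
+ *   stringify: (rec) => String(rec.body ?? ''),
+ * });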
+ */ +export function defineCodec( + codec: Codec>, +) { + return codec; +} diff --git a/packages/vault-core/src/core/config.ts b/packages/vault-core/src/core/config.ts index 8d57bbdd12..0891217d40 100644 --- a/packages/vault-core/src/core/config.ts +++ b/packages/vault-core/src/core/config.ts @@ -1,37 +1,64 @@ -import type { MigrationConfig } from 'drizzle-orm/migrator'; -import type { BaseSQLiteDatabase } from 'drizzle-orm/sqlite-core'; -import type { Adapter } from './adapter'; +import type { Adapter, UniqueAdapters } from './adapter'; +import type { Codec, ConventionProfile } from './codec'; +import type { DrizzleDb } from './db'; -export interface VaultConfig< - TDatabase extends BaseSQLiteDatabase<'sync' | 'async', unknown>, - TAdapters extends Adapter[], -> { - /** - * List of adapters to include - * - * @see {Adapter} - */ - adapters: TAdapters; +/** Construct a Vault around a Drizzle DB. */ +export type CoreOptions = { + database: DrizzleDb; + adapters: UniqueAdapters; +}; - /** - * Database connection instance - * @example - * import { createClient } from '@libsql/client'; - * const client = createClient({ url: dbUrl }); - * const db = drizzle(client); - * ... - * database: db, - */ - database: TDatabase; +/** Helper to get Adapter ID from Adapter */ +export type AdapterIDs = T[number]['id']; - /** - * Drizzle platform-specific migration function - * @example - * import { migrate } from 'drizzle-orm/libsql/migrator'; - * ... - * migrateFunc: migrate, - * @see {MigrationConfig} - * @todo Implement in-house migration procedure, which doesn't rely on `node:fs`. - */ - migrateFunc: (db: TDatabase, config: MigrationConfig) => Promise; -} +/** + * Per-call codec and conventions (override defaults). + * Caller must provide the Adapter that owns the schema to export. + */ +export type ExportOptions = { + /** Adapters to export (compile-time unique by id for tuple literals). Defaults to all adapters. */ + adapterIDs?: AdapterIDs>[]; + /** Codec (format) to use for exports */ + codec: Codec; + /** Optional conventions override (otherwise uses built-in default) */ + conventions?: ConventionProfile; +}; + +/** + * Import options: caller provides files and codec only. + * Adapters, versions, transforms, and validation are determined by adapter definitions. + */ +export type ImportOptions = { + files: Map; + /** Codec (format) to use for imports. Must match the exported format */ + codec: Codec; +}; + +/** IngestOptions variant for one-time single-file ingestors (e.g., ZIP). */ +export type IngestOptions = { + adapter: Adapter; + file: File; +}; + +export type AdapterTables = TAdapter['schema']; +export type AdapterTableMap = { + [AdapterID in AdapterIDs]: AdapterTables< + Extract + >; +}; +export type QueryInterface = { + /** Map of adapter ID -> table name -> table object */ + tables: AdapterTableMap; + db: DrizzleDb; +}; + +/** + * Vault: minimal API surface. + * Methods use object method shorthand as per project conventions. 
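+ *
+ * @example
+ * // Sketch: export every adapter as Markdown (assumes the host constructed
+ * // `vault` and that exportData resolves to a path -> File map).
+ * const files = await vault.exportData({ codec: markdownFormat });
+ * for (const [path, file] of files) {
+ *   // persist each exported file, e.g. await Bun.write(path, file)
+ * }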
+ */ +export type Vault = { + exportData(options: ExportOptions): Promise>; + importData(options: ImportOptions): Promise; + ingestData(options: IngestOptions): Promise; + getQueryInterface(): QueryInterface; +}; diff --git a/packages/vault-core/src/core/db.ts b/packages/vault-core/src/core/db.ts new file mode 100644 index 0000000000..68522b21bd --- /dev/null +++ b/packages/vault-core/src/core/db.ts @@ -0,0 +1,20 @@ +import type { AnyTable as AnyTableGeneric } from 'drizzle-orm'; +import type { LibSQLDatabase } from 'drizzle-orm/libsql'; +import type { BaseSQLiteDatabase } from 'drizzle-orm/sqlite-core'; + +// Shared types +type ExtractedResult = + T extends BaseSQLiteDatabase<'async', infer R> ? R : never; + +type ResultSet = ExtractedResult; + +// Represents compatible Drizzle DB types across the codebase +export type CompatibleDB = BaseSQLiteDatabase< + 'sync' | 'async', + TSchema | ResultSet +>; + +/** Minimal Drizzle-DB type expected by core. Hosts pass a concrete Drizzle instance. */ +export type DrizzleDb = CompatibleDB; + +export type AnyTable = AnyTableGeneric<{ name: string }>; // simplify usage diff --git a/packages/vault-core/src/core/import/importPipeline.ts b/packages/vault-core/src/core/import/importPipeline.ts new file mode 100644 index 0000000000..d3140f1c82 --- /dev/null +++ b/packages/vault-core/src/core/import/importPipeline.ts @@ -0,0 +1,98 @@ +/** + * Lightweight orchestrator that lifts adapter metadata (versions + transforms) + * into a data-fix pipeline before rows touch the database. Hosts supply the raw + * dataset gathered from disk/ingestors; we return the fully-normalized rows. + * + * High-level flow: + * 1. Determine which transform registry + version tuple to use (allowing runtime overrides). + * 2. If a manifest/versions tuple exists, run every forward transform and optional validator. + * 3. Otherwise run the validator directly (legacy adapters). + * 4. Hand the morphed dataset back to the caller for ingestion. + */ +import type { Adapter } from '../adapter'; +import type { + DataValidator, + Tag4, + TransformRegistry, + TransformRegistryForVersions, + VersionDef, +} from '../migrations'; +import { transformAndValidate } from '../migrations'; + +/** + * Runtime configuration for a single pipeline run. + * + * dataset — Adapter-shaped record of unprefixed table keys mapped to rows. + * adapter — The adapter we’re importing (used to grab default versions/transforms). + * transformsOverride / versionsOverride + * — Allows tests or hosts to inject a different transform chain. + * dataValidator — Optional Standard Schema validator; defaults to adapter.validator upstream. + * sourceTag — Tag provided explicitly (e.g., host UI choice). + * detectedTag — Tag auto-detected out of the import metadata (migration.json). + */ +export type ImportPipelineInput = { + dataset: Record; + adapter: Adapter; + transformsOverride?: TransformRegistry | undefined; + versionsOverride?: readonly VersionDef[] | undefined; + dataValidator?: DataValidator | undefined; + sourceTag?: string | undefined; + detectedTag?: string | undefined; +}; + +/** + * Executes the import pipeline, producing a dataset that matches the adapter’s current schema. + * - Chooses effective version + transform chain (preferring overrides over adapter defaults). + * - Runs transform chain + validator when available. + * - Falls back to direct validation for legacy adapters without versions/transforms. 
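+ *
+ * @example
+ * // Sketch: `manifest` stands in for a parsed __meta__/<adapterId>/migration.json.
+ * const normalized = await runImportPipeline({
+ *   dataset,
+ *   adapter,
+ *   detectedTag: manifest.tag ?? undefined,
+ * });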
+ */ +export async function runImportPipeline( + input: ImportPipelineInput, +): Promise> { + const { + dataset, + adapter, + transformsOverride, + versionsOverride, + dataValidator, + sourceTag, + detectedTag, + } = input; + + const transforms = (transformsOverride ?? adapter.transforms) as + | TransformRegistry + | undefined; + const baseVersions = adapter.versions; + const effectiveVersions = (versionsOverride ?? baseVersions) as + | readonly VersionDef[] + | undefined; + const resolvedSourceTag = sourceTag ?? detectedTag; + + // Any transforms/validation happen purely in-memory; we never mutate the original dataset object. + let pipelineOutput: Record = dataset; + + if (effectiveVersions && transforms) { + // Both versions + transforms exist → run the forward chain plus validation. + const typedVersions = effectiveVersions; + const typedTransforms = transforms as TransformRegistryForVersions< + typeof typedVersions + >; + const typedDataset = dataset as { + [key: string]: Record[]; + }; + const result = await transformAndValidate( + typedVersions, + typedTransforms, + typedDataset, + resolvedSourceTag, + dataValidator, + ); + pipelineOutput = result as Record; + } else if (dataValidator) { + // Legacy adapter path: run a validator if provided, but skip transform orchestration. + const validated = await dataValidator(dataset); + pipelineOutput = validated as Record; + } + + return pipelineOutput; +} diff --git a/packages/vault-core/src/core/import/migrationMetadata.ts b/packages/vault-core/src/core/import/migrationMetadata.ts new file mode 100644 index 0000000000..de07ea3741 --- /dev/null +++ b/packages/vault-core/src/core/import/migrationMetadata.ts @@ -0,0 +1,101 @@ +/** + * Utilities for producing metadata files that describe an adapter’s migration state. + * + * When we export data, we attach a JSON manifest so future imports know: + * - Which version tag the vault was on (ledger vs. adapter default) + * - Which tags are declared in the adapter today + * - When the export snapshot was taken + * + * Host tooling can read this file to pre-populate “source version” selectors, drive + * transform planning, or display drift warnings (ledger vs. declared versions). + */ +import { jsonFormat } from '../../codecs'; +import type { Adapter } from '../adapter'; +import type { DrizzleDb } from '../db'; +import { ensureVaultLedgerTables, getVaultLedgerTag } from '../migrations'; + +export const MIGRATION_META_DIR = '__meta__'; +export const MIGRATION_META_FILENAME = 'migration.json'; + +/** + * Shape of the emitted metadata file (written as JSON under __meta__/ADAPTER/migration.json). + */ +export type MigrationMetadata = { + adapterId: string; + tag: string | null; + source: 'ledger' | 'adapter'; + ledgerTag: string | null; + latestDeclaredTag: string | null; + versions: string[]; + exportedAt: Date; +}; + +/** + * Helper: fetch the last-applied tag for an adapter from the migration store. + * Returns undefined when the host does not provide a store or when no tag is stored yet, + * which signals downstream logic to fall back to adapter-declared versions. + */ +async function resolveLedgerTag( + adapterId: string, + db?: DrizzleDb, +): Promise { + if (!db) return undefined; + await ensureVaultLedgerTables(db); + return (await getVaultLedgerTag(db, adapterId)) ?? undefined; +} + +/** + * Produce migration metadata for a single adapter. + * + * Priority order for `tag`: + * 1. ledgerTag (vault-migrations table) when available + * 2. latest declared tag from the adapter manifest + * 3. 
null when neither exists (fresh adapter)
+ *
+ * `versions` is emitted as the manifest’s ordered tag list so consumers can plan forward chains.
+ */
+export async function createMigrationMetadata(
+  adapter: Adapter,
+  db?: DrizzleDb,
+  clock: () => Date = () => new Date(),
+): Promise<MigrationMetadata> {
+  const versions = adapter.versions ?? [];
+  const declaredTags = versions.map((v) => v.tag);
+  const ledgerTag = await resolveLedgerTag(adapter.id, db);
+  const latestDeclaredTag = declaredTags.length
+    ? declaredTags[declaredTags.length - 1]
+    : undefined;
+  const resolvedTag = ledgerTag ?? latestDeclaredTag ?? null;
+  return {
+    adapterId: adapter.id,
+    tag: resolvedTag,
+    source: ledgerTag ? 'ledger' : 'adapter',
+    ledgerTag: ledgerTag ?? null,
+    latestDeclaredTag: latestDeclaredTag ?? null,
+    versions: declaredTags,
+    exportedAt: clock(),
+  };
+}
+
+/**
+ * Convenience: build the on-disk metadata file alongside the in-memory metadata object.
+ * Returns both so callers can stash the file in an export archive and keep the parsed metadata.
+ */
+export async function createMigrationMetadataFile(
+  adapter: Adapter,
+  db?: DrizzleDb,
+  clock?: () => Date,
+): Promise<{ path: string; file: File; metadata: MigrationMetadata }> {
+  const metadata = await createMigrationMetadata(adapter, db, clock);
+  const file = new File(
+    // We'll use the JSON codec here so that date serialization is consistent
+    [jsonFormat.stringify(metadata)],
+    MIGRATION_META_FILENAME,
+    { type: 'application/json' },
+  );
+  return {
+    path: `${MIGRATION_META_DIR}/${adapter.id}/${MIGRATION_META_FILENAME}`,
+    file,
+    metadata,
+  };
+}
diff --git a/packages/vault-core/src/core/index.ts b/packages/vault-core/src/core/index.ts
index e2d5c5fc60..b45b869931 100644
--- a/packages/vault-core/src/core/index.ts
+++ b/packages/vault-core/src/core/index.ts
@@ -1,2 +1,5 @@
 export * from './adapter';
+export * from './codec';
+export * from './ingestor';
+export * from './migrations';
 export * from './vault';
diff --git a/packages/vault-core/src/core/ingestor.ts b/packages/vault-core/src/core/ingestor.ts
new file mode 100644
index 0000000000..e0a4590456
--- /dev/null
+++ b/packages/vault-core/src/core/ingestor.ts
@@ -0,0 +1,24 @@
+/**
+ * An ingestor is responsible for parsing one or more input files into a normalized
+ * payload that can be validated and upserted by an Adapter. This is completely separate
+ * from the vault import/export lifecycle.
+ */
+export type Ingestor<T = unknown> = {
+  /** Return true if this ingestor can handle the provided file */
+  matches(file: File): boolean;
+  /** Parse files into a normalized payload expected by validator/upsert */
+  parse(file: File): Promise<T>;
+};
+
+/**
+ * Define an Ingestor with full type inference.
+ *
+ * @param ingestor The ingestor implementation
+ * @param T The shape of the parsed payload (default: unknown)
+ * @returns The same ingestor, with types inferred
+ */
+export function defineIngestor<T = unknown>(
+  ingestor: Ingestor<T>,
+): Ingestor<T> {
+  return ingestor;
+}
diff --git a/packages/vault-core/src/core/migrations.ts b/packages/vault-core/src/core/migrations.ts
new file mode 100644
index 0000000000..0b02e06e70
--- /dev/null
+++ b/packages/vault-core/src/core/migrations.ts
@@ -0,0 +1,962 @@
+/**
+ * Environment-agnostic migration primitives for vault-core.
+ *
+ * This module intentionally performs no IO and imports no node: modules.
+ * Hosts must provide any filesystem/database access and drizzle-kit integration.
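+ *
+ * Planning sketch (illustrative journal literal; hosts normally parse
+ * meta/_journal.json themselves, and `planToVersion` is defined below):
+ * @example
+ * const journal = { entries: [{ tag: '0000' }, { tag: '0001' }, { tag: '0002' }] };
+ * planToVersion(journal, '0000', '0002').tags; // => ['0001', '0002']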
+ * + * Design: + * - Core accepts already-parsed data structures (journals, step metadata) and + * provides pure planning helpers and progress event types. + * - Execution is performed by host-injected strategies; core only defines shapes. + */ + +import type { InferInsertModel } from 'drizzle-orm'; +import { eq, sql } from 'drizzle-orm'; +import { + integer, + type SQLiteTable, + sqliteTable, + text, +} from 'drizzle-orm/sqlite-core'; +import type { DrizzleDb } from './db'; + +/** Drizzle migration journal entry (parsed from meta/_journal.json by the host). */ +export type JournalEntry = { + /** Drizzle migration tag, usually the filename without extension (e.g., 0001_add_posts) */ + tag: string; + /** ISO timestamp or number; shape depends on drizzle-kit version */ + when?: unknown; + /** Optional checksum/hash; presence depends on drizzle version */ + hash?: string; +}; + +/** Parsed Drizzle journal object (host-provided; core does not read files). */ +export type MigrationJournal = { + entries: JournalEntry[]; +}; + +/** + * A pure planning result representing the ordered set of tags needed to move + * from the current tag (if any) to the target tag. Core does not include SQL + * statements here; hosts generate SQL via drizzle-kit as needed. + */ +export type MigrationPlan = { + /** Current DB version tag (if known). */ + from: string | undefined; + /** Target version tag (must exist in the journal). */ + to: string; + /** Ordered list of tags to apply to reach the target. */ + tags: string[]; +}; + +/** + * Compute a forward-only plan from current -> target using journal ordering. + * Pure function: no IO, no environment assumptions. + */ +export function planToVersion( + journal: MigrationJournal, + currentTag: string | undefined, + targetTag: string, +): MigrationPlan { + const order = new Map(journal.entries.map((e, i) => [e.tag, i] as const)); + + const targetIdx = order.get(targetTag); + if (targetIdx === undefined) + throw new Error(`Target migration tag not found in journal: ${targetTag}`); + + const currentIdx = currentTag != null ? (order.get(currentTag) ?? -1) : -1; + + // Downgrade paths are not supported by this planner; hosts can implement if needed. + if (currentIdx > targetIdx) + throw new Error( + `Current tag (${currentTag}) is ahead of target tag (${targetTag}); downgrades are not supported in core planner.`, + ); + + const forward = journal.entries + .slice(currentIdx + 1, targetIdx + 1) + .map((e) => e.tag); + + return { from: currentTag, to: targetTag, tags: forward }; +} + +/** + * Progress event and reporter types for host-executed migrations. + * These are emitted by host executors while running the planned steps/tags. + */ +export type ProgressEvent = + | { + type: 'start'; + totalSteps: number; + } + | { + type: 'step'; + index: number; // 0-based index in the overall plan + tag: string; // current tag being applied + progress?: number; // optional 0..1 + message?: string; + } + | { + type: 'complete'; + } + | { + type: 'error'; + error: unknown; + }; + +/** Reporter callbacks; hosts pass an implementation to their executor. */ +export type ProgressReporter = { + onStart(event: Extract): void; + onStep(event: Extract): void; + onComplete(event: Extract): void; + onError(event: Extract): void; +}; + +/** + * Host integration points (shapes only; no implementation in core): + * + * - MigrationPlanner: optional advanced planner that can consider multiple adapter + * version tuples, curated steps, or drizzle-kit diffs to assemble a cross-plugin plan. 
+ * This is left opaque. + * + * - MigrationExecutor: applies a plan to the injected database and emits ProgressReporter events. + * Core does not require a specific DB type here to remain environment-agnostic. + */ +export type MigrationPlanner = (...args: unknown[]) => MigrationPlan; + +export type MigrationExecutor = ( + plan: MigrationPlan, + report: ProgressReporter, +) => Promise; + +// ============================== +// Plan B: Inline migration (SQLite/libsql) execution helpers +// Design: keep core environment-agnostic; host supplies Drizzle internals via 'engine'. +// We skip validators on purpose and rely on 'squash' + differ to derive SQL. +// The implementation is commented out below but retained for potential future use. +// ============================== + +// /** Supported sqlite-like dialects for inline diffing. */ +// export type SqliteLikeDialect = 'sqlite' | 'libsql'; + +// /** Optional migration mode forwarded to differ; 'push' mirrors drizzle example semantics. */ +// export type MigrationMode = 'migrate' | 'push'; + +// /** Resolver input/output shapes kept generic yet typed; avoid any. */ +// export type TableResolverInput = { +// created?: unknown[]; +// deleted?: unknown[]; +// [k: string]: unknown; +// }; +// export type TableResolverOutput = { +// created: unknown[]; +// deleted: unknown[]; +// moved: unknown[]; +// renamed: unknown[]; +// }; + +// export type ColumnResolverInput = { +// tableName?: unknown; +// schema?: unknown; +// created?: unknown[]; +// deleted?: unknown[]; +// [k: string]: unknown; +// }; +// export type ColumnResolverOutput = { +// tableName?: unknown; +// schema?: unknown; +// created: unknown[]; +// deleted: unknown[]; +// renamed: unknown[]; +// }; + +// export type ViewResolverInput = { +// created?: unknown[]; +// deleted?: unknown[]; +// [k: string]: unknown; +// }; +// export type ViewResolverOutput = { +// created: unknown[]; +// deleted: unknown[]; +// moved: unknown[]; +// renamed: unknown[]; +// }; + +// /** A minimal set of resolvers used by sqlite/libsql snapshot diff functions. */ +// export type SqliteResolvers = { +// tablesResolver: ( +// input: TableResolverInput, +// ) => Promise | TableResolverOutput; +// columnsResolver: ( +// input: ColumnResolverInput, +// ) => Promise | ColumnResolverOutput; +// viewsResolver: ( +// input: ViewResolverInput, +// ) => Promise | ViewResolverOutput; +// /** Optional schemas resolver; not required by sqlite/libsql differ in current examples. */ +// schemasResolver?: ( +// input: TableResolverInput, +// ) => Promise | TableResolverOutput; +// }; + +// /** Default, non-interactive resolvers: create-only for new, drop-only for deleted, no renames/moves. */ +// export const defaultSqliteResolvers: SqliteResolvers = { +// tablesResolver(input: TableResolverInput) { +// Expect shape { created: T[], deleted: T[] } +// return { +// created: Array.isArray(input.created) ? input.created : [], +// deleted: Array.isArray(input.deleted) ? input.deleted : [], +// moved: [], +// renamed: [], +// }; +// }, +// columnsResolver(input: ColumnResolverInput) { +// Expect shape { tableName, created: T[], deleted: T[] } +// return { +// tableName: input.tableName, +// schema: input.schema, +// created: Array.isArray(input.created) ? input.created : [], +// deleted: Array.isArray(input.deleted) ? input.deleted : [], +// renamed: [], +// }; +// }, +// viewsResolver(input: ViewResolverInput) { +// Expect shape { created: T[], deleted: T[] } +// return { +// created: Array.isArray(input.created) ? 
input.created : [], +// deleted: Array.isArray(input.deleted) ? input.deleted : [], +// moved: [], +// renamed: [], +// }; +// }, +// }; + +// /** Result contract returned by drizzle snapshot differs that we use. */ +// export type SqlDiffResult = { +// sqlStatements: string[]; +// drizzle may also include auxiliary outputs, we keep them if present +// statements?: unknown; +// _meta?: unknown; +// }; + +// /** Function signature of sqlite snapshot differ (applySqliteSnapshotsDiff). */ +// export type ApplySqliteSnapshotsDiff = ( +// squashedPrev: unknown, +// squashedCur: unknown, +// tablesResolver: ( +// input: TableResolverInput, +// ) => Promise | TableResolverOutput, +// columnsResolver: ( +// input: ColumnResolverInput, +// ) => Promise | ColumnResolverOutput, +// viewsResolver: ( +// input: ViewResolverInput, +// ) => Promise | ViewResolverOutput, +// validatedPrev: unknown, +// validatedCur: unknown, +// mode?: 'push', +// ) => Promise | SqlDiffResult; + +// /** Function signature of libsql snapshot differ (applyLibSQLSnapshotsDiff). */ +// export type ApplyLibSQLSnapshotsDiff = ApplySqliteSnapshotsDiff; + +// /** Function signature of sqlite squasher (squashSqliteScheme). */ +// export type SquashSqliteScheme = (snapshot: unknown, mode?: 'push') => unknown; + +// /** Drizzle-engine functions needed to run sqlite/libsql diffs. Host provides these concretions. */ +// export type SqliteEngine = { +// squashSqliteScheme: SquashSqliteScheme; +// applySqliteSnapshotsDiff?: ApplySqliteSnapshotsDiff; +// applyLibSQLSnapshotsDiff?: ApplyLibSQLSnapshotsDiff; +// }; + +// /** Options for SQL generation helpers. */ +// export type GenerateSqlOptions = { +// mode?: MigrationMode; +// resolvers?: Partial; +// engine: SqliteEngine; +// }; + +// /** Merge user resolvers over defaults (shallow). */ +// function mergeResolvers( +// base: SqliteResolvers, +// partial?: Partial, +// ): SqliteResolvers { +// if (!partial) return base; +// return { +// tablesResolver: partial.tablesResolver ?? base.tablesResolver, +// columnsResolver: partial.columnsResolver ?? base.columnsResolver, +// viewsResolver: partial.viewsResolver ?? base.viewsResolver, +// schemasResolver: partial.schemasResolver ?? base.schemasResolver, +// }; +// } + +// /** +// * Pure SQL generator for SQLite using provided drizzle-engine internals. +// * prev/cur are raw snapshots; we skip validators and only squash. +// */ +// export async function generateSqlForSqlite( +// prev: unknown, +// cur: unknown, +// opts: GenerateSqlOptions, +// ): Promise { +// const { engine } = opts; +// if (!engine.squashSqliteScheme || !engine.applySqliteSnapshotsDiff) { +// throw new Error( +// 'SQLite differ not available: ensure engine.squashSqliteScheme and engine.applySqliteSnapshotsDiff are provided', +// ); +// } +// const modePush = opts.mode === 'push' ? 'push' : undefined; +// const resolvers = mergeResolvers(defaultSqliteResolvers, opts.resolvers); + +// const squashedPrev = engine.squashSqliteScheme(prev, modePush); +// const squashedCur = engine.squashSqliteScheme(cur, modePush); + +// const { sqlStatements } = await engine.applySqliteSnapshotsDiff( +// squashedPrev, +// squashedCur, +// resolvers.tablesResolver, +// resolvers.columnsResolver, +// resolvers.viewsResolver, +// We pass raw snapshots through as "validated" to avoid pulling validators +// prev, +// cur, +// modePush, +// ); + +// return sqlStatements ?? []; +// } + +// /** +// * Pure SQL generator for libSQL using provided drizzle-engine internals. 
+// * prev/cur are raw snapshots; we skip validators and only squash. +// */ +// export async function generateSqlForLibsql( +// prev: unknown, +// cur: unknown, +// opts: GenerateSqlOptions, +// ): Promise { +// const { engine } = opts; +// if (!engine.squashSqliteScheme || !engine.applyLibSQLSnapshotsDiff) { +// throw new Error( +// 'libSQL differ not available: ensure engine.squashSqliteScheme and engine.applyLibSQLSnapshotsDiff are provided', +// ); +// } +// const modePush = opts.mode === 'push' ? 'push' : undefined; +// const resolvers = mergeResolvers(defaultSqliteResolvers, opts.resolvers); + +// const squashedPrev = engine.squashSqliteScheme(prev, modePush); +// const squashedCur = engine.squashSqliteScheme(cur, modePush); + +// const { sqlStatements } = await engine.applyLibSQLSnapshotsDiff( +// squashedPrev, +// squashedCur, +// resolvers.tablesResolver, +// resolvers.columnsResolver, +// resolvers.viewsResolver, +// We pass raw snapshots through as "validated" to avoid pulling validators +// prev, +// cur, +// modePush, +// ); + +// return sqlStatements ?? []; +// } + +// /** Execution helper: sequentially run statements using provided executor. */ +// export async function executeSqlStatements( +// statements: string[], +// execute: (sql: string) => Promise, +// ): Promise { +// for (const sql of statements) { +// Execute in order; SQLite DDL may auto-commit, so we avoid wrapping in a single tx here. +// await execute(sql); +// } +// } + +// /** Snapshot provider per tag used by the orchestrator. */ +// export type SnapshotProvider = ( +// tag: string, +// ) => { prev: unknown; cur: unknown } | Promise<{ prev: unknown; cur: unknown }>; + +// /** Orchestrator options for running a plan end-to-end. */ +// export type RunInlineOptions = { +// dialect: SqliteLikeDialect; +// mode?: MigrationMode; +// engine: SqliteEngine; +// validate?: boolean; // reserved for future; unused because validators are skipped by design +// /** If true and 'execute' provided, statements are applied; otherwise dry-run returns statements only. */ +// apply?: boolean; +// /** Execution callback for applying SQL; required when apply is true. */ +// execute?: (sql: string) => Promise; +// /** Optional progress reporter from this module. */ +// reporter?: ProgressReporter; +// }; + +// /** Orchestrate plan execution: generate per-tag SQL and optionally apply via execute callback. 
*/ +// export async function runPlannedMigrationsInline( +// plan: MigrationPlan, +// getSnapshotsByTag: SnapshotProvider, +// options: RunInlineOptions, +// ): Promise<{ byTag: Record }> { +// const { dialect, mode, engine, apply, execute, reporter } = options; + +// if (apply && !execute) { +// throw new Error('apply is true but no execute callback was provided'); +// } + +// reporter?.onStart({ type: 'start', totalSteps: plan.tags.length }); + +// const byTag: Record = {}; +// for (let i = 0; i < plan.tags.length; i++) { +// const tag = plan.tags[i]; + +// try { +// const pair = await getSnapshotsByTag(tag); +// const prev = pair.prev; +// const cur = pair.cur; + +// let statements: string[]; +// if (dialect === 'sqlite') { +// statements = await generateSqlForSqlite(prev, cur, { mode, engine }); +// } else if (dialect === 'libsql') { +// statements = await generateSqlForLibsql(prev, cur, { mode, engine }); +// } else { +// throw new Error( +// `Unsupported dialect for inline migrations: ${dialect}`, +// ); +// } + +// byTag[tag] = statements; + +// if (apply && execute && statements.length > 0) { +// for (const [idx, sql] of statements.entries()) { +// reporter?.onStep({ +// type: 'step', +// index: i, +// tag, +// progress: statements.length > 0 ? (idx + 1) / statements.length : 1, +// message: `Applying statement ${idx + 1} of ${statements.length}`, +// }); +// await execute(sql); +// } +// } else { +// reporter?.onStep({ +// type: 'step', +// index: i, +// tag, +// progress: 1, +// message: `Generated ${statements.length} statements (dry run)`, +// }); +// } +// } catch (error) { +// reporter?.onError({ type: 'error', error }); +// throw error; +// } +// } + +// reporter?.onComplete({ type: 'complete' }); + +// return { byTag }; +// } + +// ============================== +// Plan A: adapter versions, vault-managed ledger, startup SQL, and data transform chain +// ============================== + +/** Vault-managed migration tables: SQL schema strings hosts can execute. 
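+ *
+ * `ensureVaultLedgerTables` below executes these statements; both use
+ * CREATE TABLE IF NOT EXISTS, so running it repeatedly is safe.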
*/ +const VAULT_MIGRATIONS_TABLE_NAME = 'vault_migrations'; +const VAULT_MIGRATIONS_SQL = ` +CREATE TABLE IF NOT EXISTS vault_migrations ( + adapter_id TEXT PRIMARY KEY, + current_tag TEXT NOT NULL, + updated_at INTEGER NOT NULL +);`; +const VAULT_MIGRATIONS_TABLE = sqliteTable(VAULT_MIGRATIONS_TABLE_NAME, { + adapter_id: text('adapter_id').primaryKey(), + current_tag: text('current_tag').notNull(), + updated_at: integer('updated_at').notNull(), +}); + +const VAULT_MIGRATION_JOURNAL_TABLE_NAME = 'vault_migration_journal'; +const VAULT_MIGRATION_JOURNAL_SQL = ` +CREATE TABLE IF NOT EXISTS vault_migration_journal ( + adapter_id TEXT NOT NULL, + tag TEXT NOT NULL, + applied_at INTEGER NOT NULL, + PRIMARY KEY (adapter_id, tag) +);`; +const VAULT_MIGRATION_JOURNAL_TABLE = sqliteTable( + VAULT_MIGRATION_JOURNAL_TABLE_NAME, + { + adapter_id: text('adapter_id').notNull(), + tag: text('tag').notNull(), + applied_at: integer('applied_at').notNull(), + }, +); + +export async function ensureVaultLedgerTables(db: DrizzleDb): Promise { + await db.run(sql.raw(VAULT_MIGRATIONS_SQL)); + await db.run(sql.raw(VAULT_MIGRATION_JOURNAL_SQL)); +} + +export async function getVaultLedgerTag( + db: DrizzleDb, + adapterId: string, +): Promise { + await ensureVaultLedgerTables(db); + // const row = await db.get<{ current_tag: string | null }>( + // sql`SELECT current_tag FROM ${sql.raw(VAULT_MIGRATIONS_TABLE_NAME)} WHERE adapter_id = ${adapterId}`, + // ); + const row = await db + .select() + .from(VAULT_MIGRATIONS_TABLE) + .where(eq(VAULT_MIGRATIONS_TABLE.adapter_id, adapterId)) + .limit(1) + .get(); + + return row?.current_tag ?? undefined; +} + +async function setVaultLedgerTag( + db: DrizzleDb, + adapterId: string, + tag: string, +): Promise { + await ensureVaultLedgerTables(db); + const timestamp = Date.now(); + + // Upsert semantics: update existing row or insert a new one + await db + .insert(VAULT_MIGRATIONS_TABLE) + .values({ + adapter_id: adapterId, + current_tag: tag, + updated_at: timestamp, + }) + .onConflictDoUpdate({ + target: VAULT_MIGRATIONS_TABLE.adapter_id, + set: { current_tag: tag, updated_at: timestamp }, + }); +} + +async function appendVaultLedgerJournal( + db: DrizzleDb, + adapterId: string, + tag: string, +): Promise { + await ensureVaultLedgerTables(db); + const timestamp = Date.now(); + await db + .insert(VAULT_MIGRATION_JOURNAL_TABLE) + .values({ + adapter_id: adapterId, + tag, + applied_at: timestamp, + }) + .onConflictDoNothing(); +} + +/** Build a pseudo-journal from a versions tuple to reuse planToVersion. */ +export function buildJournalFromVersions< + TVersions extends readonly VersionDef[], +>(versions: TVersions): MigrationJournal { + return { + entries: versions.map((v) => ({ tag: v.tag })), + }; +} + +/** + * Startup SQL migration runner for a single adapter. + * Forward-only: computes steps from the ledger's current tag to the latest version. + */ +export async function runStartupSqlMigrations< + TID extends string, + /* + `defineAdapter` discriminates tags, `Adapter` doesn't, so we don't want to constrain TVersions. + Besides, we perform a runtime check on versions, so that is sufficient. 
+ */ + // TVersions extends readonly VersionDef[], +>( + adapterId: TID, + // versions: TVersions, + versions: readonly VersionDef[], + db: DrizzleDb, + reporter?: ProgressReporter, +): Promise<{ applied: string[] }> { + if (!versions || versions.length === 0) { + return { applied: [] }; + } + + await ensureVaultLedgerTables(db); + + const target = getLatestTag(versions); + const current = await getVaultLedgerTag(db, adapterId); + + const plan = planToVersion( + buildJournalFromVersions(versions), + current, + target, + ); + + const r = reporter; + r?.onStart({ type: 'start', totalSteps: plan.tags.length }); + + const applied: string[] = []; + + for (let i = 0; i < plan.tags.length; i++) { + const tag = plan.tags[i]; + if (!tag) throw new Error(`Invalid tag at plan index ${i}`); + + const ve = versions.find((v) => v.tag === tag); + if (!ve) { + const error = new Error(`Version entry not found for tag ${tag}`); + reporter?.onError({ type: 'error', error }); + throw error; + } + + const statements = ve.sql; + + if (statements.length === 0) { + r?.onStep({ + type: 'step', + index: i, + tag, + progress: 1, + message: 'No SQL statements for this version', + }); + } else { + for (const [idx, statement] of statements.entries()) { + const preview = statement.replace(/\s+/g, ' ').slice(0, 120); + r?.onStep({ + type: 'step', + index: i, + tag, + progress: (idx + 1) / statements.length, + message: `Applying statement ${idx + 1}/${statements.length}: ${preview}...`, + }); + + try { + await db.run(sql.raw(statement)); + } catch (e) { + // Hard failure: bubble up with detailed error + r?.onError({ type: 'error', error: e }); + throw e; + } + } + } + + await appendVaultLedgerJournal(db, adapterId, tag); + await setVaultLedgerTag(db, adapterId, tag); + applied.push(tag); + } + + r?.onComplete({ type: 'complete' }); + + return { applied }; +} + +/** Additional metadata supplied to individual transform functions for better DX. */ +export type DataTransformContext = { + /** Target tag that the transform will produce. */ + toTag: Tag4; + /** Source tag feeding into this transform (previous tag or dataset tag). */ + fromTag?: string; + /** Optional initial source tag provided by the caller. */ + sourceTag?: string; + /** Final tag the chain is targeting. */ + targetTag: Tag4; + /** Zero-based index of this step in the plan. */ + index: number; + /** Total number of steps in the current transform plan. */ + total: number; + /** Whether this step is the final transform in the chain. */ + isLast: boolean; + /** Ordered list of target tags that will be applied (excludes the baseline). */ + plan: readonly Tag4[]; + /** Full adapter versions tuple for additional context. */ + versions: readonly VersionDef[]; +}; + +/** A data transform converts JSON shaped as version A to JSON shaped as version B (adapter-specific). */ +export type DataTransform = ( + input: unknown, + context: DataTransformContext, +) => unknown | Promise; + +/** + * Registry of per-version transforms: + * Map of toTag => transform that expects input of previous tag and produces output of toTag. + * Example: { '0001': t_0000_to_0001, '0002': t_0001_to_0002 } + */ +export type TransformRegistry = Record< + TTag, + DataTransform +>; + +/** Convenience alias for versions tuples to derive required transform keys. */ +export type TransformRegistryForVersions< + TVersions extends readonly VersionDef[], +> = TransformRegistry>; + +/** Determine the forward tag list using the versions tuple, from sourceTag (exclusive) to targetTag (inclusive). 
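+ *
+ * Illustrative example: with versions tagged '0000', '0001', '0002',
+ * computeForwardTagsFromVersions(versions, '0000', '0002') returns
+ * ['0001', '0002']; an undefined sourceTag starts from the baseline
+ * '0000' and yields the same forward plan.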
 */
+export function computeForwardTagsFromVersions<
+  TVersions extends readonly VersionDef[],
+>(
+  versions: TVersions,
+  sourceTag: string | undefined,
+  targetTag: string,
+): string[] {
+  // If no sourceTag (no metadata provided), treat the baseline (first version)
+  // as the current tag so we do NOT require a transform for '0000'.
+  const baseline = versions.length > 0 ? versions[0]?.tag : undefined;
+  const current = sourceTag ?? baseline;
+
+  const j = buildJournalFromVersions(versions);
+  return planToVersion(j, current, targetTag).tags;
+}
+
+/**
+ * Run the data transform chain from sourceTag -> latest version.
+ * The registry must contain a transform for each target tag in the forward plan.
+ */
+export async function runDataTransformChain<
+  TTags extends Tag4,
+  TVersions extends readonly VersionDef[],
+  TSchema extends Record<string, SQLiteTable>,
+>(
+  versions: TVersions,
+  registry: TransformRegistryForVersions<TVersions>,
+  input: {
+    [Key in keyof TSchema]: InferInsertModel<TSchema[Key]>[];
+  },
+  // If undefined, starts from the baseline (first version in the tuple)
+  sourceTag: string | undefined,
+  targetTag?: string,
+  reporter?: ProgressReporter,
+): Promise<unknown> {
+  const plannedTarget = (targetTag ?? getLatestTag(versions)) as Tag4;
+  const tags = computeForwardTagsFromVersions(
+    versions,
+    sourceTag,
+    plannedTarget,
+  );
+  const plannedTags = tags.map((tag) => tag as Tag4);
+  const previousTagByTarget = new Map<Tag4, string | undefined>();
+  for (let i = 0; i < versions.length; i++) {
+    const currentTag = versions[i]?.tag as Tag4;
+    const prev = versions[i - 1]?.tag;
+    previousTagByTarget.set(currentTag, prev);
+  }
+
+  reporter?.onStart({ type: 'start', totalSteps: tags.length });
+
+  let acc: unknown = input;
+  type RequiredTags = RequiredTransformTags<TVersions>;
+  for (let i = 0; i < plannedTags.length; i++) {
+    const toTag = plannedTags[i];
+    if (!toTag) throw new Error(`Invalid planned tag at index ${i}`);
+
+    const fn = registry[toTag as RequiredTags];
+    if (!fn) {
+      const err = new Error(`Missing transform for target tag ${toTag}`);
+      reporter?.onError({ type: 'error', error: err });
+      throw err;
+    }
+    const fromTag =
+      i === 0
+        ? (sourceTag ?? previousTagByTarget.get(toTag))
+        : plannedTags[i - 1];
+    if (!fromTag)
+      throw new Error(`Cannot determine fromTag for target tag ${toTag}`);
+    // When a sourceTag was supplied, the first planned step must follow
+    // directly from it; an undefined sourceTag legitimately starts from the
+    // baseline version and needs no check.
+    if (i === 0 && sourceTag !== undefined) {
+      const naturalFromTag = previousTagByTarget.get(toTag);
+      if (naturalFromTag !== undefined && naturalFromTag !== sourceTag)
+        throw new Error(
+          `Transform chain mismatch: expected fromTag ${naturalFromTag} to match sourceTag ${sourceTag}`,
+        );
+    }
+
+    acc = await fn(acc, {
+      toTag,
+      fromTag,
+      sourceTag,
+      targetTag: plannedTarget,
+      index: i,
+      total: plannedTags.length,
+      isLast: i === plannedTags.length - 1,
+      plan: plannedTags,
+      versions,
+    });
+    reporter?.onStep({
+      type: 'step',
+      index: i,
+      tag: toTag,
+      progress: 1,
+      message: `Transformed to ${toTag}`,
+    });
+  }
+
+  reporter?.onComplete({ type: 'complete' });
+  return acc;
+}
+
+function getLatestTag<TVersions extends readonly VersionDef[]>(
+  versions: TVersions,
+): TVersions[number]['tag'] {
+  // Return the numerically greatest tag (e.g., '0003' over '0002'/'0000')
+  const sorted = versions
+    .map((v) => [v.tag, Number.parseInt(v.tag, 10)] as const)
+    .sort((a, b) => a[1] - b[1]);
+  const result = sorted[sorted.length - 1]?.[0];
+  if (!result)
+    throw new Error('Cannot determine latest tag from versions tuple');
+
+  return result;
+}
+
+// ==============================
+// Version tuple type-safety helpers (authoring-time)
+// ==============================
+
+type Digit = '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9';
+
+/** Four-digit tag, e.g. '0000', '0001'.
*/ +export type Tag4 = `${Digit}${Digit}${Digit}${Digit}`; + +/** + * Version definition for adapter-managed migrations (stricter than runtime). + * + * We aren't using Tag4 here. This is because `Adapter` itself doesn't discriminate, + * or else it would cause headaches for anything that uses `Adapter` generically. + * `defineAdapter` serves as the dev-time assertion for adapter authors. + */ +export type VersionDef = { + /** Four-digit version tag (e.g., '0001'). Must be unique within the tuple. */ + tag: TTag; + /** Inline array of statements (preferred for environment-agnostic bundles) */ + sql: string[]; +}; + +/* Tuple utilities */ +type FirstOfTuple = T extends readonly [ + infer F, + ...unknown[], +] + ? F + : never; + +/** Extract the union of tags from a version tuple. */ +export type VersionTags[]> = + TVersions[number]['tag']; + +/** First tag from versions tuple. */ +export type FirstTag[]> = + FirstOfTuple extends VersionDef + ? FirstOfTuple['tag'] + : never; + +/** Tag tuple derived from a VersionDef tuple. */ +export type VersionTagTuple[]> = { + [K in keyof TVersions]: TVersions[K] extends VersionDef< + infer TTag extends string + > + ? TTag + : never; +}; + +/** Tuple of required forward transform tags (all tags except the first/baseline). */ +export type RequiredTransformTagTuple< + TVersions extends readonly VersionDef[], +> = TVersions extends readonly [VersionDef, ...infer Rest] + ? Rest extends readonly VersionDef[] + ? { + [K in keyof Rest]: Rest[K] extends VersionDef + ? Tag + : never; + } + : [] + : []; + +/** + * Compute the tags that require data transforms: + * all version tags except the first (baseline). + */ +export type RequiredTransformTags< + TVersions extends readonly VersionDef[], +> = RequiredTransformTagTuple[number]; + +/** + * Authoring-time transform registry keyed by the required transform tags. + * Example: + * const transforms: TransformRegistryForVersions = + * { '0001': fn, '0002': fn }; + */ + +/** + * Helper to define a transform registry with compile-time keys. + * Provide the union of tags you must cover: + * const transforms = defineTransformRegistry({ '0001': fn, '0002': fn }); + * + * Each transform receives a {@link DataTransformContext} describing the plan, + * which enables richer DX (branching, instrumentation, etc.). + */ +export function defineTransformRegistry< + TRegistry extends Partial>, +>(registry: TRegistry): TRegistry { + return registry; +} + +/** Validate transformed data using an injected validator (e.g., drizzle-arktype). Returns morphed value. */ +export type DataValidator = (value: unknown) => unknown | Promise; + +/** Run chain then validate; returns morphed/validated data if no exception is thrown. */ +export async function transformAndValidate< + TVersions extends readonly VersionDef[], + TSchema extends Record, +>( + versions: TVersions, + registry: TransformRegistryForVersions, + input: { + [Key in keyof TSchema]: InferInsertModel[]; + }, + sourceTag: string | undefined, + validator?: DataValidator, + targetTag?: string, + reporter?: ProgressReporter, +): Promise { + const transformed = await runDataTransformChain( + versions, + registry, + input, + sourceTag, + targetTag, + reporter, + ); + const validated = validator ? await validator(transformed) : transformed; + return validated; +} + +/** + * Helper to define a versions tuple without needing "as const" at call sites. + * Preserves literal tag types across the tuple. 
+ *
+ * Example:
+ * const versions = defineVersions(
+ *   { tag: '0000', sql: ['CREATE TABLE ...;'] },
+ *   { tag: '0001', sql: [] },
+ * );
+ */
+export function defineVersions<const TVersions extends readonly VersionDef[]>(
+  ...versions: TVersions
+): TVersions {
+  return versions;
+}
+
+/**
+ * Derive the union of adapter table names from any shape that exposes a "schema" record.
+ * This avoids coupling to the Adapter type while allowing type-safe table-name usage in hosts.
+ *
+ * Example:
+ * type Tables = AdapterTableNames<typeof adapter>;
+ * // Tables is the union of keys of adapter.schema (as strings)
+ */
+export type AdapterTableNames<A> = A extends { schema: Record<string, unknown> }
+  ? Extract<keyof A['schema'], string>
+  : never;
diff --git a/packages/vault-core/src/core/strip.ts b/packages/vault-core/src/core/strip.ts
index ccf44e5640..8de1de3e37 100644
--- a/packages/vault-core/src/core/strip.ts
+++ b/packages/vault-core/src/core/strip.ts
@@ -1,5 +1,7 @@
-import type { SQLiteColumn, SQLiteTable } from 'drizzle-orm/sqlite-core';
+import type { AnyColumn } from 'drizzle-orm';
+
 import type { ColumnDescriptions } from './adapter';
+import type { AnyTable } from './db';
 
 export type ReadableColumnInfo = {
   name: string;
@@ -18,11 +20,11 @@ export type ReadableTableInfo = {
  *
  * When metadata is provided, matching table/column descriptions are included under `description`.
  */
-export function readableSchemaInfo<TSchema extends Record<string, SQLiteTable>>(
+export function readableSchemaInfo<TSchema extends Record<string, AnyTable>>(
   schema: TSchema,
   metadata?: ColumnDescriptions,
 ): ReadableTableInfo[] {
-  const tables = Object.entries(schema) as [string, SQLiteTable][];
+  const tables = Object.entries(schema);
   return tables.map(([name, table]) => ({
     name,
     columns: readableTableInfo(
@@ -33,10 +35,10 @@ export function readableSchemaInfo<TSchema extends Record<string, SQLiteTable>>(
 }
 
 function readableTableInfo(
-  table: SQLiteTable,
+  table: AnyTable,
   tableMetadata?: Record<string, string>,
 ) {
-  const columns = Object.entries(table) as [string, SQLiteColumn][];
+  const columns = Object.entries(table._.columns);
   return columns.map(([name, col]) =>
     readableColumnInfo(name, col, tableMetadata?.[name]),
   );
@@ -44,7 +46,7 @@ function readableTableInfo(
 
 function readableColumnInfo(
   name: string,
-  column: SQLiteColumn,
+  column: AnyColumn,
   description?: string,
 ): ReadableColumnInfo {
   // Add other fields here we wish to expose
@@ -52,6 +54,6 @@ function readableColumnInfo(
     name,
     type: column.dataType,
     nullable: !column.notNull,
-  } as const;
+  };
   return description !== undefined ? { ...base, description } : base;
 }
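+
+// Usage sketch (illustrative; `adapter` here stands in for a hypothetical Adapter):
+// readableSchemaInfo(adapter.schema, adapter.metadata) returns one
+// { name, columns: [{ name, type, nullable, description? }] } entry per table.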
diff --git a/packages/vault-core/src/core/vault.spec.ts b/packages/vault-core/src/core/vault.spec.ts
new file mode 100644
index 0000000000..ced6bdb9cb
--- /dev/null
+++ b/packages/vault-core/src/core/vault.spec.ts
@@ -0,0 +1,159 @@
+import Database from 'bun:sqlite';
+import { test } from 'bun:test';
+import { fail } from 'node:assert';
+import assert from 'node:assert/strict';
+import { drizzle } from 'drizzle-orm/bun-sqlite';
+import {
+  createTestAdapter,
+  ingestSchema,
+  invalidIngestData,
+  makeImportFiles,
+  makeIngestFile,
+  TEST_ADAPTER_ID,
+  testSchema,
+  validIngestData,
+} from '../../tests/fixtures/testAdapter';
+import { jsonFormat } from '../codecs/json';
+import { getVaultLedgerTag, runStartupSqlMigrations } from './migrations';
+import { createVault } from './vault';
+
+function createDatabase() {
+  const sqlite = new Database(':memory:');
+  const db = drizzle(sqlite);
+  return { sqlite, db };
+}
+
+function createVaultInstance() {
+  const { sqlite, db } = createDatabase();
+  const adapter = createTestAdapter();
+  const vault = createVault({
+    database: db,
+    adapters: [adapter],
+  });
+  return { sqlite, db, adapter, vault };
+}
+
+test('runStartupSqlMigrations applies schema and updates ledger', async () => {
+  const { sqlite, db } = createDatabase();
+  const adapter = createTestAdapter();
+  try {
+    const result = await runStartupSqlMigrations(
+      adapter.id,
+      adapter.versions,
+      db,
+    );
+    assert.deepEqual(result.applied, ['0000']);
+
+    // Would throw here if the migration had not created the table
+    await db.select().from(testSchema.test_items);
+
+    const ledgerTag = await getVaultLedgerTag(db, adapter.id);
+    assert.equal(ledgerTag, '0000');
+  } finally {
+    sqlite.close();
+  }
+});
+
+test('ingestData stores rows and exposes them via query interface', async () => {
+  const { sqlite, db, adapter, vault } = createVaultInstance();
+  try {
+    const file = makeIngestFile(validIngestData);
+    await vault.ingestData({ adapter, file });
+
+    const rows = await db.select().from(testSchema.test_items);
+    const ingested = ingestSchema(validIngestData);
+    if ('issues' in ingested) throw new Error('Ingested data is invalid');
+
+    assert.equal(rows.length, ingested.items.length);
+
+    const { db: queryDb, tables } = vault.getQueryInterface();
+    const qiSchema = tables[TEST_ADAPTER_ID] as typeof testSchema;
+    const qiRows = await queryDb.select().from(qiSchema.test_items);
+    assert.equal(qiRows.length, validIngestData.items.length);
+  } finally {
+    sqlite.close();
+  }
+});
+
+test('ingestData rejects invalid payloads', async () => {
+  const { sqlite, adapter, vault } = createVaultInstance();
+  try {
+    const invalidFile = makeIngestFile(invalidIngestData, 'invalid.json');
+    await assert.rejects(
+      () => vault.ingestData({ adapter, file: invalidFile }),
+      /validation/i,
+    );
+  } finally {
+    sqlite.close();
+  }
+});
+
+test('importData inserts rows when validator succeeds', async () => {
+  const { sqlite, db, vault } = createVaultInstance();
+  try {
+    const ingested = ingestSchema(validIngestData);
+    if ('issues' in ingested) fail('Ingested data is invalid');
+    const files = makeImportFiles(ingested);
+
+    await vault.importData({
+      files,
+      codec: jsonFormat,
+    });
+
+    const rows = await db.select().from(testSchema.test_items);
+    assert.equal(rows.length, validIngestData.items.length);
+  } finally {
+    sqlite.close();
+  }
+});
+
+test('importData propagates validator errors', async () => {
+  const { vault } = createVaultInstance();
+  await assert.rejects(async () => {
+    const ingested = ingestSchema(invalidIngestData);
+    // @ts-expect-error invalid data for test
+    const files = makeImportFiles(ingested);
+    await vault.importData({
+      files,
+      codec: jsonFormat,
+    });
+  });
+});
+
+test('smoke: ingest, export, and import round trip', async () => {
+  const {
+    sqlite: sourceSqlite,
+    vault: sourceVault,
+    adapter: sourceAdapter,
+  } = createVaultInstance();
+  try {
+    const ingestFile = makeIngestFile(validIngestData);
+    await sourceVault.ingestData({ adapter: sourceAdapter, file: ingestFile });
+
+    const exported = await sourceVault.exportData({ codec: jsonFormat });
+    const { sqlite: destSqlite, vault: destVault } = createVaultInstance();
+
+    try {
+      const importFiles = new Map<string, File>();
+      for (const [path, file] of exported) {
+        const contents = await file.text();
+        importFiles.set(path, {
+          name: file.name,
+          type: file.type,
+          async text() {
+            return contents;
+          },
+        } as unknown as File);
+      }
+
+      await destVault.importData({
+        files: importFiles,
+        codec: jsonFormat,
+      });
+    } finally {
+      destSqlite.close();
+    }
+  } finally {
+    sourceSqlite.close();
+  }
+});
diff --git a/packages/vault-core/src/core/vault.ts b/packages/vault-core/src/core/vault.ts
index 070d5608fa..3c0cf8aaf0 100644
--- a/packages/vault-core/src/core/vault.ts
+++ b/packages/vault-core/src/core/vault.ts
@@ -1,144 +1,407 @@
-import type { MigrationConfig } from 'drizzle-orm/migrator';
-import type { Adapter, CompatibleDB } from './adapter';
-import type { VaultConfig } from './config';
-import { readableSchemaInfo } from './strip';
-
-export type ImportCounts = Record<string, number>;
-
-export type ImportReport = {
-  adapter: string;
-  migrated: boolean;
-  counts: ImportCounts;
-  // Raw parsed payload for advanced callers; leave as unknown to avoid tight coupling
-  parsed: unknown;
-};
-
-export type ImportSummary = {
-  reports: ImportReport[];
-  totalTables: number;
-  totalRecords: number;
-};
-
-function countRecords(parsed: unknown): ImportCounts {
-  const out: ImportCounts = {};
-  if (parsed && typeof parsed === 'object') {
-    for (const [k, v] of Object.entries(parsed as Record<string, unknown>)) {
-      out[k] = Array.isArray(v) ? v.length : 0;
-    }
+import { createSelectSchema } from 'drizzle-arktype';
+import type { Adapter, UniqueAdapters } from './adapter';
+import {
+  defaultConvention,
+  listColumns,
+  listPrimaryKeys,
+  listTables,
+} from './codec';
+import type {
+  AdapterIDs,
+  AdapterTableMap,
+  CoreOptions,
+  ExportOptions,
+  ImportOptions,
+  IngestOptions,
+  Vault,
+} from './config';
+import type { CompatibleDB } from './db';
+import { runImportPipeline } from './import/importPipeline';
+import {
+  createMigrationMetadataFile,
+  MIGRATION_META_DIR,
+} from './import/migrationMetadata';
+import { runStartupSqlMigrations } from './migrations';
+
+/** Minimal Drizzle-DB type expected by core. Hosts pass a concrete Drizzle instance. */
+export type DrizzleDb = CompatibleDB;
+
+/**
+ * Construct a Vault bound to a Drizzle DB. No IO; pure orchestration.
+ */
+export function createVault(
+  options: CoreOptions,
+): Vault {
+  const db = options.database;
+
+  // Early validation: reject duplicate adapter IDs before doing any work
+  ensureNoDuplicateAdapterIds(options.adapters);
+
+  // Ensure migrations have been applied before we touch adapter tables.
+  async function ensureMigrationsUpToDate(adapter: Adapter, _ctx: string) {
+    const versions = adapter.versions;
+    if (!versions || versions.length === 0) return;
+    await runStartupSqlMigrations(adapter.id, versions, db);
   }
-  return out;
-}
 
-export class Vault<
-  TDatabase extends CompatibleDB,
-  TAdapters extends Adapter[],
-> {
-  readonly adapters: TAdapters;
-  readonly db: TDatabase;
-  readonly migrateFunc: (
-    db: TDatabase,
-    config: MigrationConfig,
-  ) => Promise;
-
-  constructor(config: VaultConfig) {
-    this.adapters = config.adapters;
-    this.db = config.database;
-    this.migrateFunc = config.migrateFunc;
+  // Standard Schema validation runner
+  async function runValidation(
+    adapter: Adapter,
+    value: unknown,
+  ): Promise<unknown> {
+    const { validator } = adapter;
+    if (!validator)
+      throw new Error(
+        `validation required: adapter '${adapter.id}' has no validator`,
+      );
+
+    const result = await validator['~standard'].validate(value);
+    if (result.issues) {
+      throw new Error(
+        `importData: validation failed for adapter '${adapter.id}': ${result.issues
+          .map((i) => i.message.trim())
+          .join('; ')}`,
+      );
+    }
+    return 'value' in result ? result.value : value;
   }
 
   /**
-   * Create and initialize the Vault. Runs migrations for selected adapters before returning.
+   * Ensure no duplicate adapter IDs at runtime (covers non-literal arrays)
+   * @throws if duplicate IDs found
    */
-  static async create<
-    TDatabase extends CompatibleDB,
-    TAdapters extends Adapter[],
-  >(config: VaultConfig) {
-    const vault = new Vault(config);
-    await vault.migrate();
-    return vault;
+  function ensureNoDuplicateAdapterIds(
+    adapters: UniqueAdapters,
+  ) {
+    const seen = new Set<string>();
+    for (const a of adapters) {
+      if (seen.has(a.id))
+        throw new Error(
+          `createVault: duplicate adapter ID found at runtime: '${a.id}'`,
+        );
+      seen.add(a.id);
+    }
   }
 
   /**
-   * Run migrations for the selected adapters (defaults to all).
-   * Uses adapter.drizzleConfig.out verbatim; resolution/existence is delegated to migrateFunc implementation.
+   * Build a drizzle-arktype based dataset validator for a given adapter's schema.
+   * Validates the de-prefixed dataset shape: { [unprefixedTable]: Row[] }.
+   * Throws with aggregated messages on any row failure and returns morphed rows when available.
    */
-  private async migrate() {
-    // TODO something better than whatever this is
-    const modulePath = import.meta.resolve('../adapters');
-    const mod = (await import(modulePath)) as Record;
-
-    for (const adapter of this.adapters) {
-      for (const func of Object.values(mod)) {
-        if (typeof func !== 'function') continue;
-        const a = func();
-        this.adapters[0]?.schema[''];
-
-        // TODO again, not amazing
-        if (!a || typeof a === 'object' || !('id' in a) || a.id !== adapter.id)
+  async function createDrizzleArkTypeValidator(adapter: Adapter) {
+    // Precompute per-table select schemas indexed by unprefixed table key
+    const schemas = new Map<string, ReturnType<typeof createSelectSchema>>();
+    for (const [tableName, table] of listTables(adapter.schema)) {
+      // Expect table names like `<adapterId>_<table>`; strip the `<adapterId>_` prefix
+      const unprefixed = tableName.startsWith(`${adapter.id}_`)
+        ? tableName.slice(adapter.id.length + 1)
+        : tableName;
+      const t = createSelectSchema(table);
+      schemas.set(unprefixed, t);
+    }
+
+    return async (value: unknown) => {
+      const ds = (value ??
{}) as Record; + const issues: string[] = []; + const out: Record = {}; + + for (const [key, rows] of Object.entries(ds)) { + const typeForTable = schemas.get(key); + if (!typeForTable) { + issues.push( + `unknown table '${key}' for adapter '${adapter.id}' (no schema found)`, + ); + continue; + } + const validator = typeForTable['~standard']; + if (!Array.isArray(rows)) { + issues.push(`table '${key}' expected an array`); continue; + } + const nextRows: unknown[] = []; + for (let i = 0; i < rows.length; i++) { + const row = rows[i]; + const res = await validator.validate(row); + if (res.issues) { + const msgs = res.issues + .map((m: { message: string }) => m.message.trim()) + .join('; '); + issues.push(`${key}[${i}]: ${msgs}`); + } else { + const v = res.value ?? row; + nextRows.push(v); + } + } + out[key] = nextRows; + } - await this.migrateFunc(this.db, { - migrationsFolder: adapter.drizzleConfig.out ?? '', - migrationsSchema: adapter.drizzleConfig.migrations?.schema ?? '', - migrationsTable: adapter.drizzleConfig.migrations?.table ?? '', - }); + if (issues.length) { + throw new Error( + `importData: drizzle-arktype validation failed for adapter '${adapter.id}': ${issues.join('; ')}`, + ); } - } + return out; + }; } /** - * Parse a file/blob with each selected adapter and upsert into the database. - * Returns a summary with per-adapter counts. + * Drop all rows from a table by name, then insert all provided rows. + * @throws if table not found in adapter schema */ - async importBlob( - blob: Blob, - adapterId: (TAdapters[number]['id'] & object) | string, // Allow any string but keep intellisense + async function replaceAdapterTables( + adapter: Adapter, + dataset: Record, ) { - const reports: ImportReport[] = []; - - const adapter = this.adapters.find((a) => a.id === adapterId); - if (!adapter) throw new Error(`Adapter not found: ${adapterId}`); - - // Parse the blob via the selected adapter - const parsed = await adapter.parse(blob); - // Validate the parsed data against the adapter's schema, throw if invalid - const valid = adapter.validator.assert(parsed); - // Insert the data into the database - await adapter.upsert(this.db, valid); - - reports.push({ - adapter: adapter.name, - migrated: true, - counts: countRecords(parsed), - parsed, - }); - - const totalTables = reports.reduce( - (acc, r) => acc + Object.keys(r.counts).length, - 0, - ); - const totalRecords = reports.reduce( - (acc, r) => acc + Object.values(r.counts).reduce((a, b) => a + b, 0), - 0, - ); - - return { reports, totalTables, totalRecords }; - } + const { schema } = adapter; + for (const [tableName, rows] of Object.entries(dataset)) { + // Try direct lookup first (for prefixed keys like 'test_items') + let table = schema[tableName as keyof typeof schema]; - getCurrentLayout(adapterId?: (TAdapters[number]['id'] & object) | string) { - const schemas = []; + // If not found, try adding adapter prefix (for unprefixed keys like 'items') + if (!table) { + const prefixedName = `${adapter.id}_${tableName}`; + table = schema[prefixedName as keyof typeof schema]; + } - for (const adapter of this.adapters) { - if (adapterId && adapter.id !== adapterId) continue; + // If still not found, throw with helpful error + if (!table) { + const prefixedName = `${adapter.id}_${tableName}`; + throw new Error( + `replaceAdapterTables: unknown table '${tableName}' for adapter '${adapter.id}'. 
Tried '${tableName}' and '${prefixedName}'`, + ); + } - const humanReadable = readableSchemaInfo( - adapter.schema, - // Adapter.metadata: table -> column -> description - adapter.metadata, - ); - schemas.push(humanReadable); + await db.delete(table); + for (const row of rows) { + await db.insert(table).values([row]); + } } - - return schemas; } + + return { + async exportData(opts: ExportOptions) { + const { + adapterIDs, + codec, + conventions: conv = defaultConvention(), + } = opts; + const adapters = + adapterIDs === undefined + ? options.adapters + : options.adapters.filter((a) => adapterIDs.includes(a.id)); + + ensureNoDuplicateAdapterIds(adapters); + + const files = new Map(); + + // Iterate over each adapter + for (const adapter of adapters) { + await ensureMigrationsUpToDate(adapter, 'exportData'); + const { schema } = adapter; + const adapterId = adapter.id; + + // Iterate over each table in the adapter's schema + for (const [tableName, table] of listTables(schema)) { + // Select for all rows from the table + const rows = await db.select().from(table); + + const pkCols = listPrimaryKeys(tableName, table); + const tableCols = new Set(listColumns(table).map(([name]) => name)); + + for (const row of rows) { + // Build a flat record deterministically for the codec + const rec: Record = {}; + for (const [k, v] of Object.entries(row)) { + if (!tableCols.has(k)) continue; + rec[k] = v; + } + + // Compute path using PK values + const pkValues = pkCols.map(([name]) => row[name]); + const basePath = conv.pathFor(adapterId, tableName, pkValues); + + const path = `${basePath}.${codec.fileExtension}`; + const filename = path.split('/').pop(); + if (!filename) throw new Error('invalid filename'); + const text = codec.stringify(rec); + const file = new File([text], filename, { + type: codec.mimeType, + }); + + files.set(path, file); + } + } + const { path: metaPath, file: metaFile } = + await createMigrationMetadataFile(adapter, db); + files.set(metaPath, metaFile); + } + + return files; + }, + async importData(opts: ImportOptions) { + const { files, codec } = opts; + + // Group files by detected adapter id and collect per-adapter detected tags from metadata + type Group = { files: Array<[string, File]>; detectedTag?: string }; + const groups = new Map(); + + const knownAdapterIds = new Set(options.adapters.map((a) => a.id)); + + for (const [path, input] of files) { + const parts = path.split('/').filter((segment) => segment.length > 0); + + // Locate any known adapter id segment in the path + const adapterIndex = parts.findIndex((p) => knownAdapterIds.has(p)); + if (adapterIndex === -1) continue; // can't determine adapter; skip + + const adapterIdFromPath = parts[adapterIndex]; + if (!adapterIdFromPath) + throw new Error('unable to determine adapter ID from path'); + + // Migration metadata handling; associate detected tag with this adapter group + if (parts.includes(MIGRATION_META_DIR)) { + try { + const text = await input.text(); + const parsed = codec.parse(text); + if (typeof parsed.tag === 'string') { + const group = groups.get(adapterIdFromPath) ?? { files: [] }; + group.detectedTag = parsed.tag; + groups.set(adapterIdFromPath, group); + } + } catch { + // ignore malformed metadata + } + continue; + } + + const group = groups.get(adapterIdFromPath) ?? 
{ files: [] }; + group.files.push([path, input]); + groups.set(adapterIdFromPath, group); + } + + // Process each adapter group independently + for (const [adapterId, group] of groups) { + const adapter = options.adapters.find((a) => a.id === adapterId); + if (!adapter) continue; // unknown adapter in bundle; skip + + await ensureMigrationsUpToDate(adapter, 'importData'); + + const { schema } = adapter; + const dataset: Record = {}; + + for (const [path, input] of group.files) { + const parts = path.split('/').filter((segment) => segment.length > 0); + + // Recompute indices for this path + const aIdx = parts.indexOf(adapterId); + if (aIdx === -1) continue; + const pathParts = parts.slice(aIdx); + if (pathParts.length < 2) continue; // Need adapter/table structure + + const tableName = pathParts[1]; + if (!tableName) + throw new Error( + 'importData: unable to determine table name from path', + ); + + // Extension check against codec + const dot = path.lastIndexOf('.'); + if (dot < 0) + throw new Error(`importData: file ${path} has no extension`); + const ext = path.slice(dot + 1); + if (ext !== codec.fileExtension) + throw new Error( + `importData: file ${path} has wrong extension (expected ${codec.fileExtension})`, + ); + + // Table lookup + const table = schema[tableName as keyof typeof schema]; + if (!table) throw new Error(`importData: unknown table ${tableName}`); + + // Parse and denormalize row by table columns + const text = await input.text(); + const rec = codec.parse(text); + const row: Record = {}; + const tableCols = new Set(listColumns(table).map(([name]) => name)); + for (const [k, v] of Object.entries(rec ?? {})) { + if (!tableCols.has(k)) continue; + row[k] = v; + } + + // Dataset key is unprefixed table name (strip '_') + const key = tableName.slice(adapterId.length + 1); + dataset[key] ??= []; + dataset[key].push(row); + } + + // Build required drizzle-arktype validator bound to this adapter's schema + const dataValidator = await createDrizzleArkTypeValidator(adapter); + + // Run migrations/transforms pipeline with drizzle-arktype validation (sole validator for import) + // We don't want to run the adapter's built-in validator here because it likely won't match the preprocessed shape + const validatedDataset = await runImportPipeline({ + adapter, + dataset, + transformsOverride: undefined, + versionsOverride: undefined, + dataValidator, + sourceTag: undefined, + detectedTag: group.detectedTag, + }); + + // Replace adapter tables with validated dataset + await replaceAdapterTables(adapter, validatedDataset); + } + }, + async ingestData(opts: IngestOptions) { + const adapter = opts.adapter; + const file = opts.file; + + ensureNoDuplicateAdapterIds([adapter]); + await ensureMigrationsUpToDate(adapter, 'ingestData'); + + if (!adapter.ingestors || adapter.ingestors.length === 0) + throw new Error( + `ingestData: adapter '${adapter.id}' has no ingestors configured`, + ); + + // Catch may be unnecessary, but protects against faulty ingestor implementations + const ingestor = adapter.ingestors.find((i) => { + try { + return i.matches(file); + } catch { + return false; + } + }); + // If no ingestor matched, throw + if (!ingestor) + throw new Error( + `ingestData: no ingestor matched file '${file.name}' for adapter '${adapter.id}'`, + ); + + const dataset = await ingestor.parse(file); + + // Run validation and use morphed value + const validated = await runValidation(adapter, dataset); + + // TODO is this necessary or is there a better way? 
+      // We might be able to do a runtime-based "on-conflict-replace" insert instead
+      await replaceAdapterTables(
+        adapter,
+        // TODO refine type
+        validated as Record<string, unknown>,
+      );
+    },
+    getQueryInterface() {
+      // Populate a map of adapter ID -> table name -> table object
+      const tables = {} as AdapterTableMap;
+      for (const adapter of options.adapters) {
+        tables[adapter.id as AdapterIDs] = adapter.schema;
+      }
+      return {
+        db,
+        tables,
+      };
+    },
+  };
 }
diff --git a/packages/vault-core/src/utils/archive/index.ts b/packages/vault-core/src/utils/archive/index.ts
new file mode 100644
index 0000000000..a04ccfbcbe
--- /dev/null
+++ b/packages/vault-core/src/utils/archive/index.ts
@@ -0,0 +1,2 @@
+export * from './tar';
+export * from './zip';
diff --git a/packages/vault-core/src/utils/archive/tar/index.spec.ts b/packages/vault-core/src/utils/archive/tar/index.spec.ts
new file mode 100644
index 0000000000..2d4725e519
--- /dev/null
+++ b/packages/vault-core/src/utils/archive/tar/index.spec.ts
@@ -0,0 +1,49 @@
+import { describe, it } from 'bun:test';
+import assert from 'node:assert/strict';
+import { TAR } from './index';
+
+describe('TAR', () => {
+  it('packs and unpacks single text file', async () => {
+    const files = { 'hello.txt': 'hello world' };
+    const tar = await TAR.pack(files);
+    assert.ok(tar instanceof Uint8Array);
+    assert.equal(tar.length % 512, 0);
+    const unpacked = await TAR.unpack(tar);
+    assert.equal(Object.keys(unpacked).length, 1);
+    assert.equal(
+      new TextDecoder().decode(unpacked['hello.txt']),
+      'hello world',
+    );
+  });
+
+  it('packs and unpacks binary file', async () => {
+    const bin = new Uint8Array(100);
+    for (let i = 0; i < bin.length; i++) bin[i] = i;
+    const files = { 'data.bin': bin };
+    const tar = await TAR.pack(files);
+    const unpacked = await TAR.unpack(tar);
+    assert.equal(unpacked['data.bin']?.length, 100);
+    assert.equal(unpacked['data.bin']?.[50], 50);
+  });
+
+  it('handles multiple files and preserves content', async () => {
+    const files = {
+      'a.txt': 'A',
+      'path/b.txt': 'Bee',
+      'nested/deep/c.txt': 'Sea',
+    };
+    const tar = await TAR.pack(files);
+    const unpacked = await TAR.unpack(tar);
+    assert.deepEqual(Object.keys(unpacked).sort(), Object.keys(files).sort());
+    for (const k of Object.keys(files) as Array<keyof typeof files>) {
+      assert.equal(new TextDecoder().decode(unpacked[k]), files[k]);
+    }
+  });
+
+  it('aligns file data to 512-byte boundaries (padding check)', async () => {
+    const content = 'x'.repeat(700); // crosses one 512 boundary, requires padding
+    const tar = await TAR.pack({ 'pad.txt': content });
+    // header (512) + data padded to 1024 (data + padding) + two zero-blocks (1024) => total 2560
+    assert.equal(tar.length, 2560);
+  });
+});
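The padding test above relies on USTAR block arithmetic: each file occupies one 512-byte header block plus its data rounded up to the next 512-byte boundary, and two zero blocks terminate the archive. A minimal sketch of that size calculation, under those assumptions (`expectedTarSize` is an illustrative helper, not part of this diff):

```ts
// Expected byte length of an archive produced by TAR.pack: one 512-byte
// header per file, data rounded up to 512, plus a 1024-byte terminator.
function expectedTarSize(fileSizes: number[]): number {
  const BLOCK = 512;
  const body = fileSizes.reduce(
    (sum, n) => sum + BLOCK + Math.ceil(n / BLOCK) * BLOCK,
    0,
  );
  return body + 2 * BLOCK;
}

// 700 bytes => 512 (header) + 1024 (padded data) + 1024 (terminator) = 2560
console.log(expectedTarSize([700])); // 2560
```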
diff --git a/packages/vault-core/src/utils/archive/tar/index.ts b/packages/vault-core/src/utils/archive/tar/index.ts
new file mode 100644
index 0000000000..63510c010a
--- /dev/null
+++ b/packages/vault-core/src/utils/archive/tar/index.ts
@@ -0,0 +1,195 @@
+type TarEntryInput = string | Uint8Array;
+type TarFilesInput = Record<string, TarEntryInput>;
+type TarUnpacked = Record<string, Uint8Array>;
+
+const BLOCK_SIZE = 512;
+const USTAR_MAGIC = 'ustar';
+
+export type TarNamespace = {
+  /** Create a tar archive (Uint8Array) from a map of filename->content */
+  pack(files: TarFilesInput, options?: { mtime?: number }): Promise<Uint8Array>;
+  /** Extract a tar archive into a map of filename->bytes */
+  unpack(tarBytes: Uint8Array): Promise<TarUnpacked>;
+};
+
+function textToBytes(text: string): Uint8Array {
+  const enc = new TextEncoder();
+  return enc.encode(text);
+}
+
+function bytesToText(bytes: Uint8Array): string {
+  const dec = new TextDecoder();
+  return dec.decode(bytes);
+}
+
+function pad(str: string, length: number, padChar = '\0'): string {
+  if (str.length >= length) return str.slice(0, length);
+  return str + padChar.repeat(length - str.length);
+}
+
+function octal(value: number, length: number): string {
+  const str = value.toString(8);
+  const padLen = length - str.length - 1; // reserve one byte for null
+  const padded = `${'0'.repeat(Math.max(0, padLen))}${str}\0`;
+  return pad(padded, length, ' ');
+}
+
+function computeChecksum(block: Uint8Array): number {
+  let sum = 0;
+  for (let i = 0; i < block.length; i++) {
+    const byte = block[i];
+    if (byte === undefined) continue;
+    sum += byte;
+  }
+  return sum;
+}
+
+function setString(
+  buf: Uint8Array,
+  offset: number,
+  length: number,
+  value: string,
+) {
+  for (let i = 0; i < length; i++) {
+    buf[offset + i] = i < value.length ? value.charCodeAt(i) : 0; // null padding
+  }
+}
+
+function createHeader(name: string, size: number, mtime: number): Uint8Array {
+  const block = new Uint8Array(BLOCK_SIZE);
+  // Buffer is already zero-filled
+  if (name.length > 100)
+    throw new Error(`tar: filename too long (>100): ${name}`);
+
+  setString(block, 0, 100, name); // name
+  setString(block, 100, 8, pad('0000644', 8)); // mode
+  setString(block, 108, 8, pad('0000000', 8)); // uid
+  setString(block, 116, 8, pad('0000000', 8)); // gid
+  setString(block, 124, 12, octal(size, 12)); // size
+  setString(block, 136, 12, octal(mtime, 12)); // mtime
+  // checksum field initially filled with spaces (0x20)
+  for (let i = 148; i < 156; i++) block[i] = 0x20;
+  setString(block, 156, 1, '0'); // typeflag '0' regular file
+  // linkname (unused) 157-256
+  setString(block, 257, 6, `${USTAR_MAGIC}\0`); // magic 'ustar\0'
+  setString(block, 263, 2, '00'); // version
+  // uname / gname (empty)
+  // compute checksum
+  const checksum = computeChecksum(block);
+  const chkStr = octal(checksum, 8); // octal digits, NUL-terminated, space-padded
+  setString(block, 148, 8, chkStr);
+  return block;
+}
+
+function concat(chunks: Uint8Array[]): Uint8Array {
+  let total = 0;
+  for (const c of chunks) total += c.length;
+  const out = new Uint8Array(total);
+  let offset = 0;
+  for (const c of chunks) {
+    out.set(c, offset);
+    offset += c.length;
+  }
+  return out;
+}
+
+function normalizeInput(data: TarEntryInput): Uint8Array {
+  return typeof data === 'string' ? textToBytes(data) : data;
+}
+
+function padToBlock(data: Uint8Array): Uint8Array {
+  if (data.length % BLOCK_SIZE === 0) return data;
+  const padLen = BLOCK_SIZE - (data.length % BLOCK_SIZE);
+  const padArr = new Uint8Array(padLen);
+  return concat([data, padArr]);
+}
+
+function isEndBlock(block: Uint8Array): boolean {
+  for (let i = 0; i < BLOCK_SIZE; i++) if (block[i] !== 0) return false;
+  return true;
+}
+
+function parseOctal(bytes: Uint8Array, offset: number, length: number): number {
+  let str = '';
+  for (let i = 0; i < length; i++) {
+    const c = bytes[offset + i];
+    if (c === undefined || c === 0 || c === 32) break; // null or space
+    str += String.fromCharCode(c);
+  }
+  if (!str) return 0;
+  return Number.parseInt(str.trim(), 8) || 0;
+}
+
+function readString(bytes: Uint8Array, offset: number, length: number): string {
+  let end = offset;
+  const max = offset + length;
+  while (end < max && bytes[end] !== 0) end++;
+  return bytesToText(bytes.subarray(offset, end));
+}
+
+function packSync(files: TarFilesInput, mtimeOverride?: number): Uint8Array {
+  const chunks: Uint8Array[] = [];
+  const now = mtimeOverride ?? Math.floor(Date.now() / 1000);
+  for (const rawName in files) {
+    const name = rawName.replace(/\\+/g, '/');
+    const file = files[rawName];
+    if (file === undefined) continue; // skip missing entries (empty content is still packed)
+    const content = normalizeInput(file);
+    const header = createHeader(name, content.length, now);
+    chunks.push(header);
+    chunks.push(content);
+    if (content.length % BLOCK_SIZE !== 0) {
+      const padLen = BLOCK_SIZE - (content.length % BLOCK_SIZE);
+      chunks.push(new Uint8Array(padLen));
+    }
+  }
+  // two zero blocks terminator
+  chunks.push(new Uint8Array(BLOCK_SIZE));
+  chunks.push(new Uint8Array(BLOCK_SIZE));
+  return concat(chunks);
+}
+
+function unpackSync(tarBytes: Uint8Array): TarUnpacked {
+  const out: TarUnpacked = {};
+  let offset = 0;
+  const len = tarBytes.length;
+  let zeroCount = 0;
+  while (offset + BLOCK_SIZE <= len) {
+    const block = tarBytes.subarray(offset, offset + BLOCK_SIZE);
+    offset += BLOCK_SIZE;
+    if (isEndBlock(block)) {
+      zeroCount++;
+      if (zeroCount === 2) break;
+      continue;
+    }
+    zeroCount = 0;
+
+    const name = readString(block, 0, 100);
+    if (!name) continue; // skip invalid
+    const size = parseOctal(block, 124, 12);
+    const typeflag = block[156];
+    const fileData = tarBytes.subarray(offset, offset + size);
+    // Only regular files (typeflag '0' or NUL) are materialized; other
+    // entry types are skipped, but we still advance past their data
+    if (typeflag === 48 /* '0' */ || typeflag === 0) {
+      out[name] = fileData.slice(); // copy
+    }
+    // advance with padding
+    offset += padToBlock(fileData).length;
+  }
+  return out;
+}
+
+const pack: TarNamespace['pack'] = async (files, options) => {
+  return packSync(files, options?.mtime);
+};
+
+const unpack: TarNamespace['unpack'] = async (bytes) => {
+  return unpackSync(bytes);
+};
+
+export const TAR = {
+  pack,
+  unpack,
+} satisfies TarNamespace;
diff --git a/packages/vault-core/src/utils/archive/zip/index.spec.ts b/packages/vault-core/src/utils/archive/zip/index.spec.ts
new file mode 100644
index 0000000000..1001e9172f
--- /dev/null
+++ b/packages/vault-core/src/utils/archive/zip/index.spec.ts
@@ -0,0 +1,41 @@
+// Bun test runner specs for ZIP utilities (run with: bun test)
+
+import { describe, it } from 'bun:test';
+import assert from 'node:assert/strict';
+import { ZIP } from './index';
+
+function bytes(n: number): Uint8Array {
+  const a = new Uint8Array(n);
+  for (let i = 0; i < n; i++) a[i] = (i * 19 + 7) % 256;
+  return a;
+}
+
+describe('ZIP.pack/unpack', () => {
+  it('archives and extracts multiple files preserving content', async () => {
+    const files = {
+      'a.txt': 'Hello world',
+      'b.bin': bytes(256),
+      'nested/c.txt': 'Nested file',
+    };
+    const zipped = await ZIP.pack(files, { level: 6 });
+    assert.ok(zipped.length > 20);
+    const out = await ZIP.unpack(zipped);
+    // Basic presence checks
+    assert.ok(out['a.txt']);
+    assert.ok(out['b.bin']);
+    assert.ok(out['nested/c.txt']);
+    // Content verification
+    const decoder = new TextDecoder();
+    assert.equal(decoder.decode(out['a.txt']), 'Hello world');
+    assert.equal(decoder.decode(out['nested/c.txt']), 'Nested file');
+    const originalBin = files['b.bin'] as Uint8Array;
+    const extractedBin = out['b.bin'];
+    assert.equal(extractedBin.length, originalBin.length);
+    for (let i = 0; i < originalBin.length; i++) {
+      if (originalBin[i] !== extractedBin[i]) {
+        assert.fail(`byte mismatch at ${i}`);
+      }
+    }
+  });
+});
diff --git a/packages/vault-core/src/utils/archive/zip/index.ts b/packages/vault-core/src/utils/archive/zip/index.ts
new file mode 100644
index 0000000000..170a0167bf
--- /dev/null
+++ b/packages/vault-core/src/utils/archive/zip/index.ts
@@ -0,0 +1,37 @@
+import { strToU8, unzipSync, zipSync } from 'fflate';
+
+export type ZipInputFile = Uint8Array | string;
+
+export type ZipNamespace = {
+  /** Create a zip archive (Uint8Array) from a map of filename->content */
+  pack(
+    files: Record<string, ZipInputFile>,
+    options?: { level?: 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 },
+  ): Promise<Uint8Array>;
+  /** Extract a zip archive into a map of filename->bytes */
+  unpack(bytes: Uint8Array): Promise<Record<string, Uint8Array>>;
+};
+
+function normalizeFiles(
+  files: Record<string, ZipInputFile>,
+): Record<string, Uint8Array> {
+  const out: Record<string, Uint8Array> = {};
+  for (const [name, value] of Object.entries(files)) {
+    out[name] = typeof value === 'string' ? strToU8(value) : value;
+  }
+  return out;
+}
+
+const pack: ZipNamespace['pack'] = async (files, options) => {
+  const normalized = normalizeFiles(files);
+  return zipSync(normalized, { level: options?.level ?? 6 }); // default is 6
+};
+
+const unpack: ZipNamespace['unpack'] = async (bytes) => {
+  return unzipSync(bytes);
+};
+
+export const ZIP = {
+  pack,
+  unpack,
+} satisfies ZipNamespace;
diff --git a/packages/vault-core/src/utils/encoding/gzip/index.spec.ts b/packages/vault-core/src/utils/encoding/gzip/index.spec.ts
new file mode 100644
index 0000000000..769a3d83ed
--- /dev/null
+++ b/packages/vault-core/src/utils/encoding/gzip/index.spec.ts
@@ -0,0 +1,54 @@
+// Bun test runner specs for GZIP utilities (run with: bun test)
+
+import { describe, it } from 'bun:test';
+import assert from 'node:assert/strict';
+import { GZIP } from './index';
+
+const textSample = 'The quick brown fox jumps over the lazy dog';
+
+function randomBytes(len: number): Uint8Array {
+  const arr = new Uint8Array(len);
+  for (let i = 0; i < len; i++) arr[i] = (i * 31 + 17) % 256; // deterministic pattern
+  return arr;
+}
+
+describe('GZIP.encode/decode', () => {
+  it('roundtrips UTF-8 text (bytes path)', async () => {
+    const gz = await GZIP.encode(textSample); // default bytes output
+    const outBytes = await GZIP.decode(gz); // default bytes
+    const decoded = new TextDecoder().decode(outBytes as Uint8Array);
+    assert.equal(decoded, textSample);
+  });
+
+  it('roundtrips binary data', async () => {
+    const bytes = randomBytes(1024);
+    const gz = await GZIP.encode(bytes, { level: 9 });
+    const raw = await GZIP.decode(gz);
+    assert.equal(raw.length, bytes.length);
+    for (let i = 0; i < raw.length; i++) {
+      if (raw[i] !== bytes[i]) {
+        assert.fail(`mismatch at index ${i}`);
+      }
+    }
+  });
+
+  it('produces base64 output and decodes back to text', async () => {
+    const b64 = await GZIP.encode(textSample, { output: 'base64' });
+    assert.equal(typeof b64, 'string');
+    const decoded = await GZIP.decode(b64 as string, {
+      inputEncoding: 'base64',
+      output: 'string',
+    });
+    assert.equal(decoded, textSample);
+  });
+
+  it('errors when passing string without base64 flag', async () => {
+    let threw = false;
+    try {
+      await GZIP.decode('not base64 data');
+    } catch {
+      threw = true;
+    }
+    assert.ok(threw);
+  });
+});
diff --git a/packages/vault-core/src/utils/encoding/gzip/index.ts b/packages/vault-core/src/utils/encoding/gzip/index.ts
new file mode 100644
index 0000000000..50e9bac9b3
--- /dev/null
+++ b/packages/vault-core/src/utils/encoding/gzip/index.ts
@@ -0,0 +1,107 @@
+import { gunzipSync, gzipSync, strFromU8, strToU8 } from 'fflate';
+
+type Level = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9;
+
+export type GzipCompressOptions = {
+  level?: Level;
+  output?: 'bytes' | 'base64';
+};
+
+export type GzipDecompressOptions = {
+  inputEncoding?: 'raw' | 'base64';
+  output?: 'bytes' | 'string';
+};
+
+export type GzipNamespace = {
+  /** Gzip-compress data (Uint8Array or string) into bytes (default) or base64 string */
+  encode: typeof encode;
+  /** Gzip-decompress data (Uint8Array or base64 string) into bytes (default) or string */
+  decode: typeof decode;
+};
+
+const toUint8 = (data: Uint8Array | string): Uint8Array =>
+  typeof data === 'string' ? strToU8(data) : data;
+
+const toBase64 = (bytes: Uint8Array): string => {
+  let binary = '';
+  for (let i = 0; i < bytes.length; i++) {
+    const byte = bytes[i];
+    if (byte === undefined) continue;
+    binary += String.fromCharCode(byte);
+  }
+  // btoa is available in browsers; for environments without btoa, a polyfill would be needed upstream.
+  return btoa(binary);
+};
+
+const fromBase64 = (b64: string): Uint8Array => {
+  const bin = atob(b64);
+  const arr = new Uint8Array(bin.length);
+  for (let i = 0; i < bin.length; i++) arr[i] = bin.charCodeAt(i);
+  return arr;
+};
+
+// Overloaded encode implementation
+function encode(data: Uint8Array | string): Promise<Uint8Array>;
+function encode(
+  data: Uint8Array | string,
+  options: { output: 'bytes'; level?: Level },
+): Promise<Uint8Array>;
+function encode(
+  data: Uint8Array | string,
+  options: { output: 'base64'; level?: Level },
+): Promise<string>;
+function encode(
+  data: Uint8Array | string,
+  options?: GzipCompressOptions,
+): Promise<Uint8Array | string>;
+async function encode(
+  data: Uint8Array | string,
+  options?: GzipCompressOptions,
+) {
+  const raw = gzipSync(toUint8(data), { level: options?.level ?? 6 }); // default is 6
+  if (options?.output === 'base64') return toBase64(raw);
+  return raw;
+}
+
+// Overloaded decode implementation
+function decode(data: Uint8Array): Promise<Uint8Array>;
+function decode(
+  data: Uint8Array,
+  options: { output: 'bytes'; inputEncoding?: 'raw' | 'base64' },
+): Promise<Uint8Array>;
+function decode(
+  data: Uint8Array,
+  options: { output: 'string'; inputEncoding?: 'raw' | 'base64' },
+): Promise<string>;
+function decode(
+  data: string,
+  options: { inputEncoding: 'base64'; output?: 'bytes' },
+): Promise<Uint8Array>;
+function decode(
+  data: string,
+  options: { inputEncoding: 'base64'; output: 'string' },
+): Promise<string>;
+function decode(
+  data: Uint8Array | string,
+  options?: GzipDecompressOptions,
+): Promise<Uint8Array | string>;
+async function decode(
+  data: Uint8Array | string,
+  options?: GzipDecompressOptions,
+) {
+  let bytes: Uint8Array;
+  if (typeof data === 'string') {
+    if ((options?.inputEncoding ?? 'raw') !== 'base64') {
+      throw new Error('String input requires options.inputEncoding = "base64"');
+    }
+    bytes = fromBase64(data);
+  } else {
+    bytes = data;
+  }
+  const out = gunzipSync(bytes);
+  if (options?.output === 'string') return strFromU8(out);
+  return out;
+}
+
+// Export using functions with overloads
+export const GZIP = { encode, decode } satisfies GzipNamespace;
diff --git a/packages/vault-core/src/utils/encoding/index.ts b/packages/vault-core/src/utils/encoding/index.ts
new file mode 100644
index 0000000000..cd6b661872
--- /dev/null
+++ b/packages/vault-core/src/utils/encoding/index.ts
@@ -0,0 +1 @@
+export * from './gzip';
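Since GZIP operates on raw bytes and TAR produces them, the two utilities compose directly into `.tar.gz` bundles. A minimal sketch under that assumption (the barrel import paths are illustrative; both barrels re-export these namespaces):

```ts
import { TAR } from './utils/archive'; // illustrative path to the archive barrel
import { GZIP } from './utils/encoding'; // illustrative path to the encoding barrel

// Pack a deterministic tarball (fixed mtime), then gzip it.
const tarball = await TAR.pack(
  { 'notes/a.md': '# Hello', 'data.bin': new Uint8Array([1, 2, 3]) },
  { mtime: 0 },
);
const tgz = await GZIP.encode(tarball, { output: 'bytes', level: 9 });

// Reverse: gunzip back to tar bytes, then unpack to a filename->bytes map.
const files = await TAR.unpack(await GZIP.decode(tgz));
console.log(new TextDecoder().decode(files['notes/a.md'])); // "# Hello"
```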
diff --git a/packages/vault-core/src/utils/format/csv/index.spec.ts b/packages/vault-core/src/utils/format/csv/index.spec.ts
new file mode 100644
index 0000000000..000e647b30
--- /dev/null
+++ b/packages/vault-core/src/utils/format/csv/index.spec.ts
@@ -0,0 +1,157 @@
+import { describe, it } from 'bun:test'; // run with: bun test
+import assert from 'node:assert/strict';
+import { CSV } from './index';
+
+describe('csv.CSV.parse', () => {
+  it('parses with headers (default) into objects', () => {
+    const input = 'name,age\nAlice,30\nBob,25';
+    const result = CSV.parse(input);
+    assert.equal(result.length, 2);
+    assert.deepEqual(result[0], { name: 'Alice', age: '30' });
+    assert.deepEqual(result[1], { name: 'Bob', age: '25' });
+  });
+
+  it('parses without headers into row arrays', () => {
+    const input = 'name,age\nAlice,30';
+    const result = CSV.parse(input, { headers: false });
+    assert.deepEqual(result, [
+      ['name', 'age'],
+      ['Alice', '30'],
+    ]);
+  });
+
+  it('handles quotes, escaped quotes and commas', () => {
+    const input = 'col1,col2\n"a,b","c""d"';
+    const result = CSV.parse(input);
+    assert.deepEqual(result[0], { col1: 'a,b', col2: 'c"d' });
+  });
+
+  it('parses CRLF line endings and embedded newline inside quoted field', () => {
+    const input = 'h1,h2\r\n"line1\nline2",value\r\nlast,entry';
+    const result = CSV.parse(input);
+    assert.equal(result.length, 2);
+    assert.deepEqual(result[0], { h1: 'line1\nline2', h2: 'value' });
+    assert.deepEqual(result[1], { h1: 'last', h2: 'entry' });
+  });
+});
+
+describe('csv.CSV.stringify', () => {
+  it('stringifies object rows with headers', () => {
+    const data = [
+      { name: 'Alice', age: 30 },
+      { name: 'Bob', age: 25 },
+    ];
+    const text = CSV.stringify(data);
+    const lines = text.split('\n');
+    assert.equal(lines[0], 'name,age');
+    assert.equal(lines[1], 'Alice,30');
+    assert.equal(lines[2], 'Bob,25');
+  });
+
+  it('stringifies raw rows without headers', () => {
+    const rows = [
+      ['name', 'age'],
+      ['Alice', '30'],
+    ];
+    const text = CSV.stringify(rows, { headers: false });
+    assert.equal(text, 'name,age\nAlice,30');
+  });
+
+  it('quotes fields containing delimiter, newline, quotes, or surrounding space', () => {
+    const rows = [['a,comma', 'multi\nline', '"quoted"', ' spaced ']];
+    const text = CSV.stringify(rows, { headers: false });
+    assert.equal(text, '"a,comma","multi\nline","""quoted"""," spaced "');
+  });
+
+  it('roundtrips object rows', () => {
+    const original = [
+      { a: '1', b: 'x,y' },
+      { a: '2', b: 'z' },
+    ];
+    const csvText = CSV.stringify(original);
+    const parsed = CSV.parse(csvText);
+    assert.deepEqual(parsed, original);
+  });
+});
+
+describe('csv options', () => {
+  it('parses with custom delimiter ; and headers', () => {
+    const input = 'name;age\nAlice;30\nBob;25';
+    const result = CSV.parse(input, { delimiter: ';' });
+    assert.deepEqual(result[0], { name: 'Alice', age: '30' });
+  });
+
+  it('stringifies with custom delimiter ;', () => {
+    const data = [
+      { name: 'Alice', age: 30 },
+      { name: 'Bob', age: 25 },
+    ];
+    const text = CSV.stringify(data, { delimiter: ';' });
+    assert.equal(text.split('\n')[0], 'name;age');
+  });
+
+  it("parses with custom quote ' and escape '", () => {
+    const manual = "name,quote\n'alice','say ''hi'''"; // easier to read
+    const result = CSV.parse(manual, { quote: "'", escape: "'" });
+    assert.deepEqual(result[0], { name: 'alice', quote: "say 'hi'" });
+  });
+
+  it('preserves whitespace when trim=false', () => {
+    const input = 'name,age\n Alice , 30 '; // spaces around fields
+    const result = CSV.parse(input, { trim: false });
+    const first = result[0];
+    assert.equal(first?.name, ' Alice ');
+    assert.equal(first?.age, ' 30 ');
+  });
+
+  it('includes empty line when skipEmptyLines=false', () => {
+    const input = 'a,b\n1,2\n\n3,4';
+    const rows = CSV.parse(input, { headers: false, skipEmptyLines: false });
+    // rows: header, first, empty, last
+    assert.equal(rows.length, 4);
+    assert.deepEqual(rows[2], ['']); // an empty row (single empty field) due to parser logic
+  });
+
+  it('skips comment lines with custom comment char ;', () => {
+    const input = ';ignored line\nname,age\nAlice,30';
+    const result = CSV.parse(input, { comment: ';' });
+    assert.equal(result.length, 1); // only Alice row
+  });
+
+  it('roundtrips with mixed options (custom delimiter & quote)', () => {
+    const data = [{ path: 'C;\\temp', text: "O'hara" }];
+    const csvText = CSV.stringify(data, {
+      delimiter: ';',
+      quote: "'",
+      escape: "'",
+    });
+    const parsed = CSV.parse(csvText, {
+      delimiter: ';',
+      quote: "'",
+      escape: "'",
+    });
+    assert.deepEqual(parsed, [{ path: 'C;\\temp', text: "O'hara" }]);
+  });
+
+  it('handles very large field containing delimiters and quotes', () => {
+    const longSegment = 'segment,'; // contains delimiter
+    const repeated = `${Array.from({ length: 500 }, () => longSegment).join('')}"tail"`;
+    const data = [{ big: repeated, other: 'x' }];
+    const csvText = CSV.stringify(data); // default options
+    // Ensure it quoted the first line's big field (starts with header line then quoted field)
+    const lines = csvText.split('\n');
+    assert.ok(lines[1]?.startsWith('"'));
+    const parsed = CSV.parse(csvText);
+    assert.equal(parsed[0]?.big, repeated);
+    assert.equal(parsed[0]?.other, 'x');
+  });
+
+  it('treats comment char inside quotes as literal text', () => {
+    const input =
+      '#comment line\nname,remark\nAlice,"#not a comment"\nBob,valid';
+    const parsed = CSV.parse(input, { comment: '#' });
+    assert.equal(parsed.length, 2); // two data rows
+    assert.deepEqual(parsed[0], { name: 'Alice', remark: '#not a comment' });
+    assert.deepEqual(parsed[1], { name: 'Bob', remark: 'valid' });
+  });
+});
diff --git a/packages/vault-core/src/utils/format/csv/index.ts b/packages/vault-core/src/utils/format/csv/index.ts
new file mode 100644
index 0000000000..1ec1221027
--- /dev/null
+++ b/packages/vault-core/src/utils/format/csv/index.ts
@@ -0,0 +1,233 @@
+export type CsvOptions = {
+  /** Character used to separate values. Default is comma (,). */
+  delimiter?: string;
+  /** Character used to quote values. Default is double quote ("). */
+  quote?: string;
+  /** Character used to escape quotes inside quoted values. Default is double quote (") (e.g. "foo ""bar"" baz"). */
+  escape?: string;
+  /** Whether to trim whitespace around values. Default is true. */
+  trim?: boolean;
+  /** Whether the first row contains headers. Default is true. */
+  headers?: boolean;
+  /** Whether to skip empty lines. Default is true. */
+  skipEmptyLines?: boolean;
+  /** If comments are included, the character used to denote them. Default is #. */
+  comment?: string;
+};
+
+export type CsvNamespace = {
+  /**
+   * Parse CSV text. Default (headers omitted or true) returns array of objects (keyed by header row).
+   * When headers=false, returns a 2D string array of raw rows.
+   */
+  parse<T extends Record<string, string> = Record<string, string>>(
+    text: string,
+    options?: CsvOptions & { headers?: true },
+  ): T[];
+  parse(text: string, options: CsvOptions & { headers: false }): string[][];
+  parse<T extends Record<string, string> = Record<string, string>>(
+    text: string,
+    options?: CsvOptions & { headers?: boolean },
+  ): T[] | string[][];
+
+  /**
+   * Stringify object rows (default, headers omitted or true) or raw 2D string arrays (headers=false).
+   */
+  stringify(
+    data: Record<string, unknown>[],
+    options?: CsvOptions & { headers?: true },
+  ): string;
+  stringify(data: string[][], options: CsvOptions & { headers: false }): string;
+  stringify(
+    data: Record<string, unknown>[] | string[][],
+    options?: CsvOptions & { headers?: boolean },
+  ): string;
+};
+
+const defaultOpts: Required<CsvOptions> = {
+  delimiter: ',',
+  quote: '"',
+  escape: '"',
+  trim: true,
+  headers: true,
+  skipEmptyLines: true,
+  comment: '#',
+};
+
+/**
+ * Parse a CSV string into objects (default, headers=true) or raw row arrays (headers=false).
+ */
+function parseCsv<T extends Record<string, string> = Record<string, string>>(
+  input: string,
+  options?: CsvOptions & { headers?: true },
+): T[];
+function parseCsv(
+  input: string,
+  options: CsvOptions & { headers: false },
+): string[][];
+function parseCsv<T extends Record<string, string> = Record<string, string>>(
+  input: string,
+  options?: CsvOptions & { headers?: boolean },
+): T[] | string[][] {
+  const opts = { ...defaultOpts, ...options };
+
+  const rows: string[][] = [];
+  let current: string[] = [];
+  let field = '';
+  let inQuotes = false;
+
+  const delimiterOpt = opts.delimiter;
+  const quoteOpt = opts.quote;
+  const escapeOpt = opts.escape;
+
+  const pushField = () => {
+    let val = field;
+    if (opts.trim) val = val.trim();
+    current.push(val);
+    field = '';
+  };
+
+  const pushRow = () => {
+    if (!(opts.skipEmptyLines && current.length === 1 && current[0] === '')) {
+      rows.push(current);
+    }
+    current = [];
+  };
+
+  for (let i = 0; i < input.length; i++) {
+    const char = input[i];
+    const next = input[i + 1];
+
+    // Handle comments at start of line
+    if (
+      !inQuotes &&
+      char === opts.comment &&
+      (i === 0 || input[i - 1] === '\n' || input[i - 1] === '\r')
+    ) {
+      // Skip until end of line
+      while (i < input.length && input[i] !== '\n') i++;
+      continue;
+    }
+
+    if (inQuotes) {
+      if (char === escapeOpt && next === quoteOpt) {
+        field += quoteOpt;
+        i++; // skip escaped quote
+      } else if (char === quoteOpt) {
+        inQuotes = false;
+      } else {
+        field += char;
+      }
+    } else {
+      if (char === quoteOpt) {
+        inQuotes = true;
+      } else if (char === delimiterOpt) {
+        pushField();
+      } else if (char === '\n') {
+        pushField();
+        pushRow();
+      } else if (char === '\r') {
+        // CRLF support: ignore \r; the row break is handled at \n
+      } else {
+        field += char;
+      }
+    }
+  }
+  // Flush last field & row
+  pushField();
+  pushRow();
+
+  // If headers !== false → return array of objects
+  if (opts.headers !== false && rows.length > 0) {
+    const [headerRow, ...body] = rows;
+    return body.map((row) => {
+      const obj: Record<string, string> = {};
+      for (const [idx, key] of headerRow?.entries() ?? []) {
+        obj[key] = row[idx] ?? '';
+      }
+      return obj as T;
+    });
+  }
+
+  return rows;
+}
+
+// (Removed conditional utility types in favor of overloads for clearer typing.)
+
+/**
+ * Stringify arrays or objects into a CSV string.
+ */
+// Type guards to discriminate between object row input and 2D string array input.
+function isObjectRowArray(
+  data: Record<string, unknown>[] | string[][],
+): data is Record<string, unknown>[] {
+  return data.length === 0 || !Array.isArray(data[0]);
+}
+
+function is2DStringArray(
+  data: Record<string, unknown>[] | string[][],
+): data is string[][] {
+  return data.length === 0 || Array.isArray(data[0]);
+}
+
+// Overloads: callers get proper type expectations without needing casts.
+function stringifyCsv(
+  data: Record<string, unknown>[],
+  options?: CsvOptions & { headers?: true },
+): string;
+function stringifyCsv(
+  data: string[][],
+  options: CsvOptions & { headers: false },
+): string;
+function stringifyCsv(
+  data: Record<string, unknown>[] | string[][],
+  options?: CsvOptions & { headers?: boolean },
+): string {
+  const opts = { ...defaultOpts, ...options };
+
+  const rows: string[][] = [];
+
+  // Treat undefined headers as true (consistent with defaults)
+  if (opts.headers !== false && isObjectRowArray(data) && data.length > 0) {
+    const firstRow = data[0];
+    if (firstRow === undefined) return '';
+    const keys = Object.keys(firstRow);
+    rows.push(keys);
+    for (const obj of data) {
+      rows.push(keys.map((k) => String(obj[k] ?? '')));
+    }
+  } else if (is2DStringArray(data)) {
+    // Either headers explicitly false or user passed raw rows.
+    for (const row of data) {
+      // Ensure all values are stringified (in case of accidental non-string entries)
+      rows.push(row.map((v) => String(v)));
+    }
+  }
+
+  const needsQuoting = (val: string) =>
+    val.includes(opts.delimiter) ||
+    val.includes('\n') ||
+    val.includes('\r') ||
+    val.includes(opts.quote) ||
+    /^\s|\s$/.test(val);
+
+  return rows
+    .map((row) =>
+      row
+        .map((val) => {
+          let v = String(val);
+          if (needsQuoting(v)) {
+            v = v.replaceAll(opts.quote, opts.escape + opts.quote);
+            return opts.quote + v + opts.quote;
+          }
+          return v;
+        })
+        .join(opts.delimiter),
+    )
+    .join('\n');
+}
+
+export const CSV = {
+  parse: parseCsv,
+  stringify: stringifyCsv,
+} satisfies CsvNamespace;
diff --git a/packages/vault-core/src/utils/format/index.ts b/packages/vault-core/src/utils/format/index.ts
new file mode 100644
index 0000000000..a2956837bd
--- /dev/null
+++ b/packages/vault-core/src/utils/format/index.ts
@@ -0,0 +1,5 @@
+export * from './csv';
+export * from './jsonc';
+export * from './jsonl';
+export * from './markdown';
+export * from './yaml';
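The parse/stringify overloads above are what keep call sites cast-free: `headers` defaulting to true yields object rows, while an explicit `headers: false` flips both directions to raw `string[][]`. A quick sketch (the barrel import path is illustrative):

```ts
import { CSV } from './utils/format'; // illustrative path to the format barrel

// Object rows: the header line is derived from the first row's keys.
const text = CSV.stringify([{ id: '1', name: 'Ada' }]); // "id,name\n1,Ada"
const objs = CSV.parse<{ id: string; name: string }>(text); // typed object rows

// Raw rows: `headers: false` switches both directions to string[][].
const rows = CSV.parse('x,y\n1,2', { headers: false }); // [['x','y'],['1','2']]
const back = CSV.stringify(rows, { headers: false }); // "x,y\n1,2"
```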
diff --git a/packages/vault-core/src/utils/format/jsonc/index.spec.ts b/packages/vault-core/src/utils/format/jsonc/index.spec.ts
new file mode 100644
index 0000000000..31ebdbc4a5
--- /dev/null
+++ b/packages/vault-core/src/utils/format/jsonc/index.spec.ts
@@ -0,0 +1,47 @@
+import { describe, it } from 'bun:test';
+import assert from 'node:assert/strict';
+import { JSONC } from './index';
+
+describe('JSONC.parse', () => {
+  it('parses object with single-line comments', () => {
+    const input = `{
+      // comment about a
+      "a": 1,
+      "b": 2 // trailing comment
+    }`;
+    const result = JSONC.parse(input);
+    assert.deepEqual(result, { a: 1, b: 2 });
+  });
+
+  it('parses object with multi-line comments and trailing commas', () => {
+    const input = `{
+      /* block comment */
+      "a": 1,
+      "b": 2,
+    }`;
+    const result = JSONC.parse(input);
+    assert.deepEqual(result, { a: 1, b: 2 });
+  });
+
+  it('applies reviver function', () => {
+    const input = '{"a":1,"b":2}';
+    const result = JSONC.parse(input, (_, v) =>
+      typeof v === 'number' ? v * 10 : v,
+    );
+    assert.deepEqual(result, { a: 10, b: 20 });
+  });
+});
+
+describe('JSONC.stringify', () => {
+  it('stringifies object using JSON.stringify behavior', () => {
+    const obj = { a: 1, b: 'x' };
+    const text = JSONC.stringify(obj);
+    assert.equal(text, JSON.stringify(obj));
+  });
+
+  it('supports replacer and space parameters', () => {
+    const obj = { a: 1, b: 2 };
+    const text = JSONC.stringify(obj, (k, v) => (k === 'b' ? undefined : v), 2);
+    assert.equal(text, JSON.stringify({ a: 1 }, null, 2));
+  });
+});
diff --git a/packages/vault-core/src/utils/format/jsonc/index.ts b/packages/vault-core/src/utils/format/jsonc/index.ts
new file mode 100644
index 0000000000..01451882c5
--- /dev/null
+++ b/packages/vault-core/src/utils/format/jsonc/index.ts
@@ -0,0 +1,37 @@
+export type JsoncNamespace = {
+  /**
+   * Minimal JSONC parser/stringifier supporting comments and trailing commas.
+   * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse MDN reference}
+   */
+  parse(
+    text: string,
+    reviver?: (this: unknown, key: string, value: unknown) => unknown,
+  ): Record<string, unknown>;
+  /**
+   * Minimal JSONC stringifier (currently identical to JSON.stringify).
+   * Does not add comments or trailing commas.
+   * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify MDN reference}
+   */
+  stringify(
+    obj: Record<string, unknown>,
+    replacer?: (this: unknown, key: string, value: unknown) => unknown,
+    space?: string | number,
+  ): string;
+};
+
+const parse: JsoncNamespace['parse'] = (text, reviver) => {
+  // Simple JSONC parser that removes comments and trailing commas.
+  // Note: the regexes are not string-aware, so comment-like sequences
+  // inside string values would also be stripped.
+  const noComments = text
+    .replace(/\/\/.*$/gm, '') // Remove single-line comments
+    .replace(/\/\*[\s\S]*?\*\//g, '') // Remove multi-line comments
+    .replace(/,\s*([}\]])/g, '$1'); // Remove trailing commas
+
+  return JSON.parse(noComments, reviver);
+};
+
+const stringify: JsoncNamespace['stringify'] = JSON.stringify;
+
+export const JSONC = {
+  parse,
+  stringify,
+} satisfies JsoncNamespace;
diff --git a/packages/vault-core/src/utils/format/jsonl/index.spec.ts b/packages/vault-core/src/utils/format/jsonl/index.spec.ts
new file mode 100644
index 0000000000..40304444bf
--- /dev/null
+++ b/packages/vault-core/src/utils/format/jsonl/index.spec.ts
@@ -0,0 +1,39 @@
+import { describe, it } from 'bun:test'; // run with: bun test
+import assert from 'node:assert/strict';
+import { JSONL } from './index';
+
+describe('JSONL.parse', () => {
+  it('parses multiple lines into objects', () => {
+    const input = '{"a":1}\n{"b":2}\n';
+    const result = JSONL.parse(input);
+    assert.deepEqual(result, [{ a: 1 }, { b: 2 }]);
+  });
+
+  it('returns empty array for blank input', () => {
+    const result = JSONL.parse(' ');
+    assert.deepEqual(result, []);
+  });
+
+  it('throws with line number on invalid JSON', () => {
+    const input = '{"a":1}\n{"b":}\n{"c":3}';
+    try {
+      JSONL.parse(input);
+      assert.fail('Expected error');
+    } catch (e) {
+      assert.ok(e instanceof Error);
+      assert.match(e.message, /Invalid JSONL at line 2/);
+    }
+  });
+});
+
+describe('JSONL.stringify', () => {
+  it('stringifies array of objects to JSONL', () => {
+    const arr = [{ a: 1 }, { b: 2 }];
+    const text = JSONL.stringify(arr);
+    assert.equal(text, '{"a":1}\n{"b":2}');
+  });
+
+  it('throws on empty array', () => {
+    assert.throws(() => JSONL.stringify([]), /non-empty array/);
+  });
+});
diff --git a/packages/vault-core/src/utils/format/jsonl/index.ts b/packages/vault-core/src/utils/format/jsonl/index.ts
new file mode 100644
index 0000000000..49e938c6fe
--- /dev/null
+++ b/packages/vault-core/src/utils/format/jsonl/index.ts
@@ -0,0 +1,50 @@
+export type JsonlNamespace = {
+  /**
+   * Parses a JSON Lines (JSONL) formatted string into an array of objects.
+   * Each line should be a valid JSON object.
+   * @see {@link https://jsonlines.org/ JSON Lines specification}
+   * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse MDN reference}
+   */
+  parse(text: string): Record<string, unknown>[];
+  /**
+   * Converts an array of objects into a JSON Lines (JSONL) formatted string.
+   * Each object will be serialized as a JSON string on its own line.
+   * @see {@link https://jsonlines.org/ JSON Lines specification}
+   * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify MDN reference}
+   */
+  stringify(arr: Record<string, unknown>[]): string;
+};
+
+const parse: JsonlNamespace['parse'] = (text) => {
+  if (!text || text.trim().length === 0) return [];
+  const lines = text.split(/\r?\n/).filter((line) => line.trim().length > 0);
+  const result = [];
+
+  // Parse each line as JSON, collecting objects and throwing on errors
+  for (const [i, line] of lines.entries()) {
+    try {
+      const obj = JSON.parse(line);
+      if (obj && typeof obj === 'object' && !Array.isArray(obj)) {
+        result.push(obj as Record<string, unknown>);
+      }
+    } catch (error) {
+      // Re-throw with line number for easier debugging
+      throw new Error(`Invalid JSONL at line ${i + 1}: ${line}`, {
+        cause: error,
+      });
+    }
+  }
+  return result;
+};
+
+const stringify: JsonlNamespace['stringify'] = (arr) => {
+  if (!Array.isArray(arr) || arr.length === 0)
+    throw new Error('Input must be a non-empty array');
+  const lines = arr.map((obj) => JSON.stringify(obj));
+  return lines.join('\n');
+};
+
+export const JSONL = {
+  parse,
+  stringify,
+} satisfies JsonlNamespace;
diff --git a/packages/vault-core/src/utils/format/markdown/index.spec.ts b/packages/vault-core/src/utils/format/markdown/index.spec.ts
new file mode 100644
index 0000000000..3967314b2a
--- /dev/null
+++ b/packages/vault-core/src/utils/format/markdown/index.spec.ts
@@ -0,0 +1,51 @@
+import { describe, it } from 'bun:test'; // run with: bun test
+import assert from 'node:assert/strict';
+import { Markdown } from './index';
+
+describe('Markdown.parse', () => {
+  it('parses body without frontmatter gracefully', () => {
+    const input = 'Hello world';
+    const result = Markdown.parse(input);
+    assert.deepEqual(result, { body: 'Hello world', frontmatter: {} });
+  });
+
+  it('parses YAML frontmatter and body', () => {
+    const input =
+      '---\n' +
+      'title: Test Doc\n' +
+      'draft: true\n' +
+      '---\n\n' +
+      'Content here.';
+    const result = Markdown.parse(input);
+    assert.equal(result.body.trim(), 'Content here.');
+    assert.deepEqual(result.frontmatter, { title: 'Test Doc', draft: true });
+  });
+});
+
+describe('Markdown.stringify', () => {
+  it('stringifies with frontmatter', () => {
+    const obj = {
+      body: 'Content here.',
+      frontmatter: { title: 'Test Doc', draft: true },
+    };
+    const md = Markdown.stringify(obj);
+    assert.match(md, /---/);
+    assert.match(md, /title: Test Doc/);
+    assert.match(md, /draft: true/);
+    assert.match(md, /Content here\./);
+  });
+
+  it('stringifies without frontmatter', () => {
+    const obj = { body: 'Just content', frontmatter: {} };
+    const md = Markdown.stringify(obj);
+    assert.equal(md, 'Just content');
+  });
+
+  it('roundtrips frontmatter + body', () => {
+    const original = { body: 'Body text', frontmatter: { a: 1 } };
+    const md = Markdown.stringify(original);
+    const parsed = Markdown.parse(md);
+    assert.deepEqual(parsed.frontmatter, { a: 1 });
+    assert.equal(parsed.body.trim(), 'Body text');
+  });
+});
diff --git a/packages/vault-core/src/utils/format/markdown/index.ts b/packages/vault-core/src/utils/format/markdown/index.ts
new file mode 100644
index 0000000000..01873be2d5
--- /dev/null
+++ b/packages/vault-core/src/utils/format/markdown/index.ts
@@ -0,0 +1,59 @@
+import { YAML } from '../yaml';
+
+type Result = { body: string; frontmatter?: Record<string, unknown> };
+
+export type MarkdownNamespace = {
+  /**
+   * Minimal Markdown parser that parses a Markdown string into body text & frontmatter.
+   * Currently supports YAML frontmatter only.
+   * @see {@link ../yaml/index.ts YAML parser}
+   */
+  parse(text: string): Result;
+  /**
+   * Minimal Markdown stringifier that combines body text & frontmatter into a Markdown string.
+   * Currently supports YAML frontmatter only.
+   * @see {@link ../yaml/index.ts YAML stringifier}
+   */
+  stringify(data: Result): string;
+};
+
+const FRONTMATTER_REGEX = /^---\s*\r?\n([\s\S]*?)\r?\n---\s*\r?\n?/;
+
+const parse: MarkdownNamespace['parse'] = (text) => {
+  if (!text || text.trim().length === 0) return { body: '', frontmatter: {} };
+
+  let body = text;
+  let frontmatter: Record<string, unknown> = {};
+
+  const match = text.match(FRONTMATTER_REGEX);
+  if (match) {
+    const yamlText = match[1];
+    if (yamlText === undefined) return { body: text };
+    body = text.slice(match[0].length);
+    try {
+      frontmatter = YAML.parse(yamlText);
+    } catch (error) {
+      throw new Error('Invalid YAML frontmatter', { cause: error });
+    }
+  }
+
+  return { body, frontmatter };
+};
+
+const stringify: MarkdownNamespace['stringify'] = (data) => {
+  if (!data || typeof data.body !== 'string')
+    throw new Error('Input must have a body string');
+
+  let frontmatterText = '';
+  if (data.frontmatter && Object.keys(data.frontmatter).length > 0) {
+    const yamlText = YAML.stringify(data.frontmatter);
+    frontmatterText = `---\n${yamlText}---\n\n`;
+  }
+
+  return `${frontmatterText}${data.body}`;
+};
+
+export const Markdown = {
+  parse,
+  stringify,
+} satisfies MarkdownNamespace;
diff --git a/packages/vault-core/src/utils/format/toml/index.ts b/packages/vault-core/src/utils/format/toml/index.ts
new file mode 100644
index 0000000000..e9ac10b0e9
--- /dev/null
+++ b/packages/vault-core/src/utils/format/toml/index.ts
@@ -0,0 +1,14 @@
+import { parse } from 'toml';
+
+export type TomlNamespace = {
+  /** Minimal TOML parser.
+   * @see {@link https://toml.io/en/v1.0.0 TOML specification}
+   */
+  parse(text: string): Record<string, unknown>;
+  // The 'toml' package does not ship a stringifier, so only parse is exposed.
+  // stringify(data: Record<string, unknown>): string;
+};
+
+export const TOML = {
+  parse,
+} satisfies TomlNamespace;
diff --git a/packages/vault-core/src/utils/format/yaml/index.ts b/packages/vault-core/src/utils/format/yaml/index.ts
new file mode 100644
index 0000000000..c9927266ef
--- /dev/null
+++ b/packages/vault-core/src/utils/format/yaml/index.ts
@@ -0,0 +1,29 @@
+import { parse, stringify } from 'yaml';
+
+// Wanted to use `bun` here, but YAML stringification is simply not available in most runtime implementations.
+
+export type YamlNamespace = {
+  /**
+   * Compliant YAML parser. Follows the YAML 1.2 spec (a superset of JSON).
+   * @see {@link https://yaml.org/spec/1.2.2/ YAML 1.2 specification}
+   * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse MDN reference}
+   */
+  parse(
+    text: string,
+    reviver?: (this: unknown, key: string, value: unknown) => unknown,
+  ): Record<string, unknown>;
+  /**
+   * Compliant YAML stringifier. Follows the YAML 1.2 spec (a superset of JSON).
+   * @see {@link https://yaml.org/spec/1.2.2/ YAML 1.2 specification}
+   * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify MDN reference}
+   */
+  stringify(
+    obj: Record<string, unknown>,
+    replacer?: (this: unknown, key: string, value: unknown) => unknown,
+  ): string;
+};
+
+export const YAML = {
+  parse,
+  stringify,
+} satisfies YamlNamespace;
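The Markdown codec composes the YAML utilities above into frontmatter handling. A small round-trip sketch (barrel import path illustrative; the commented output assumes the `yaml` package's default two-space formatting):

```ts
import { Markdown } from './utils/format'; // illustrative path to the format barrel

const doc = Markdown.stringify({
  body: 'Post body here.',
  frontmatter: { title: 'Hello', draft: false },
});
// ---
// title: Hello
// draft: false
// ---
//
// Post body here.

const { frontmatter, body } = Markdown.parse(doc);
console.log(frontmatter?.title); // "Hello"
```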
diff --git a/packages/vault-core/tests/export-import-roundtrip.spec.ts b/packages/vault-core/tests/export-import-roundtrip.spec.ts
new file mode 100644
index 0000000000..7b1c8b8a7f
--- /dev/null
+++ b/packages/vault-core/tests/export-import-roundtrip.spec.ts
@@ -0,0 +1,104 @@
+import Database from 'bun:sqlite';
+import { test } from 'bun:test';
+import assert from 'node:assert/strict';
+import { drizzle } from 'drizzle-orm/bun-sqlite';
+import { integer, sqliteTable, text } from 'drizzle-orm/sqlite-core';
+import { jsonFormat } from '../src/codecs/json';
+import { defineAdapter } from '../src/core/adapter';
+import {
+  defineTransformRegistry,
+  defineVersions,
+  runStartupSqlMigrations,
+} from '../src/core/migrations';
+import { createVault } from '../src/core/vault';
+
+function createDatabase() {
+  const sqlite = new Database(':memory:');
+  const db = drizzle(sqlite);
+  return { sqlite, db };
+}
+
+const roundtripProfiles = sqliteTable('roundtrip_profiles', {
+  id: text('id').primaryKey(),
+  birthdate: integer('birthdate', { mode: 'timestamp' }),
+  verifiedBirthdate: integer('verified_birthdate', { mode: 'timestamp' }),
+  verificationState: text('verification_state').notNull().default(''),
+  verificationMethod: text('verification_method').notNull().default(''),
+});
+
+const roundtripSchema = {
+  roundtrip_profiles: roundtripProfiles,
+};
+
+const roundtripVersions = defineVersions({
+  tag: '0000',
+  sql: [
+    `CREATE TABLE IF NOT EXISTS roundtrip_profiles (
+      id TEXT PRIMARY KEY,
+      birthdate INTEGER,
+      verified_birthdate INTEGER,
+      verification_state TEXT NOT NULL DEFAULT '',
+      verification_method TEXT NOT NULL DEFAULT ''
+    );`,
+  ],
+});
+
+const roundtripTransforms = defineTransformRegistry({});
+
+const createRoundtripAdapter = defineAdapter(() => ({
+  id: 'roundtrip',
+  schema: roundtripSchema,
+  versions: roundtripVersions,
+  transforms: roundtripTransforms,
+}));
+
+test('export/import roundtrip preserves null and Date columns', async () => {
+  const adapter = createRoundtripAdapter();
+  const sampleDate = new Date('2024-04-05T12:00:00Z');
+
+  const { sqlite: sourceSqlite, db: sourceDb } = createDatabase();
+  let exported: Map<string, File>;
+  try {
+    await runStartupSqlMigrations(adapter.id, adapter.versions, sourceDb);
+    await sourceDb.insert(roundtripProfiles).values({
+      id: 'singleton',
+      birthdate: null,
+      verifiedBirthdate: sampleDate,
+      verificationState: '',
+      verificationMethod: '',
+    });
+
+    const sourceVault = createVault({
+      database: sourceDb,
+      adapters: [adapter],
+    });
+    exported = await sourceVault.exportData({ codec: jsonFormat });
+  } finally {
+    sourceSqlite.close();
+  }
+
+  const { sqlite: targetSqlite, db: targetDb } = createDatabase();
+  try {
+    const targetVault = createVault({
+      database: targetDb,
+      adapters: [adapter],
+    });
+
+    await targetVault.importData({ files: exported, codec: jsonFormat });
+
+    const rows = await targetDb.select().from(roundtripProfiles);
+    assert.equal(rows.length, 1);
+    const row = rows[0];
+    assert.equal(row?.id, 'singleton');
+    assert.equal(row?.birthdate, null);
+    assert.ok(row?.verifiedBirthdate instanceof Date);
+    assert.equal(
+      row?.verifiedBirthdate?.toISOString(),
+      sampleDate.toISOString(),
+    );
+    assert.equal(row?.verificationState, '');
+    assert.equal(row?.verificationMethod, '');
+  } finally {
+    targetSqlite.close();
+  }
+});
diff --git a/packages/vault-core/tests/fixtures/testAdapter.ts b/packages/vault-core/tests/fixtures/testAdapter.ts
new file mode 100644
index 0000000000..97b877eb35
--- /dev/null
+++ b/packages/vault-core/tests/fixtures/testAdapter.ts
@@ -0,0 +1,123 @@
+import { type } from 'arktype';
+import { integer, sqliteTable, text } from 'drizzle-orm/sqlite-core';
+import { jsonFormat } from '../../src/codecs';
+import { defineAdapter } from '../../src/core/adapter';
+import type { MigrationMetadata } from '../../src/core/import/migrationMetadata';
+import { defineIngestor } from '../../src/core/ingestor';
+import {
+  defineTransformRegistry,
+  defineVersions,
+} from '../../src/core/migrations';
+
+function createMemoryFile(
+  name: string,
+  payload: unknown,
+  type = 'application/json',
+): File {
+  const contents =
+    typeof payload === 'string'
+      ? payload
+      : jsonFormat.stringify(payload as Record<string, unknown>);
+  return new File([new Blob([contents], { type })], name, { type });
+}
+
+const testItems = sqliteTable('test_items', {
+  id: integer('id').primaryKey(),
+  name: text('name').notNull(),
+  createdAt: integer('created_at', { mode: 'timestamp' }).notNull(),
+});
+
+export const testSchema = {
+  test_items: testItems,
+};
+
+export const testVersions = defineVersions({
+  tag: '0000',
+  sql: [
+    `CREATE TABLE IF NOT EXISTS test_items (
+      id INTEGER PRIMARY KEY,
+      name TEXT NOT NULL,
+      created_at INTEGER NOT NULL
+    );`,
+  ],
+});
+
+export const testTransforms = defineTransformRegistry({});
+
+export const ingestSchema = type({
+  items: type({
+    id: 'number',
+    name: 'string',
+    createdAt: type('number').pipe((v) => new Date(v)),
+  }).array(),
+});
+
+const jsonIngestor = defineIngestor({
+  matches(file: File) {
+    return file.name.endsWith('.json');
+  },
+  async parse(file) {
+    const text = await file.text();
+    return JSON.parse(text);
+  },
+});
+
+export const TEST_ADAPTER_ID = 'test';
+
+export const createTestAdapter = defineAdapter(() => ({
+  id: TEST_ADAPTER_ID,
+  schema: testSchema,
+  versions: testVersions,
+  transforms: testTransforms,
+  validator: ingestSchema,
+  ingestors: [jsonIngestor],
+}));
+
+export const validIngestData = {
+  items: [
+    { id: 1, name: 'Alpha', createdAt: 1700000000000 },
+    { id: 2, name: 'Beta', createdAt: 1700000000500 },
+  ],
+} satisfies typeof ingestSchema.inferIn; // This represents the expected input shape
+
+export const invalidIngestData = {
+  items: [{ id: 3 } as unknown as (typeof validIngestData)['items'][number]],
+};
+
+export function makeIngestFile(
+  data = validIngestData,
+  name = 'test-data.json',
+): File {
+  const ingestPayload = {
+    items: data.items.map((item) => ({ ...item })),
+  };
+  return createMemoryFile(name, ingestPayload);
+}
+
+export function makeImportFiles(
+  data: typeof ingestSchema.inferOut,
+): Map<string, File> {
+  const files = new Map<string, File>();
+  const records = data.items;
+  for (const item of records) {
+    const filename = `${item.id}.json`;
+    // Use the same path structure as export: vault/adapter/table/filename
+    files.set(
+      `vault/test/test_items/${filename}`,
+      createMemoryFile(filename, item),
+    );
+  }
+  files.set(
+    `__meta__/${TEST_ADAPTER_ID}/migration.json`,
+    createMemoryFile('migration.json', {
+      tag: testVersions[testVersions.length - 1]?.tag ?? null,
+      adapterId: TEST_ADAPTER_ID,
+      source: 'adapter',
+      ledgerTag: null,
+      latestDeclaredTag: testVersions[testVersions.length - 1]?.tag ?? null,
+      versions: testVersions.map((version) => version.tag),
+      exportedAt: new Date(0),
+    } satisfies MigrationMetadata),
+  );
+  return files;
+}
diff --git a/packages/vault-core/tests/import-paths.spec.ts b/packages/vault-core/tests/import-paths.spec.ts
new file mode 100644
index 0000000000..49e5e95d9c
--- /dev/null
+++ b/packages/vault-core/tests/import-paths.spec.ts
@@ -0,0 +1,135 @@
+import Database from 'bun:sqlite';
+import { test } from 'bun:test';
+import assert from 'node:assert/strict';
+import { drizzle } from 'drizzle-orm/bun-sqlite';
+import { jsonFormat } from '../src/codecs/json';
+import { createVault } from '../src/core/vault';
+import {
+  createTestAdapter,
+  ingestSchema,
+  makeImportFiles,
+  testSchema,
+  validIngestData,
+} from './fixtures/testAdapter';
+
+/**
+ * Small helper to create an in-memory DB + vault instance (same pattern as vault.spec.ts)
+ */
+function createDatabase() {
+  const sqlite = new Database(':memory:');
+  const db = drizzle(sqlite);
+  return { sqlite, db };
+}
+
+function createVaultInstance() {
+  const { sqlite, db } = createDatabase();
+  const adapter = createTestAdapter();
+  const vault = createVault({
+    database: db,
+    adapters: [adapter],
+  });
+  return { sqlite, db, adapter, vault };
+}
+
+/**
+ * Utility: transform the keys of a Map by applying a replacer function
+ */
+function remapFileKeys(files: Map<string, File>, fn: (k: string) => string) {
+  const out = new Map<string, File>();
+  for (const [k, v] of files) {
+    out.set(fn(k), v);
+  }
+  return out;
+}
+
+test('import path variants: default path (export shape) works', async () => {
+  const { sqlite, db, vault } = createVaultInstance();
+  try {
+    const ingested = ingestSchema(validIngestData);
+    if ('issues' in ingested) throw new Error('Ingested data has issues');
+    const files = makeImportFiles(ingested);
+    await vault.importData({
+      files,
+      codec: jsonFormat,
+    });
+    const rows = await db.select().from(testSchema.test_items);
+    assert.equal(rows.length, validIngestData.items.length);
+  } finally {
+    sqlite.close();
+  }
+});
+
+test('import path variants: empty base path (adapter/table/filename) works', async () => {
+  const { sqlite, db, vault } = createVaultInstance();
+  try {
+    const ingested = ingestSchema(validIngestData);
+    if ('issues' in ingested) throw new Error('Ingested data has issues');
+    const files = makeImportFiles(ingested);
+    const remapped = remapFileKeys(files, (k) => k.replace(/^vault\//, '')); // remove vault/
+    await vault.importData({
+      files: remapped,
+      codec: jsonFormat,
+    });
+    const rows = await db.select().from(testSchema.test_items);
+    assert.equal(rows.length, validIngestData.items.length);
+  } finally {
+    sqlite.close();
+  }
+});
+
+test('import path variants: non-default base path (data/vault/...) works', async () => {
+  const { sqlite, db, vault } = createVaultInstance();
+  try {
+    const ingested = ingestSchema(validIngestData);
+    if ('issues' in ingested) throw new Error('Ingested data has issues');
+    const files = makeImportFiles(ingested);
+    const remapped = remapFileKeys(files, (k) => `data/${k}`); // prepend data/
+    await vault.importData({
+      files: remapped,
+      codec: jsonFormat,
+    });
+    const rows = await db.select().from(testSchema.test_items);
+    assert.equal(rows.length, validIngestData.items.length);
+  } finally {
+    sqlite.close();
+  }
+});
+
+test('import path variants: multiple folders before vault works', async () => {
+  const { sqlite, db, vault } = createVaultInstance();
+  try {
+    const ingested = ingestSchema(validIngestData);
+    if ('issues' in ingested) throw new Error('Ingested data has issues');
+    const files = makeImportFiles(ingested);
+    const remapped = remapFileKeys(files, (k) => `a/b/${k}`);
+    await vault.importData({
+      files: remapped,
+      codec: jsonFormat,
+    });
+    const rows = await db.select().from(testSchema.test_items);
+    assert.equal(rows.length, validIngestData.items.length);
+  } finally {
+    sqlite.close();
+  }
+});
+
+test('import path variants: duplicate/trailing slashes are tolerated', async () => {
+  const { sqlite, db, vault } = createVaultInstance();
+  try {
+    const ingested = ingestSchema(validIngestData);
+    if ('issues' in ingested) throw new Error('Ingested data has issues');
+    const files = makeImportFiles(ingested);
+    // Inject an extra slash in the base path and in the table segment
+    const remapped = remapFileKeys(files, (k) =>
+      k.replace('vault/', 'vault//').replace('test_items/', 'test_items//'),
+    );
+    await vault.importData({
+      files: remapped,
+      codec: jsonFormat,
+    });
+    const rows = await db.select().from(testSchema.test_items);
+    assert.equal(rows.length, validIngestData.items.length);
+  } finally {
+    sqlite.close();
+  }
+});
diff --git a/packages/vault-core/tests/transforms-alignment.spec.ts b/packages/vault-core/tests/transforms-alignment.spec.ts
new file mode 100644
index 0000000000..c8eef84402
--- /dev/null
+++ b/packages/vault-core/tests/transforms-alignment.spec.ts
@@ -0,0 +1,62 @@
+import { test } from 'bun:test';
+import assert from 'node:assert/strict';
+import type { Adapter } from '../src/core/adapter';
+import { defineAdapter } from '../src/core/adapter';
+import {
+  defineTransformRegistry,
+  defineVersions,
+} from '../src/core/migrations';
+import { testSchema, testVersions } from './fixtures/testAdapter';
+
+test('defineAdapter throws when transforms include extra tags not declared in versions', () => {
+  const transformsWithExtra = defineTransformRegistry({
+    '0001': (value) => value,
+  });
+
+  const createBadAdapter = defineAdapter(
+    // @ts-expect-error transforms/versions mismatch, this should fail the type check
+    (() =>
+      ({
+        id: 'test',
+        schema: testSchema,
+        versions: testVersions,
+        transforms: transformsWithExtra,
+      }) as unknown as Adapter) as () => Adapter,
+  );
+
+  assert.throws(() => createBadAdapter(), /transforms do not match versions/i);
+});
+
+test('defineAdapter throws when transforms are missing required tags from versions', () => {
+  const versions = defineVersions(
+    {
+      tag: '0000',
+      sql: [
+        `CREATE TABLE IF NOT EXISTS test_items (
+          id INTEGER PRIMARY KEY,
+          name TEXT NOT NULL,
+          created_at INTEGER NOT NULL
+        );`,
+      ],
+    },
+    {
+      tag: '0001',
+      sql: [],
+    },
+  );
+
+  const emptyTransforms = defineTransformRegistry({});
+
+  const createBadAdapter = defineAdapter(
+    // @ts-expect-error transforms/versions mismatch, this should fail the type check
+    (() =>
+      ({
+        id: 'test',
+        schema: testSchema,
+        versions,
+        transforms: emptyTransforms,
+      }) as unknown as Adapter) as () => Adapter,
+  );
+
+  assert.throws(() => createBadAdapter(), /transforms do not match versions/i);
+});
diff --git a/packages/vault-core/tsconfig.json b/packages/vault-core/tsconfig.json
new file mode 100644
index 0000000000..5e515b4388
--- /dev/null
+++ b/packages/vault-core/tsconfig.json
@@ -0,0 +1,44 @@
+{
+  // Visit https://aka.ms/tsconfig to read more about this file
+  "compilerOptions": {
+    // File Layout
+    // "rootDir": "./src",
+    // "outDir": "./dist",
+
+    // Environment Settings
+    // See also https://aka.ms/tsconfig/module
+    "module": "esnext",
+    "target": "esnext",
+    "types": ["bun-types"],
+    "moduleResolution": "bundler",
+    // For nodejs:
+    //   "lib": ["esnext"],
+    //   "types": ["node"],
+    //   and npm install -D @types/node
+
+    // Other Outputs
+    "sourceMap": true,
+    "declaration": true,
+    "declarationMap": true,
+
+    // Stricter Typechecking Options
+    "noUncheckedIndexedAccess": true,
+    "exactOptionalPropertyTypes": true,
+
+    // Style Options
+    // "noImplicitReturns": true,
+    // "noImplicitOverride": true,
+    // "noUnusedLocals": true,
+    // "noUnusedParameters": true,
+    // "noFallthroughCasesInSwitch": true,
+    // "noPropertyAccessFromIndexSignature": true,
+
+    // Recommended Options
+    "strict": true,
+    "verbatimModuleSyntax": true,
+    "isolatedModules": true,
+    "noUncheckedSideEffectImports": true,
+    "moduleDetection": "force",
+    "skipLibCheck": true
+  }
+}
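Taken together, the pieces in this change compose into a simple end-to-end flow: ingest a raw file through an adapter, export each row to its own file, and optionally bundle the result for transport. A hedged sketch against the test fixtures above (the monorepo-relative import paths are illustrative, and it assumes the vault runs the adapter's startup migrations on first use, as the import-path specs do):

```ts
import Database from 'bun:sqlite';
import { drizzle } from 'drizzle-orm/bun-sqlite';
import { jsonFormat } from 'packages/vault-core/src/codecs/json';
import { createVault } from 'packages/vault-core/src/core/vault';
import { ZIP } from 'packages/vault-core/src/utils/archive/zip';
import {
  createTestAdapter,
  makeIngestFile,
} from 'packages/vault-core/tests/fixtures/testAdapter';

// In-memory vault, same pattern as the specs above.
const adapter = createTestAdapter();
const vault = createVault({
  database: drizzle(new Database(':memory:')),
  adapters: [adapter],
});

// Ingest a raw JSON export, then write each row out as vault/test/test_items/<id>.json.
await vault.ingestData({ adapter, file: makeIngestFile() });
const files = await vault.exportData({ codec: jsonFormat });

// Optional: bundle the exported files into a single zip for transport.
const entries: Record<string, Uint8Array> = {};
for (const [path, file] of files) {
  entries[path] = new Uint8Array(await file.arrayBuffer());
}
const bundle = await ZIP.pack(entries);
console.log(`bundled ${files.size} files into ${bundle.length} bytes`);
```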