diff --git a/.github/workflows/lint-and-format.yml b/.github/workflows/lint-and-format.yml index b5cbfb2..1b3da33 100644 --- a/.github/workflows/lint-and-format.yml +++ b/.github/workflows/lint-and-format.yml @@ -31,6 +31,9 @@ jobs: - name: Install dependencies run: pnpm install --frozen-lockfile + - name: Typecheck + run: pnpm typecheck + - name: Lint run: pnpm lint diff --git a/.gitignore b/.gitignore index 9637824..548f26f 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,5 @@ dist/ node_modules/ .DS_Store logs/ -data/private_keys.csv diff --git a/README.md b/README.md index dfbe455..6c62920 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ This repo is an Artillery-based load testing suite to measure limits and identif ## Requirements -- **Node.js >= 22** (SDK `@storagehub-sdk/core@0.3.4` declares this) +- **Node.js >= 22** - `pnpm` ## Install @@ -18,12 +18,13 @@ pnpm install Required: - `NETWORK` (`testnet`, `stagenet` or `local`) -Optional: -- none (health checks are unauthenticated) +Per-test required: +- `TEST_MNEMONIC` (required by tests that derive accounts and do SIWE) +- `FILE_KEY` (required by the download test) ## Network configuration -Network URLs/IDs are intentionally **hardcoded** in `src/networks.ts` and are copied from `datahaven-monitor`: +Network URLs/IDs are intentionally **hardcoded** in `src/networks.ts`: - **Testnet**: MSP `https://deo-dh-backend.testnet.datahaven-infra.network` - **Stagenet**: MSP `https://deo-dh-backend.stagenet.datahaven-infra.network` - **Local**: MSP `http://127.0.0.1:8080`, RPC `http://127.0.0.1:9888` @@ -44,18 +45,39 @@ Local notes: - `pnpm fmt:fix` — apply formatting - `pnpm lint` — check lint rules - `pnpm lint:fix` — apply safe lint fixes -- `pnpm test` — build -> preflight -> artillery -- `pnpm test:msp-unauth` — standalone unauth MSP load test (no SIWE, no keys) -- `pnpm test:download` — file download load test (requires SIWE auth + FILE_KEY) +- `pnpm test:run scenarios/<scenario>.yml` — run any scenario (build + 
logs wrapper) -Examples (local): +List available scenarios: ```bash -NETWORK=local pnpm test:msp-unauth +ls scenarios +``` + +Run one: + +```bash +NETWORK=stagenet pnpm test:run scenarios/<scenario>.yml +``` + +Examples (replace the scenario file with anything from `ls scenarios`): + +```bash +NETWORK=local pnpm test:run scenarios/msp.unauth.yml +``` + +```bash +LOG_LEVEL=info LOG_CONSOLE=true \ +NETWORK=stagenet \ +TEST_MNEMONIC="test test test test test test test test test test test junk" \ +pnpm test:run scenarios/examples.getProfile.yml ``` ```bash -NETWORK=local STORAGEHUB_PRIVATE_KEY=0x... pnpm test +LOG_LEVEL=info LOG_CONSOLE=true \ +NETWORK=stagenet \ +TEST_MNEMONIC="test test test test test test test test test test test junk" \ +FILE_KEY="<file key>" \ +pnpm test:run scenarios/download.yml ``` ## Logging @@ -71,11 +93,11 @@ Env vars: Examples: ```bash -LOG_LEVEL=debug NETWORK=testnet pnpm test +LOG_LEVEL=debug NETWORK=testnet pnpm test:run scenarios/msp.unauth.yml ``` ```bash -LOG_LEVEL=info LOG_FILE=./artillery.log NETWORK=testnet pnpm test +LOG_LEVEL=info LOG_FILE=./artillery.log NETWORK=testnet pnpm test:run scenarios/msp.unauth.yml ``` ## Standalone MSP unauth load test @@ -89,11 +111,10 @@ It uses `NETWORK=testnet|stagenet` and the MSP base URL from `src/networks.ts`. Run: ```bash -NETWORK=stagenet pnpm test:msp-unauth +NETWORK=stagenet pnpm test:run scenarios/msp.unauth.yml ``` Knobs (optional): -- `ARTILLERY_WORKERS=4` (true parallel local processes; spawns N concurrent Artillery runs) - `VU_SLEEP_MIN_MS=50` / `VU_SLEEP_MAX_MS=250` (jitter per request loop) - `MSP_TIMEOUT_MS=60000` (override HTTP timeout) @@ -104,55 +125,74 @@ Metrics emitted (counters + histograms): ## Download load test -This test authenticates via SIWE and downloads a file from the MSP, measuring throughput and latency. +This test performs init steps (derive + SIWE) and downloads a file from the MSP, measuring throughput and latency. 
Required env vars: -- `NETWORK` (`testnet` or `stagenet`) +- `NETWORK` (`testnet`, `stagenet` or `local`) +- `TEST_MNEMONIC` - `FILE_KEY` (the file key/hash to download) Run: ```bash -NETWORK=stagenet FILE_KEY=<file key> pnpm test:download +NETWORK=stagenet FILE_KEY=<file key> pnpm test:run scenarios/download.yml ``` Knobs (optional): -- `ARTILLERY_WORKERS=4` (parallel local processes) - `LOG_LEVEL=info` (see Logging section) Metrics emitted: -- `download.siwe.ok`, `download.siwe.ms` (SIWE auth) - `download.file.ok`, `download.file.ms` (file download) - `download.bytes` (total bytes downloaded per request) -- `download.siwe.err`, `download.file.err` (error counters) +- `download.file.err` (error counter) +- `auth.siwe.err` (only if SIWE fails; init steps are muted so only errors surface) + +## How initialization + mute metrics works + +Most scenarios follow the same pattern: +- **Init** (muted): `deriveAccount` → `SIWE` +- **Actions** (not muted): call one or more action processors (e.g. `actionGetProfile`, `downloadFile`) -## Per-VU private keys (Artillery payload) +The muting is controlled by two processor steps: +- `muteMetrics`: while muted, the metrics helper will **only emit `*.err` counters**; it drops ok counters + histograms. +- `unmuteMetrics`: restores normal metric emission for the action phase. -This test expects a per-VU `privateKey` variable from `config.payload` in `scenarios/artillery.yml`. +This keeps summaries focused on action timings while still surfacing setup/auth failures. 
-1) Create `data/private_keys.csv` (ignored by git), based on the example: -- `data/private_keys.example.csv` +### What `deriveAccount` does +- Picks a unique account index (via the local index allocator started by `scripts/run-scenario.ts`) +- Derives an account from `TEST_MNEMONIC` +- Persists `privateKey` (and derivation metadata) into Artillery vars for later steps -Notes: -- `pnpm preflight` will use `STORAGEHUB_PRIVATE_KEY` **if set**, otherwise it will use the **first key** in `data/private_keys.csv`. -- If Artillery does not inject `privateKey` into `context.vars` (depends on engine/runtime), the scenario will fall back to reading keys directly from `data/private_keys.csv` (round-robin). +### What `SIWE` does +- Reads the derived `privateKey` +- Calls the SDK SIWE auth (`mspClient.auth.SIWE(...)`) +- Persists the resulting `__siweSession` into Artillery vars -2) Run: +## How to add a new test + +1) **Create a scenario file** under `scenarios/` (for example `scenarios/myTest.yml`). + +2) **Use the standard template**: +- `config.processor: "../dist/src/processors/index.js"` +- Init steps (muted): `muteMetrics` → `deriveAccount` → `SIWE` → `unmuteMetrics` +- Then call your action processor(s) + +3) Run it via the generic runner: ```bash -NETWORK=stagenet LOG_LEVEL=info pnpm test +NETWORK=stagenet pnpm test:run scenarios/myTest.yml ``` -## Scenario output +(Optional) If you want a shortcut alias, add `test:myTest`: `"pnpm test:run scenarios/myTest.yml"`. + +## Metrics (quick orientation) -Counters: -- `sdk.storagehub.connect.ok` -- `sdk.msp.connect.ok` -- `sdk.disconnect.ok` -- `sdk.connect.error` +Metrics depend on the scenario and processor functions used. 
Common ones: +- `msp.health.ok`, `msp.health.ms`, `msp.info.ok`, `msp.info.ms`, `msp.req.err` +- `action.getProfile.ok`, `action.getProfile.ms`, `action.getProfile.err` +- `download.file.ok`, `download.file.ms`, `download.bytes`, `download.file.err` -Timings: -- `sdk.storagehub.connect.ms` -- `sdk.msp.connect.ms` +When init steps are wrapped with `muteMetrics`/`unmuteMetrics`, only `*.err` counters from init will appear in the summary (ok + histograms are muted). diff --git a/data/private_keys.example.csv b/data/private_keys.example.csv deleted file mode 100644 index ebc493b..0000000 --- a/data/private_keys.example.csv +++ /dev/null @@ -1,4 +0,0 @@ -privateKey -0x5fb92d6e98884f76de468fa3f6278f8807c48bebc13595d45af5bdc4da702133 - - diff --git a/package.json b/package.json index 097cc2c..eeb92f6 100644 --- a/package.json +++ b/package.json @@ -10,15 +10,19 @@ "fmt:fix": "biome format . --write", "lint": "biome lint .", "lint:fix": "biome lint . --write", + "typecheck": "tsc -p tsconfig.json --noEmit", "build": "tsc -p tsconfig.json", - "preflight": "pnpm exec tsx scripts/preflight.ts", - "test": "pnpm build && pnpm preflight && pnpm exec artillery run scenarios/artillery.yml", - "test:msp-unauth": "pnpm build && pnpm exec tsx scripts/run-artillery-parallel.ts scenarios/artillery.msp-unauth.yml", - "test:download": "pnpm build && pnpm exec tsx scripts/run-artillery-parallel.ts scenarios/artillery.download.yml" + "with-logs": "pnpm exec tsx scripts/run-scenario.ts --", + "run:with-logs": "pnpm build && pnpm with-logs", + "test:run": "pnpm run:with-logs artillery run", + + "test:unauth": "pnpm test:run scenarios/msp.unauth.yml", + "test:examples.getProfile": "pnpm test:run scenarios/examples.getProfile.yml", + "test:download": "pnpm test:run scenarios/download.yml" }, "dependencies": { - "@storagehub-sdk/core": "0.3.4", - "@storagehub-sdk/msp-client": "0.3.4", + "@storagehub-sdk/core": "0.4.0", + "@storagehub-sdk/msp-client": "0.4.0", "artillery": "^2.0.0", "pino": 
"^10.1.0", "viem": "^2.42.1" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index eb9277a..549e943 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -9,11 +9,11 @@ importers: .: dependencies: '@storagehub-sdk/core': - specifier: 0.3.4 - version: 0.3.4(typescript@5.9.3)(viem@2.42.1(typescript@5.9.3)) + specifier: 0.4.0 + version: 0.4.0(typescript@5.9.3)(viem@2.42.1(typescript@5.9.3)) '@storagehub-sdk/msp-client': - specifier: 0.3.4 - version: 0.3.4(@storagehub-sdk/core@0.3.4(typescript@5.9.3)(viem@2.42.1(typescript@5.9.3)))(viem@2.42.1(typescript@5.9.3)) + specifier: 0.4.0 + version: 0.4.0(@storagehub-sdk/core@0.4.0(typescript@5.9.3)(viem@2.42.1(typescript@5.9.3)))(viem@2.42.1(typescript@5.9.3)) artillery: specifier: ^2.0.0 version: 2.0.27(@types/node@25.0.2) @@ -1431,14 +1431,14 @@ packages: '@socket.io/component-emitter@3.1.2': resolution: {integrity: sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==} - '@storagehub-sdk/core@0.3.4': - resolution: {integrity: sha512-CLr/PiwnNMi18h0vUjb1zYvx7MeespxB+FHhxwVQqsaByQ2sLfp6PpZhvKB3dBd+Wi85rt79LhubUYeGkIoqAA==} + '@storagehub-sdk/core@0.4.0': + resolution: {integrity: sha512-rXyWUExPFDTEqMNgWZniEtVXXCPsbYF25PY/a/ksN17Hb8poQkbRh6WRZXJLTNZCukR1aP9gw1Ty0D25yWCRiQ==} engines: {node: '>=22'} peerDependencies: viem: '>=2.38.3' - '@storagehub-sdk/msp-client@0.3.4': - resolution: {integrity: sha512-GQd85nLK9dLIQxqsZJ6IzmNt2uOAohRju+aXGyTtjAP0isrP1Asefsuzi55e1wZ/8x7P2ppJhhqVUcziOIhinw==} + '@storagehub-sdk/msp-client@0.4.0': + resolution: {integrity: sha512-qyDBN1gG5B+3nSP9H+QdTaiVJDnW29A2TL8Olcdc/7WCAMRlQLpBBaCHPb69bKZKb4Lmt6uN/9EKxsy4xXhT1A==} engines: {node: '>=22'} peerDependencies: '@storagehub-sdk/core': '>=0.0.5' @@ -5489,7 +5489,7 @@ snapshots: '@socket.io/component-emitter@3.1.2': {} - '@storagehub-sdk/core@0.3.4(typescript@5.9.3)(viem@2.42.1(typescript@5.9.3))': + '@storagehub-sdk/core@0.4.0(typescript@5.9.3)(viem@2.42.1(typescript@5.9.3))': dependencies: '@polkadot/types': 
16.5.4 abitype: 1.2.2(typescript@5.9.3) @@ -5501,9 +5501,9 @@ snapshots: - utf-8-validate - zod - '@storagehub-sdk/msp-client@0.3.4(@storagehub-sdk/core@0.3.4(typescript@5.9.3)(viem@2.42.1(typescript@5.9.3)))(viem@2.42.1(typescript@5.9.3))': + '@storagehub-sdk/msp-client@0.4.0(@storagehub-sdk/core@0.4.0(typescript@5.9.3)(viem@2.42.1(typescript@5.9.3)))(viem@2.42.1(typescript@5.9.3))': dependencies: - '@storagehub-sdk/core': 0.3.4(typescript@5.9.3)(viem@2.42.1(typescript@5.9.3)) + '@storagehub-sdk/core': 0.4.0(typescript@5.9.3)(viem@2.42.1(typescript@5.9.3)) viem: 2.42.1(typescript@5.9.3) '@substrate/ss58-registry@1.51.0': {} diff --git a/scenarios/artillery.yml b/scenarios/artillery.yml deleted file mode 100644 index 9ae0bdf..0000000 --- a/scenarios/artillery.yml +++ /dev/null @@ -1,20 +0,0 @@ -config: - target: "http://localhost" - processor: "../dist/src/processors/connect.js" - payload: - - path: "../data/private_keys.csv" - fields: - - privateKey - skipHeader: true - order: sequence - phases: - - name: connect - duration: 10 - arrivalRate: 10 - -scenarios: - - name: sdk_connect - flow: - - function: connectClients - - diff --git a/scenarios/artillery.download.yml b/scenarios/download.yml similarity index 50% rename from scenarios/artillery.download.yml rename to scenarios/download.yml index af0bdf4..3ceae60 100644 --- a/scenarios/artillery.download.yml +++ b/scenarios/download.yml @@ -1,6 +1,11 @@ config: target: "http://localhost" - processor: "../dist/src/processors/download.js" + processor: "../dist/src/processors/index.js" + variables: + # Account index selection (mnemonic-based): + ACCOUNT_MODE: sequential + ACCOUNT_INDEX_START: 0 + ACCOUNT_INDEX_COUNT: 1000 phases: - name: warmup @@ -24,5 +29,11 @@ config: scenarios: - name: download_file flow: + # Init (muted): derive + SIWE without polluting summary metrics + - function: muteMetrics + - function: deriveAccount + - function: SIWE + - function: unmuteMetrics + # Action - function: downloadFile diff --git 
a/scenarios/examples.getProfile.yml b/scenarios/examples.getProfile.yml new file mode 100644 index 0000000..0da6ce5 --- /dev/null +++ b/scenarios/examples.getProfile.yml @@ -0,0 +1,34 @@ +config: + # Template scenario: init steps + action step (metrics-focused) + # + # Notes: + # - Provide `NETWORK` and `TEST_MNEMONIC` via env vars. + # - Use `muteMetrics`/`unmuteMetrics` to keep init step metrics out of the summary + # while still surfacing error counters (e.g. `*.err`). + target: "http://localhost" + processor: "../dist/src/processors/index.js" + variables: + # Account index selection: + # - byIndex: all VUs use ACCOUNT_INDEX unless payload var `accountIndex` is provided + # - sequential/random: uses START/COUNT + ACCOUNT_MODE: byIndex + # Fallback if allocator isn't used (e.g. running Artillery directly without pnpm script) + ACCOUNT_INDEX: 0 + phases: + - name: init-and-actions + duration: 60 + arrivalRate: 5 +scenarios: + - name: getProfile + flow: + # Init (muted): derive + SIWE without polluting summary metrics + - function: muteMetrics + - function: deriveAccount + - function: SIWE + - function: unmuteMetrics + # Actions: focus metrics here + - loop: + - function: actionGetProfile + count: 100 + + diff --git a/scenarios/artillery.msp-unauth.yml b/scenarios/msp.unauth.yml similarity index 81% rename from scenarios/artillery.msp-unauth.yml rename to scenarios/msp.unauth.yml index fcb659b..b54617a 100644 --- a/scenarios/artillery.msp-unauth.yml +++ b/scenarios/msp.unauth.yml @@ -1,6 +1,6 @@ config: target: "http://localhost" - processor: "../dist/src/processors/msp-unauth.js" + processor: "../dist/src/processors/index.js" variables: # IMPORTANT: keep this in sync with the total duration of all phases (seconds). 
# (10 + 20 + 40 + 60 + 10) = 140 @@ -32,8 +32,9 @@ config: rampTo: 1 scenarios: - - name: msp_unauth_load + - name: MSP_Unauthenticated_calls flow: - - function: mspUnauthLoad + - function: getHealth + - function: getInfo diff --git a/scripts/preflight.ts b/scripts/preflight.ts deleted file mode 100644 index 4466daf..0000000 --- a/scripts/preflight.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { readEnv } from "../src/config.js"; -import { getLogger } from "../src/log.js"; -import { - authenticateWithSiwe, - connectMsp, - validateMspConnection, -} from "../src/sdk/msp.js"; -import { NETWORKS } from "../src/networks.js"; -import { initWalletFromPrivateKey, to0xPrivateKey } from "../src/sdk/wallet.js"; -import { readFileSync } from "node:fs"; -import { join } from "node:path"; - -function toError(err: unknown): Error { - if (err instanceof Error) { - return err; - } - return new Error(typeof err === "string" ? err : "Unknown error"); -} - -function readFirstPrivateKeyFromCsv(filePath: string): string | undefined { - const text = readFileSync(filePath, "utf8"); - const lines = text - .split(/\r?\n/g) - .map((l) => l.trim()) - .filter((l) => l.length > 0); - - for (const line of lines) { - if (line.toLowerCase() === "privatekey") { - continue; - } - // CSV is 1-column: privateKey - return line; - } - return undefined; -} - -async function main(): Promise { - const env = readEnv(); - const logger = getLogger(); - - const fromEnv = process.env.STORAGEHUB_PRIVATE_KEY; - const csvPath = join(process.cwd(), "data", "private_keys.csv"); - const fromCsv = (() => { - try { - return readFirstPrivateKeyFromCsv(csvPath); - } catch { - return undefined; - } - })(); - - const privateKeyRaw = fromEnv ?? fromCsv; - if (!privateKeyRaw) { - throw new Error( - "No private key available for SIWE. 
Set STORAGEHUB_PRIVATE_KEY or create data/private_keys.csv" - ); - } - - const privateKey = to0xPrivateKey(privateKeyRaw); - const network = NETWORKS[env.network]; - const { walletClient } = initWalletFromPrivateKey(network, privateKey); - - const msp = await connectMsp(env, logger); - await validateMspConnection(msp, logger); - await authenticateWithSiwe(msp, env, walletClient, logger); -} - -main().catch((err: unknown) => { - const error = toError(err); - // preflight should fail fast with a clear message for CI - console.error(error); - process.exitCode = 1; -}); diff --git a/scripts/run-artillery-parallel.ts b/scripts/run-artillery-parallel.ts deleted file mode 100644 index 2b9bd4c..0000000 --- a/scripts/run-artillery-parallel.ts +++ /dev/null @@ -1,101 +0,0 @@ -import { spawn } from "node:child_process"; -import { createWriteStream, mkdirSync } from "node:fs"; -import { join } from "node:path"; - -type Args = Readonly<{ - scriptPath: string; - workers: number; -}>; - -function parseWorkers(raw: string | undefined): number { - if (!raw) return 1; - const n = Number.parseInt(raw, 10); - if (!Number.isFinite(n) || n <= 0) { - throw new Error( - `Invalid ARTILLERY_WORKERS: ${raw} (expected positive integer)` - ); - } - return n; -} - -function parseArgs(argv: string[]): Args { - const scriptPath = argv[2]; - if (!scriptPath) { - throw new Error( - "Usage: pnpm exec tsx scripts/run-artillery-parallel.ts " - ); - } - const workers = parseWorkers(process.env.ARTILLERY_WORKERS); - return { scriptPath, workers }; -} - -function pad2(n: number): string { - return n < 10 ? 
`0${n}` : String(n); -} - -function stamp(): string { - const d = new Date(); - return `${d.getFullYear()}${pad2(d.getMonth() + 1)}${pad2(d.getDate())}-${pad2(d.getHours())}${pad2(d.getMinutes())}${pad2(d.getSeconds())}`; -} - -function runOne( - workerIndex: number, - scriptPath: string, - runStamp: string -): Promise { - return new Promise((resolve, reject) => { - const cmd = process.platform === "win32" ? "pnpm.cmd" : "pnpm"; - - const logsDir = join(process.cwd(), "logs"); - mkdirSync(logsDir, { recursive: true }); - const logPath = join( - logsDir, - `artillery-${runStamp}-worker${workerIndex}.log` - ); - const file = createWriteStream(logPath, { flags: "a" }); - file.write(`[worker ${workerIndex}] script=${scriptPath}\n`); - - const child = spawn(cmd, ["exec", "artillery", "run", scriptPath], { - stdio: ["ignore", "pipe", "pipe"], - env: { - ...process.env, - ARTILLERY_WORKER_INDEX: String(workerIndex), - }, - }); - child.on("error", reject); - child.stdout.on("data", (chunk: Buffer) => { - file.write(chunk); - process.stdout.write(chunk); - }); - child.stderr.on("data", (chunk: Buffer) => { - file.write(chunk); - process.stderr.write(chunk); - }); - child.on("exit", (code) => resolve(code ?? 1)); - }); -} - -async function main(): Promise { - const { scriptPath, workers } = parseArgs(process.argv); - const runStamp = stamp(); - - if (workers === 1) { - const code = await runOne(1, scriptPath, runStamp); - process.exitCode = code; - return; - } - - const results = await Promise.all( - Array.from({ length: workers }, (_v, i) => - runOne(i + 1, scriptPath, runStamp) - ) - ); - - const worst = results.reduce((acc, x) => (x !== 0 ? 
x : acc), 0); - process.exitCode = worst; -} - -main().catch((err: unknown) => { - console.error(err); - process.exitCode = 1; -}); diff --git a/scripts/run-scenario.ts b/scripts/run-scenario.ts new file mode 100644 index 0000000..56440ce --- /dev/null +++ b/scripts/run-scenario.ts @@ -0,0 +1,191 @@ +import { spawn } from "node:child_process"; +import { mkdirSync } from "node:fs"; +import * as http from "node:http"; +import type { IncomingMessage, ServerResponse } from "node:http"; +import { join } from "node:path"; + +/** + * Scenario runner wrapper used by `pnpm test:run` to provide TWO core features: + * + * 1) **Per-run logs** + * Ensures JSONL logging is written to a run-specific file via `LOG_FILE` + `RUN_ID`. + * + * 2) **Unique account indices across VUs** + * Starts a local "index allocator" HTTP server and sets `INDEX_ALLOCATOR_URL` for + * the child process. This avoids duplicated indices when Artillery VUs run in + * isolated JS sandboxes (where in-process counters can repeat). + * + * Consumer: + * `src/processors/account-derive.ts` calls `${INDEX_ALLOCATOR_URL}/next` to get a unique + * derivation index per VU (see `fetchNextIndex()` / `deriveAccount()`). + */ + +type IndexAllocatorConfig = Readonly<{ + startIndex: number; +}>; + +function parseIndexAllocatorStart(raw: string | undefined): number { + if (!raw) return 0; + const n = Number.parseInt(raw, 10); + if (!Number.isFinite(n) || n < 0) { + // eslint-disable-next-line no-console + console.error( + `[run-scenario] Invalid INDEX_ALLOCATOR_START=${String( + raw + )} (expected integer >= 0)` + ); + process.exit(2); + } + return n; +} + +async function startIndexAllocatorServer( + cfg: IndexAllocatorConfig +): Promise<Readonly<{ url: string; close: () => Promise<void> }>> { + let counter = cfg.startIndex; + const server = http.createServer( + (req: IncomingMessage, res: ServerResponse) => { + try { + const url = req.url ?? 
"/"; + + // Endpoint: + // - GET /next -> { index: number } + if (req.method === "GET" && url.startsWith("/next")) { + const idx = counter++; + res.writeHead(200, { "content-type": "application/json" }); + res.end(JSON.stringify({ index: idx })); + return; + } + + res.writeHead(404, { "content-type": "application/json" }); + res.end(JSON.stringify({ error: "not_found" })); + } catch (_e) { + res.writeHead(500, { "content-type": "application/json" }); + res.end(JSON.stringify({ error: "internal_error" })); + } + } + ); + + // Bind to loopback only; ephemeral port. + await new Promise((resolve, reject) => { + server.once("error", reject); + server.listen(0, "127.0.0.1", () => resolve()); + }); + + const addr = server.address(); + if (!addr || typeof addr === "string") { + throw new Error("Failed to bind allocator server"); + } + + const url = `http://127.0.0.1:${addr.port}`; + + const close = async (): Promise => { + await new Promise((resolve) => server.close(() => resolve())); + }; + + return { url, close }; +} + +function usageAndExit(): never { + // eslint-disable-next-line no-console + console.error( + [ + "Usage:", + " pnpm exec tsx scripts/run-scenario.ts -- [args...]", + "", + "Core features:", + " - Per-run logs: sets LOG_FILE / RUN_ID if not already set.", + " - Index allocator: starts a local server and sets INDEX_ALLOCATOR_URL for the child process.", + " (Consumed by src/processors/account-derive.ts via GET /next.)", + "", + "Examples:", + " pnpm exec tsx scripts/run-scenario.ts -- artillery run scenarios/examples.getProfile.yml", + " LOG_FILE=./logs/my-run.jsonl pnpm exec tsx scripts/run-scenario.ts -- artillery run scenarios/examples.getProfile.yml", + ].join("\n") + ); + process.exit(2); +} + +function nowStamp(): string { + const d = new Date(); + const pad2 = (n: number) => (n < 10 ? 
`0${n}` : String(n)); + return `${d.getFullYear()}${pad2(d.getMonth() + 1)}${pad2(d.getDate())}-${pad2(d.getHours())}${pad2(d.getMinutes())}${pad2(d.getSeconds())}`; +} + +function randomSuffix(): string { + return Math.random().toString(16).slice(2, 8); +} + +function sanitizeRunId(raw: string): string { + // Keep it path-safe and easy to read. + return raw.replaceAll(/[^a-zA-Z0-9._-]/g, "_").slice(0, 80); +} + +function ensureRunLogEnv(env: NodeJS.ProcessEnv): NodeJS.ProcessEnv { + if (env.LOG_FILE && env.LOG_FILE.trim().length > 0) { + return env; + } + + const runIdRaw = + env.RUN_ID && env.RUN_ID.trim().length > 0 + ? env.RUN_ID.trim() + : `${nowStamp()}-${randomSuffix()}`; + const runId = sanitizeRunId(runIdRaw); + + const dir = join(process.cwd(), "logs"); + mkdirSync(dir, { recursive: true }); + + return { + ...env, + RUN_ID: runId, + LOG_FILE: join(dir, `run-${runId}.jsonl`), + }; +} + +const sepIdx = process.argv.indexOf("--"); +if (sepIdx === -1) usageAndExit(); + +const cmd = process.argv[sepIdx + 1]; +if (!cmd) usageAndExit(); + +const args = process.argv.slice(sepIdx + 2); + +// Core feature #1: per-run logs +const runLogEnv = ensureRunLogEnv(process.env); + +// Core feature #2: shared unique account indices for the run +const allocator = await startIndexAllocatorServer({ + startIndex: parseIndexAllocatorStart(process.env.INDEX_ALLOCATOR_START), +}); + +const childEnv: NodeJS.ProcessEnv = { + ...runLogEnv, + INDEX_ALLOCATOR_URL: allocator.url, +}; + +// eslint-disable-next-line no-console +console.log(`[run-scenario] LOG_FILE=${childEnv.LOG_FILE}`); +// eslint-disable-next-line no-console +console.log(`[run-scenario] INDEX_ALLOCATOR_URL=${allocator.url}`); + +const child = spawn(cmd, args, { + stdio: "inherit", + env: childEnv, + shell: process.platform === "win32", +}); + +child.on("exit", (code, signal) => { + void (async () => { + try { + await allocator.close(); + } catch { + // best-effort cleanup + } + + if (signal) { + process.kill(process.pid, 
signal); + return; + } + process.exit(code ?? 1); + })(); +}); diff --git a/src/config.ts b/src/config.ts index 828c180..f934aa7 100644 --- a/src/config.ts +++ b/src/config.ts @@ -2,25 +2,21 @@ export type Env = Readonly<{ network: "testnet" | "stagenet" | "local"; }>; -function getRequiredEnv(key: string): string { - const value = process.env[key]; - if (!value) { - throw new Error(`Missing required env var: ${key}`); +export type NetworkName = Env["network"]; + +import { readRequiredEnv } from "./helpers/env.js"; + +export function parseNetworkName(raw: string): NetworkName { + const v = raw.trim(); + if (v === "testnet" || v === "stagenet" || v === "local") { + return v; } - return value; + throw new Error( + `Invalid NETWORK: ${raw} (expected 'testnet', 'stagenet' or 'local')` + ); } export function readEnv(): Env { - const networkRaw = getRequiredEnv("NETWORK"); - if ( - networkRaw !== "testnet" && - networkRaw !== "stagenet" && - networkRaw !== "local" - ) { - throw new Error( - `Invalid NETWORK: ${networkRaw} (expected 'testnet', 'stagenet' or 'local')` - ); - } - const network = networkRaw; + const network = parseNetworkName(readRequiredEnv("NETWORK")); return { network }; } diff --git a/src/helpers/accountIndex.ts b/src/helpers/accountIndex.ts new file mode 100644 index 0000000..3e5a675 --- /dev/null +++ b/src/helpers/accountIndex.ts @@ -0,0 +1,202 @@ +import { + requireDict, + requireInteger, + requireNonEmptyString, + type Dict, +} from "./validation.js"; + +export type AccountMode = "byIndex" | "sequential" | "random"; + +export type AccountIndexSelection = Readonly<{ + mode: AccountMode; + index: number; + /** + * Human-readable explanation of how the index was chosen (for logs/debug). + * Do not include secrets. + */ + source: string; +}>; + +let sequentialCounter = 0; + +/** + * Strict parsing/validation entry point for account-index selection. 
+ * + * Why: + * `context.vars` is an untyped runtime boundary (Artillery), so we validate once up-front + * and then operate on a typed structure. + */ + +export type ParsedAccountIndexConfig = + | Readonly<{ mode: "byIndex"; accountIndex: number; source: string }> + | Readonly<{ mode: "sequential"; start: number; count: number }> + | Readonly<{ mode: "random"; start: number; count: number; seed?: number }>; + +export function parseAccountIndexConfig( + rawVars: unknown +): ParsedAccountIndexConfig { + const vars = requireDict(rawVars, "context.vars"); + + const modeRaw = requireNonEmptyString(vars.ACCOUNT_MODE, "ACCOUNT_MODE"); + if ( + modeRaw !== "byIndex" && + modeRaw !== "sequential" && + modeRaw !== "random" + ) { + throw new Error( + `Missing or invalid ACCOUNT_MODE: ${modeRaw} (expected 'byIndex', 'sequential', or 'random')` + ); + } + + if (modeRaw === "byIndex") { + // Payload override is allowed (if present): + if (vars.accountIndex !== undefined) { + const idx = requireInteger(vars.accountIndex, "accountIndex"); + if (idx < 0) throw new Error("accountIndex must be >= 0"); + return { + mode: "byIndex", + accountIndex: idx, + source: "payload:accountIndex", + }; + } + + const idx = requireInteger(vars.ACCOUNT_INDEX, "ACCOUNT_INDEX"); + if (idx < 0) throw new Error("ACCOUNT_INDEX must be >= 0"); + return { + mode: "byIndex", + accountIndex: idx, + source: "variables:ACCOUNT_INDEX", + }; + } + + const start = requireInteger(vars.ACCOUNT_INDEX_START, "ACCOUNT_INDEX_START"); + const count = requireInteger(vars.ACCOUNT_INDEX_COUNT, "ACCOUNT_INDEX_COUNT"); + if (start < 0) throw new Error("ACCOUNT_INDEX_START must be >= 0"); + if (count <= 0) throw new Error("ACCOUNT_INDEX_COUNT must be > 0"); + + if (modeRaw === "sequential") { + return { mode: "sequential", start, count }; + } + + // random + if (vars.ACCOUNT_RANDOM_SEED !== undefined) { + const seed = requireInteger( + vars.ACCOUNT_RANDOM_SEED, + "ACCOUNT_RANDOM_SEED" + ); + return { mode: "random", start, 
count, seed }; + } + return { mode: "random", start, count }; +} + +function normalizeMode(raw: unknown): AccountMode { + if (raw === "byIndex" || raw === "sequential" || raw === "random") return raw; + throw new Error( + `Missing or invalid ACCOUNT_MODE: ${String( + raw + )} (expected 'byIndex', 'sequential', or 'random')` + ); +} + +function parseWorkerIndex(raw: string | undefined): number { + if (!raw) return 0; + const n = Number.parseInt(raw, 10); + if (!Number.isFinite(n) || n <= 0) return 0; + // 1-based in our parallel runner; convert to 0-based offset. + return n - 1; +} + +/** + * Simple seeded PRNG (Linear Congruential Generator). + * + * - Deterministic: same seed => same sequence + * - Not cryptographically secure (fine for load-test user selection) + * - Returns values in [0, 1) + * + * Used only when `ACCOUNT_MODE=random` and `ACCOUNT_RANDOM_SEED` is set, so test runs + * are reproducible. + */ +function lcg(seed: number): () => number { + let state = seed >>> 0; + return () => { + // Numerical Recipes LCG constants + state = (Math.imul(1664525, state) + 1013904223) >>> 0; + return state / 2 ** 32; + }; +} + +export function selectAccountIndex(rawVars: unknown): AccountIndexSelection { + // Entry point: `context.vars` must be an object; fail fast on misconfigured scenarios/callers. + const vars = requireDict(rawVars, "context.vars"); + + // Cache: keep index stable for the duration of the VU. + if (vars.__accountIndex !== undefined) { + const cached = requireInteger(vars.__accountIndex, "__accountIndex"); + if (cached < 0) throw new Error("__accountIndex must be >= 0"); + + const cachedModeRaw = requireNonEmptyString( + vars.__accountMode ?? "byIndex", + "__accountMode" + ); + const cachedMode = normalizeMode(cachedModeRaw); + + const cachedSource = requireNonEmptyString( + vars.__accountIndexSource ?? 
"cached", + "__accountIndexSource" + ); + + return { + mode: cachedMode, + index: cached, + source: `cached (${cachedSource})`, + }; + } + + // Strict entry point: validate once, then operate on typed config. + const cfg = parseAccountIndexConfig(vars); + + const workerOffset = parseWorkerIndex(process.env.ARTILLERY_WORKER_INDEX); + + if (cfg.mode === "byIndex") { + return { + mode: "byIndex", + index: cfg.accountIndex, + source: cfg.source, + }; + } + + if (cfg.mode === "sequential") { + const local = sequentialCounter++; + const idx = cfg.start + ((local + workerOffset) % cfg.count); + if (idx < 0) throw new Error("Derived index must be >= 0"); + return { + mode: "sequential", + index: idx, + source: `sequential(local=${local}, workerOffset=${workerOffset})`, + }; + } + + // random + const rnd = + typeof cfg.seed === "number" ? lcg(cfg.seed + workerOffset) : Math.random; + const pick = Math.floor(rnd() * cfg.count); + const idx = cfg.start + pick; + if (idx < 0) throw new Error("Derived index must be >= 0"); + return { + mode: "random", + index: idx, + source: + typeof cfg.seed === "number" + ? 
`random(seed=${cfg.seed}, workerOffset=${workerOffset})` + : `random(unseeded, workerOffset=${workerOffset})`, + }; +} + +export function cacheAccountIndex( + vars: Record, + selection: AccountIndexSelection +): void { + vars.__accountIndex = selection.index; + vars.__accountMode = selection.mode; + vars.__accountIndexSource = selection.source; +} diff --git a/src/helpers/accounts.ts b/src/helpers/accounts.ts new file mode 100644 index 0000000..d05bf87 --- /dev/null +++ b/src/helpers/accounts.ts @@ -0,0 +1,61 @@ +import type { Hex } from "viem"; +import { toHex } from "viem"; +import { mnemonicToAccount } from "viem/accounts"; +import type { HDAccount } from "viem/accounts"; + +export type DerivationInfo = Readonly<{ + index: number; + path: `m/44'/60'/${string}`; +}>; + +export type DerivedAccount = Readonly<{ + account: HDAccount; + derivation: DerivationInfo; + /** + * Derived private key (hex, 0x-prefixed). + * + * IMPORTANT: Never log this unless explicitly gated by config. + */ + privateKey: Hex; +}>; + +export function derivePath(index: number): `m/44'/60'/${string}` { + if (!Number.isInteger(index) || index < 0) { + throw new Error( + `Invalid derivation index: ${String(index)} (expected integer >= 0)` + ); + } + return `m/44'/60'/0'/0/${index}`; +} + +export function deriveAccountFromMnemonic( + mnemonic: string, + index: number +): DerivedAccount { + if (typeof mnemonic !== "string" || mnemonic.trim().length === 0) { + throw new Error("Missing or invalid mnemonic"); + } + + const path = derivePath(index); + const account = mnemonicToAccount(mnemonic, { path }); + + const hdKey = account.getHdKey(); + const privateKey = + hdKey.privateKey && hdKey.privateKey.length > 0 + ? toHex(hdKey.privateKey) + : undefined; + + if (!privateKey) { + // Sanity check: `mnemonicToAccount(...).getHdKey()` should normally yield a private key + // for a valid mnemonic + derivation path. 
If this happens, something is unexpectedly + // wrong (or the underlying library behavior changed), and callers cannot proceed + // because they always require a signer. + throw new Error("Derived account has no privateKey available"); + } + + return { + account, + derivation: { index, path }, + privateKey, + }; +} diff --git a/src/helpers/artillery.ts b/src/helpers/artillery.ts new file mode 100644 index 0000000..38b2b7d --- /dev/null +++ b/src/helpers/artillery.ts @@ -0,0 +1,111 @@ +export type Done = (error?: Error) => void; + +export type ArtilleryEvents = Readonly<{ + emit: (type: string, name: string, value: number) => void; +}>; + +export type ArtilleryContext = { + vars?: Record; + scenario?: { + vars?: Record; + }; +}; + +/** + * Artillery context notes: + * - `context` is scoped to a single VU (virtual user). `context.vars` is NOT global across VUs. + * - Artillery may merge `context.scenario.vars` back into `context.vars` across steps/iterations. + * We use `persistVars()` for values that must persist reliably for the rest of a scenario/VU + * (e.g. derived account info, SIWE session, muting flags). + */ +export function ensureVars(context: ArtilleryContext): Record { + if (!context.vars) context.vars = {}; + return context.vars; +} + +/** + * Ensure `context.scenario.vars` exists and return it. + * Use this for values that must persist across scenario iterations/loops. + * + * Side effect: may create `context.scenario` and/or `context.scenario.vars`. 
+ */ +export function ensureScenarioVars( + context: ArtilleryContext +): Record { + if (!context.scenario) context.scenario = {}; + if (!context.scenario.vars) context.scenario.vars = {}; + return context.scenario.vars; +} + +/** + * Persist values to both: + * - context.vars (available immediately in the current step/iteration) + * - context.scenario.vars (persists across iterations; Artillery merges scenario vars back into vars) + */ +export function persistVars( + context: ArtilleryContext, + patch: Record +): void { + const vars = ensureVars(context); + const svars = ensureScenarioVars(context); + Object.assign(vars, patch); + Object.assign(svars, patch); +} + +/** + * Read a required string variable. + * + * Throws if missing, not a string, or blank. + */ +export function requireVarString( + vars: Record, + key: string +): string { + const v = vars[key]; + if (typeof v !== "string" || v.trim().length === 0) { + throw new Error(`Missing or invalid var: ${key}`); + } + return v.trim(); +} + +/** + * Parse a "boolean-like" variable from vars. + * + * Accepted values: + * - boolean: true/false + * - number: 1/0 + * - string: true/false, 1/0, yes/no (case-insensitive) + * + * Returns `undefined` if key is missing or value is not recognized. + */ +export function readVarBool( + vars: Record, + key: string +): boolean | undefined { + const v = vars[key]; + if (typeof v === "boolean") return v; + if (typeof v === "number") + return v === 1 ? true : v === 0 ? false : undefined; + if (typeof v === "string") { + const s = v.trim().toLowerCase(); + if (s === "true" || s === "1" || s === "yes") return true; + if (s === "false" || s === "0" || s === "no") return false; + } + return undefined; +} + +/** + * Read a "persisted" var from Artillery context. + * + * Artillery may merge `context.scenario.vars` back into `context.vars` between steps/iterations. + * This helper checks both places (prefer `context.vars`) to avoid boilerplate in processors. 
+ */ +export function getPersistedVar( + context: ArtilleryContext, + key: string +): unknown { + const vars = ensureVars(context); + if (Object.prototype.hasOwnProperty.call(vars, key)) return vars[key]; + const svars = ensureScenarioVars(context); + return svars[key]; +} diff --git a/src/helpers/env.ts b/src/helpers/env.ts new file mode 100644 index 0000000..9341bc3 --- /dev/null +++ b/src/helpers/env.ts @@ -0,0 +1,13 @@ +export function readRequiredEnv(key: string): string { + const v = process.env[key]; + if (!v || v.trim().length === 0) { + throw new Error(`Missing required env var: ${key}`); + } + return v.trim(); +} + +export function readBoolEnv(key: string): boolean { + const v = process.env[key]; + if (!v) return false; + return v === "1" || v.toLowerCase() === "true" || v.toLowerCase() === "yes"; +} diff --git a/src/helpers/errors.ts b/src/helpers/errors.ts new file mode 100644 index 0000000..4ac321f --- /dev/null +++ b/src/helpers/errors.ts @@ -0,0 +1,4 @@ +export function toError(err: unknown): Error { + if (err instanceof Error) return err; + return new Error(typeof err === "string" ? err : "Unknown error"); +} diff --git a/src/helpers/metrics.ts b/src/helpers/metrics.ts new file mode 100644 index 0000000..8855fe3 --- /dev/null +++ b/src/helpers/metrics.ts @@ -0,0 +1,55 @@ +import { + ensureScenarioVars, + ensureVars, + readVarBool, + type ArtilleryContext, + type ArtilleryEvents, +} from "./artillery.js"; + +export function metricsMuted(context: ArtilleryContext): boolean { + const vars = ensureVars(context); + const svars = ensureScenarioVars(context); + return ( + readVarBool(vars, "__muteMetrics") === true || + readVarBool(svars, "__muteMetrics") === true || + readVarBool(vars, "MUTE_METRICS") === true || + readVarBool(svars, "MUTE_METRICS") === true + ); +} + +function isErrorMetricName(name: string): boolean { + // Convention used across this repo: counters for failures end with ".err" + // (e.g. 
"auth.siwe.err", "init.derive.err", "action.getProfile.err"). + return name.endsWith(".err") || name.endsWith(".error"); +} + +export type MetricsEmitter = Readonly<{ + counter: (name: string, value?: number) => void; + histogram: (name: string, value: number) => void; +}>; + +/** + * Option B: bind an emitter once per step. + * + * Policy: + * - not muted: emit everything + * - muted: emit only counters that look like errors (name ends with .err/.error), and drop histograms + */ +export function createEmitter( + context: ArtilleryContext, + events: ArtilleryEvents +): MetricsEmitter { + const muted = metricsMuted(context); + + const counter = (name: string, value = 1): void => { + if (muted && !isErrorMetricName(name)) return; + events.emit("counter", name, value); + }; + + const histogram = (name: string, value: number): void => { + if (muted) return; + events.emit("histogram", name, value); + }; + + return { counter, histogram }; +} diff --git a/src/helpers/validation.ts b/src/helpers/validation.ts new file mode 100644 index 0000000..0c6489f --- /dev/null +++ b/src/helpers/validation.ts @@ -0,0 +1,71 @@ +/** + * Ensure a string is 0x-prefixed. + * If it already starts with `0x`, return as-is; otherwise prepend `0x`. + * + * If `bytes` is provided, enforce the expected length for a 0x-hex string: + * - total length must be `2 + bytes * 2` (e.g. 32 bytes => 66 chars including `0x`) + * + * This does not validate hex content (keep that check where needed). + */ +export function ensure0xPrefix(raw: string, bytes?: number): `0x${string}` { + const s = raw.trim(); + const value = (s.startsWith("0x") ? 
s : `0x${s}`) as `0x${string}`; + if (bytes !== undefined) { + const expectedLen = 2 + bytes * 2; + if (value.length !== expectedLen) { + throw new Error( + `Expected 0x-prefixed hex string of ${bytes} bytes (length ${expectedLen}), got length ${value.length}` + ); + } + } + return value; +} + +export type Dict = Readonly>; + +/** + * Require an object-like dictionary at a runtime boundary. + * + * Note: This intentionally does not try to validate nested shapes; it just ensures + * we can safely index into the returned value. + */ +export function requireDict(value: unknown, label: string): Dict { + if (!value || typeof value !== "object") { + throw new Error(`${label} is not an object`); + } + return value as Dict; +} + +/** + * Require a non-empty string. + */ +export function requireNonEmptyString(value: unknown, label: string): string { + if (typeof value !== "string" || value.trim() === "") { + throw new Error(`${label} must be a non-empty string`); + } + return value.trim(); +} + +/** + * Require an integer-like value. + * + * Accepts: + * - integer numbers + * - numeric strings representing integers (e.g. "42") + */ +export function requireInteger(value: unknown, label: string): number { + let n: number; + if (typeof value === "number") { + n = value; + } else if (typeof value === "string") { + n = Number(value); + } else { + throw new Error(`${label} must be an integer`); + } + + if (!Number.isInteger(n)) { + throw new Error(`${label} must be an integer`); + } + + return n; +} diff --git a/src/log.ts b/src/log.ts index 16c2a9e..0c65f42 100644 --- a/src/log.ts +++ b/src/log.ts @@ -107,14 +107,23 @@ export function getLogger(): Logger { } const cfg = readLogConfig(); - const streams: Array<{ stream: pino.DestinationStream }> = []; + // NOTE: When using pino.multistream, each stream has its own level filter. + // If you omit it, pino defaults that stream to "info", which would drop debug logs. 
+ const streams: Array<{ stream: pino.DestinationStream; level: LogLevel }> = + []; if (cfg.consoleEnabled) { - streams.push({ stream: pino.destination({ dest: 1, sync: false }) }); + streams.push({ + stream: pino.destination({ dest: 1, sync: false }), + level: cfg.level, + }); } if (cfg.filePath) { streams.push({ - stream: pino.destination({ dest: cfg.filePath, sync: false }), + // Artillery runs can end quickly and may not flush async streams. + // Use sync writes for file logs to avoid empty/truncated JSONL. + stream: pino.destination({ dest: cfg.filePath, sync: true }), + level: cfg.level, }); } diff --git a/src/networks.ts b/src/networks.ts index abf8151..6f5b16d 100644 --- a/src/networks.ts +++ b/src/networks.ts @@ -5,6 +5,11 @@ export type NetworkConfig = Readonly<{ chain: Readonly<{ id: number; name: string; + nativeCurrency: Readonly<{ + name: string; + symbol: string; + decimals: number; + }>; evmRpcUrl: string; substrateWsUrl: `${"ws" | "wss"}://${string}`; filesystemPrecompileAddress: `0x${string}`; @@ -26,6 +31,7 @@ export const NETWORKS: Readonly> = { chain: { id: 55931, name: "DataHaven Testnet", + nativeCurrency: { name: "DH Testnet", symbol: "MOCK", decimals: 18 }, evmRpcUrl: "https://services.datahaven-testnet.network/testnet", substrateWsUrl: "wss://services.datahaven-testnet.network/testnet", filesystemPrecompileAddress: "0x0000000000000000000000000000000000000404", @@ -42,6 +48,7 @@ export const NETWORKS: Readonly> = { chain: { id: 55932, name: "DataHaven Stagenet", + nativeCurrency: { name: "DH Stagenet", symbol: "STAGE", decimals: 18 }, evmRpcUrl: "https://services.datahaven-dev.network/stagenet", substrateWsUrl: "wss://services.datahaven-dev.network/stagenet", filesystemPrecompileAddress: "0x0000000000000000000000000000000000000404", @@ -58,6 +65,7 @@ export const NETWORKS: Readonly> = { chain: { id: 181222, name: "StorageHub Solochain EVM", + nativeCurrency: { name: "StorageHub", symbol: "SH", decimals: 18 }, evmRpcUrl: 
"http://127.0.0.1:9888", substrateWsUrl: "ws://127.0.0.1:9888", filesystemPrecompileAddress: "0x0000000000000000000000000000000000000064", diff --git a/src/privateKeys.ts b/src/privateKeys.ts deleted file mode 100644 index 85c2ffe..0000000 --- a/src/privateKeys.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { readFileSync } from "node:fs"; -import { join } from "node:path"; - -export type PrivateKeySource = Readonly<{ - keys: ReadonlyArray; - sourcePath: string; -}>; - -function defaultKeysPath(): string { - return join(process.cwd(), "data", "private_keys.csv"); -} - -function readKeysPath(): string { - return process.env.PRIVATE_KEYS_FILE ?? defaultKeysPath(); -} - -function parseCsv(text: string): ReadonlyArray { - const lines = text - .split(/\r?\n/g) - .map((l) => l.trim()) - .filter((l) => l.length > 0); - - const out: string[] = []; - for (const line of lines) { - if (line.toLowerCase() === "privatekey") { - continue; - } - // One-column CSV: privateKey - out.push(line); - } - return out; -} - -function parseJson(text: string): ReadonlyArray { - const parsed: unknown = JSON.parse(text); - if (Array.isArray(parsed)) { - const out: string[] = []; - for (const item of parsed) { - if (typeof item === "string") { - out.push(item); - continue; - } - if ( - item && - typeof item === "object" && - typeof (item as Record).privateKey === "string" - ) { - out.push((item as Record).privateKey as string); - } - } - return out; - } - throw new Error("JSON must be an array of strings or objects { privateKey }"); -} - -export function loadPrivateKeys(): PrivateKeySource { - const sourcePath = readKeysPath(); - const text = readFileSync(sourcePath, "utf8"); - const keys = sourcePath.endsWith(".json") ? 
parseJson(text) : parseCsv(text); - if (keys.length === 0) { - throw new Error(`No private keys found in ${sourcePath}`); - } - return { keys, sourcePath }; -} - -let cache: PrivateKeySource | undefined; -let idx = 0; - -export function nextPrivateKey(): Readonly<{ - privateKey: string; - sourcePath: string; -}> { - if (!cache) { - cache = loadPrivateKeys(); - } - const keys = cache.keys; - const privateKey = keys[idx % keys.length]; - if (!privateKey) { - throw new Error( - `No private key available at index ${idx} from ${cache.sourcePath}` - ); - } - idx += 1; - return { privateKey, sourcePath: cache.sourcePath }; -} diff --git a/src/processors/account-derive.ts b/src/processors/account-derive.ts new file mode 100644 index 0000000..fe43415 --- /dev/null +++ b/src/processors/account-derive.ts @@ -0,0 +1,136 @@ +import { + cacheAccountIndex, + selectAccountIndex, +} from "../helpers/accountIndex.js"; +import { deriveAccountFromMnemonic } from "../helpers/accounts.js"; +import { + ensureScenarioVars, + ensureVars, + requireVarString, + persistVars, + type ArtilleryContext, + type ArtilleryEvents, + type Done, +} from "../helpers/artillery.js"; +import { toError } from "../helpers/errors.js"; +import { getLogger } from "../log.js"; +import { createEmitter } from "../helpers/metrics.js"; + +/** + * Fetch the next unique account index from the local index allocator service. + * + * - `scripts/run-scenario.ts` starts a tiny HTTP server per test run. + * - It exposes `GET /next` which returns `{ index: 0 }`, `{ index: 1 }`, ... + * - This is how we guarantee global uniqueness/sequentiality across Artillery VUs, + * even when Artillery runs VUs in multiple isolated JS sandboxes (where in-process + * counters or payload sequencing can duplicate). + * + * Timeout behavior: + * - Controlled by `INDEX_ALLOCATOR_TIMEOUT_MS` (default 2000ms) + * - Implemented via `AbortController` to avoid hanging a VU indefinitely. 
+ */ +async function fetchNextIndex(): Promise { + const baseUrl = process.env.INDEX_ALLOCATOR_URL?.trim(); + if (!baseUrl) { + throw new Error( + "Missing INDEX_ALLOCATOR_URL (index allocator not running)" + ); + } + + const timeoutMsRaw = process.env.INDEX_ALLOCATOR_TIMEOUT_MS?.trim(); + const timeoutMs = + timeoutMsRaw && timeoutMsRaw.length > 0 + ? Number.parseInt(timeoutMsRaw, 10) + : 2000; + const ms = Number.isFinite(timeoutMs) && timeoutMs > 0 ? timeoutMs : 2000; + + const ctrl = new AbortController(); + const t = setTimeout(() => ctrl.abort(), ms); + try { + const res = await fetch(`${baseUrl}/next`, { signal: ctrl.signal }); + if (!res.ok) { + throw new Error(`allocator /next failed: HTTP ${res.status}`); + } + const body = (await res.json()) as unknown; + if (!body || typeof body !== "object") { + throw new Error("allocator response is not an object"); + } + const idx = (body as { index?: unknown }).index; + if (!Number.isInteger(idx) || (idx as number) < 0) { + throw new Error(`allocator returned invalid index: ${String(idx)}`); + } + return idx as number; + } finally { + clearTimeout(t); + } +} + +/** + * Processor step: + * - pick an index (allocator if enabled, else ACCOUNT_MODE vars) + * - derive account from TEST_MNEMONIC + * - store derived privateKey + derivation info into context.vars + */ +export async function deriveAccount( + context: ArtilleryContext, + events: ArtilleryEvents, + done?: Done +): Promise { + const start = Date.now(); + try { + const m = createEmitter(context, events); + const logger = getLogger(); + const vars = ensureVars(context); + const scenarioVars = ensureScenarioVars(context); + + const mnemonic = + process.env.TEST_MNEMONIC?.trim() ?? + requireVarString(vars, "TEST_MNEMONIC"); + + // Cache: keep index stable for the duration of the VU. 
+ let selection = selectAccountIndex(vars); + if (process.env.INDEX_ALLOCATOR_URL?.trim()) { + // When allocator is enabled, override the selection once per VU + // (this avoids duplicates caused by Artillery sandboxing). + if (!Number.isInteger(vars.__accountIndex)) { + const idx = await fetchNextIndex(); + selection = { mode: "byIndex", index: idx, source: "allocator:/next" }; + persistVars(context, { accountIndex: idx }); + } + } + cacheAccountIndex(vars, selection); + cacheAccountIndex(scenarioVars, selection); + + const derived = deriveAccountFromMnemonic(mnemonic, selection.index); + + persistVars(context, { + privateKey: derived.privateKey, + __accountAddress: derived.account.address, + __derivationPath: derived.derivation.path, + }); + + logger.debug( + { + index: selection.index, + path: derived.derivation.path, + address: derived.account.address, + source: selection.source, + }, + "Derived account" + ); + + m.counter("init.derive.ok", 1); + m.histogram("init.derive.ms", Date.now() - start); + done?.(); + } catch (err) { + try { + const logger = getLogger(); + logger.error({ err }, "deriveAccount failed"); + } catch { + // ignore logger failures + } + const m = createEmitter(context, events); + m.counter("init.derive.err", 1); + done?.(toError(err)); + } +} diff --git a/src/processors/authentication.ts b/src/processors/authentication.ts new file mode 100644 index 0000000..88f4c06 --- /dev/null +++ b/src/processors/authentication.ts @@ -0,0 +1,108 @@ +import { MspClient, type Session } from "@storagehub-sdk/msp-client"; +import { parseNetworkName } from "../config.js"; +import { getLogger } from "../log.js"; +import { NETWORKS } from "../networks.js"; +import { toError } from "../helpers/errors.js"; +import { createViemWallet } from "../sdk/viemWallet.js"; +import { buildMspHttpClientConfig } from "../sdk/mspHttpConfig.js"; +import { privateKeyToAccount } from "viem/accounts"; +import { + ensureVars, + persistVars, + requireVarString, + type 
ArtilleryContext, + type ArtilleryEvents, + type Done, +} from "../helpers/artillery.js"; +import { ensure0xPrefix } from "../helpers/validation.js"; +import { createEmitter } from "../helpers/metrics.js"; + +/** + * Authentication step: SIWE + * + * Requirements: + * - `context.vars.privateKey` (0x-prefixed) + * + * Side effects: + * - sets `__siweSession` in context.vars + */ +export async function SIWE( + context: ArtilleryContext, + events: ArtilleryEvents, + done?: Done +): Promise { + const start = Date.now(); + try { + const m = createEmitter(context, events); + const logger = getLogger(); + const vars = ensureVars(context); + const networkName = parseNetworkName( + process.env.NETWORK?.trim() ?? requireVarString(vars, "NETWORK") + ); + const network = NETWORKS[networkName]; + + const pkRaw = requireVarString(vars, "privateKey"); + const pk = ensure0xPrefix(pkRaw, 32).toLowerCase() as `0x${string}`; + + const account = privateKeyToAccount(pk); + const walletClient = createViemWallet(network, account); + + const config = buildMspHttpClientConfig(network); + const mspClient = await MspClient.connect(config); + + // Use SDK directly for SIWE auth. + const session = await mspClient.auth.SIWE( + walletClient, + network.msp.siweDomain, + network.msp.siweUri + ); + persistVars(context, { + __siweSession: session satisfies Readonly, + }); + + logger.debug({ address: session.user.address }, "SIWE authenticated"); + + m.counter("auth.siwe.ok", 1); + m.histogram("auth.siwe.ms", Date.now() - start); + done?.(); + } catch (err) { + try { + const logger = getLogger(); + logger.error({ err }, "SIWE failed"); + } catch { + // ignore logger failures + } + const m = createEmitter(context, events); + m.counter("auth.siwe.err", 1); + done?.(toError(err)); + } +} + +/** + * Authentication step: SIWX (dummy placeholder) + * + * This is intentionally a no-op/dummy function so we can evolve it later without + * changing the test flow structure. 
+ * + * Side effects: + * - sets `__siwxToken` in context.vars + */ +export async function SIWX( + context: ArtilleryContext, + events: ArtilleryEvents, + done?: Done +): Promise { + try { + const m = createEmitter(context, events); + const vars = ensureVars(context); + const now = Date.now(); + // Dummy token, not used for real auth. + vars.__siwxToken = `siwx_dummy_${now}`; + m.counter("auth.siwx.ok", 1); + done?.(); + } catch (err) { + const m = createEmitter(context, events); + m.counter("auth.siwx.err", 1); + done?.(toError(err)); + } +} diff --git a/src/processors/connect.ts b/src/processors/connect.ts deleted file mode 100644 index 267d6be..0000000 --- a/src/processors/connect.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { readEnv } from "../config.js"; -import { getLogger } from "../log.js"; -import { - authenticateWithSiwe, - connectMsp, - validateMspConnection, -} from "../sdk/msp.js"; -import { initWalletFromPrivateKey, to0xPrivateKey } from "../sdk/wallet.js"; -import { NETWORKS } from "../networks.js"; -import { nextPrivateKey } from "../privateKeys.js"; -import { nextWalletFromPool } from "../sdk/walletPool.js"; - -type Done = (error?: Error) => void; - -type ArtilleryEvents = Readonly<{ - emit: (type: string, name: string, value: number) => void; -}>; - -type ArtilleryContext = Readonly<{ - vars?: Record; -}>; - -function toError(err: unknown): Error { - if (err instanceof Error) { - return err; - } - return new Error(typeof err === "string" ? 
err : "Unknown error"); -} - -function readVarString(ctx: ArtilleryContext, key: string): string { - const v = ctx.vars?.[key]; - if (typeof v !== "string" || v.length === 0) { - throw new Error(`Missing or invalid VU var: ${key}`); - } - return v; -} - -function readPrivateKeyForVu(ctx: ArtilleryContext): string { - const fromVars = ctx.vars?.privateKey; - if (typeof fromVars === "string" && fromVars.length > 0) { - return fromVars; - } - const fallback = nextPrivateKey(); - getLogger().warn( - { sourcePath: fallback.sourcePath }, - "privateKey not provided by Artillery vars; using fallback from file" - ); - return fallback.privateKey; -} - -async function maybeClose(obj: unknown): Promise { - if (!obj || typeof obj !== "object") { - return; - } - const record = obj as Record; - const close = record.close; - const destroy = record.destroy; - - const fn = - typeof close === "function" - ? close - : typeof destroy === "function" - ? destroy - : undefined; - if (!fn) { - return; - } - await Promise.resolve(fn()); -} - -export async function connectClients( - context: ArtilleryContext, - events: ArtilleryEvents, - done?: Done -): Promise { - try { - const env = readEnv(); - const logger = getLogger(); - - const privateKeyRaw = readPrivateKeyForVu(context); - const privateKey = to0xPrivateKey(privateKeyRaw); - - const network = NETWORKS[env.network]; - const { walletClient } = context.vars?.privateKey - ? 
initWalletFromPrivateKey(network, privateKey) - : nextWalletFromPool(network); - const mspConn = await connectMsp(env, logger); - - const mspStart = Date.now(); - await validateMspConnection(mspConn, logger); - events.emit("counter", "sdk.msp.connect.ok", 1); - events.emit("histogram", "sdk.msp.connect.ms", Date.now() - mspStart); - - const authStart = Date.now(); - await authenticateWithSiwe(mspConn, env, walletClient, logger); - events.emit("counter", "sdk.msp.siwe.ok", 1); - events.emit("histogram", "sdk.msp.siwe.ms", Date.now() - authStart); - - // Option A teardown: best-effort (SDK clients may not expose explicit close) - try { - await maybeClose(mspConn.client); - events.emit("counter", "sdk.disconnect.ok", 1); - } catch (err) { - events.emit("counter", "sdk.disconnect.error", 1); - throw err; - } - - if (typeof done === "function") { - done(); - } - } catch (err) { - const error = toError(err); - events.emit("counter", "sdk.connect.error", 1); - if (typeof done === "function") { - done(error); - return; - } - throw error; - } -} diff --git a/src/processors/download.ts b/src/processors/download.ts index 28352df..11a2b0f 100644 --- a/src/processors/download.ts +++ b/src/processors/download.ts @@ -1,64 +1,36 @@ import { Readable } from "node:stream"; import { readEnv } from "../config.js"; import { getLogger } from "../log.js"; -import { authenticateWithSiwe, connectMsp } from "../sdk/msp.js"; -import { initWalletFromPrivateKey, to0xPrivateKey } from "../sdk/wallet.js"; +import { MspClient, type Session } from "@storagehub-sdk/msp-client"; import { NETWORKS } from "../networks.js"; -import { nextPrivateKey } from "../privateKeys.js"; - -type ArtilleryEvents = Readonly<{ - emit: (type: string, name: string, value: number) => void; -}>; - -type ArtilleryContext = Readonly<{ - vars?: Record; -}>; - -function getFileKey(): string { - const key = process.env.FILE_KEY; - if (!key) { - throw new Error("FILE_KEY env var is required"); - } - return key; -} +import { 
toError } from "../helpers/errors.js"; +import { readRequiredEnv } from "../helpers/env.js"; +import { buildMspHttpClientConfig } from "../sdk/mspHttpConfig.js"; +import { createEmitter } from "../helpers/metrics.js"; +import { + getPersistedVar, + type ArtilleryContext, + type ArtilleryEvents, +} from "../helpers/artillery.js"; export async function downloadFile( context: ArtilleryContext, events: ArtilleryEvents ): Promise { + const m = createEmitter(context, events); const logger = getLogger(); const env = readEnv(); const network = NETWORKS[env.network]; - const fileKey = getFileKey(); - - // Get private key (from Artillery vars or fallback) - const privateKeyRaw = - typeof context.vars?.privateKey === "string" && - context.vars.privateKey.length > 0 - ? context.vars.privateKey - : nextPrivateKey().privateKey; + const fileKey = readRequiredEnv("FILE_KEY"); - const privateKey = to0xPrivateKey(privateKeyRaw); - const { walletClient } = initWalletFromPrivateKey(network, privateKey); - - // Connect and authenticate - const conn = await connectMsp(env, logger); - - const siweStart = Date.now(); - try { - await authenticateWithSiwe(conn, env, walletClient, logger); - events.emit("counter", "download.siwe.ok", 1); - events.emit("histogram", "download.siwe.ms", Date.now() - siweStart); - } catch (err) { - events.emit("counter", "download.siwe.err", 1); - logger.error({ err }, "siwe failed"); - throw err; - } + const session = getPersistedVar(context, "__siweSession") as Session; + const config = buildMspHttpClientConfig(network); + const client = await MspClient.connect(config, async () => session); // Download file const dlStart = Date.now(); try { - const file = await conn.client.files.downloadFile(fileKey); + const file = await client.files.downloadFile(fileKey); if (!file?.stream) { throw new Error("downloadFile returned no stream"); } @@ -73,13 +45,14 @@ export async function downloadFile( totalBytes += (chunk as Buffer).length; } - events.emit("counter", 
"download.file.ok", 1); - events.emit("histogram", "download.file.ms", Date.now() - dlStart); - events.emit("histogram", "download.bytes", totalBytes); + m.counter("download.file.ok", 1); + m.histogram("download.file.ms", Date.now() - dlStart); + m.histogram("download.bytes", totalBytes); logger.info({ fileKey, totalBytes }, "download complete"); } catch (err) { - events.emit("counter", "download.file.err", 1); - logger.error({ err, fileKey }, "download failed"); - throw err; + m.counter("download.file.err", 1); + const error = toError(err); + logger.error({ err: error, fileKey }, "download failed"); + throw error; } } diff --git a/src/processors/examples.ts b/src/processors/examples.ts new file mode 100644 index 0000000..6c75ab9 --- /dev/null +++ b/src/processors/examples.ts @@ -0,0 +1,69 @@ +import { MspClient, type Session } from "@storagehub-sdk/msp-client"; +import { getLogger } from "../log.js"; +import { NETWORKS } from "../networks.js"; +import { toError } from "../helpers/errors.js"; +import { buildMspHttpClientConfig } from "../sdk/mspHttpConfig.js"; +import { + ensureVars, + getPersistedVar, + requireVarString, + type ArtilleryContext, + type ArtilleryEvents, + type Done, +} from "../helpers/artillery.js"; +import { createEmitter } from "../helpers/metrics.js"; +import { readEnv } from "../config.js"; + +// Re-export an "init SIWE" helper for example scenarios. +export { SIWE as initSiwe } from "./authentication.js"; + +/** + * Example action step: + * - recreate MspClient using the stored session + * - call getProfile + * + * This demonstrates the “init -> actions” split without keeping an MspClient instance in memory. 
+ */ +export async function actionGetProfile( + context: ArtilleryContext, + events: ArtilleryEvents, + done?: Done +): Promise { + const start = Date.now(); + try { + const m = createEmitter(context, events); + const logger = getLogger(); + const env = readEnv(); + const network = NETWORKS[env.network]; + + const session = getPersistedVar(context, "__siweSession") as Session; + const config = buildMspHttpClientConfig(network); + const client = await MspClient.connect(config, async () => session); + + const profile = await client.auth.getProfile(); + logger.debug( + { + address: session.user.address, + profile: { + address: profile.address, + ens: profile.ens, + }, + }, + "action getProfile ok" + ); + + m.counter("action.getProfile.ok", 1); + m.histogram("action.getProfile.ms", Date.now() - start); + done?.(); + } catch (err) { + try { + const logger = getLogger(); + logger.error({ err }, "actionGetProfile failed"); + } catch { + // ignore logger failures + } + const m = createEmitter(context, events); + m.counter("action.getProfile.err", 1); + done?.(toError(err)); + } +} diff --git a/src/processors/index.ts b/src/processors/index.ts new file mode 100644 index 0000000..62c88a3 --- /dev/null +++ b/src/processors/index.ts @@ -0,0 +1,18 @@ +// Single entrypoint for all Artillery processor functions. +// +// All Artillery YAML files should point `config.processor` to: +// `../dist/src/processors/index.js` +// +// This module re-exports: +// - common setup steps (e.g. pick index + derive private key, SIWE auth) +// - action steps (e.g. getProfile, download, connect, unauth health/info) +// +// Keeping one processor entrypoint avoids per-scenario re-exports and keeps YAMLs consistent. 
+ +export * from "./account-derive.js"; +export * from "./authentication.js"; +export * from "./download.js"; +export * from "./metrics.js"; +export * from "./msp-unauth.js"; +export * from "./siwe-bootstrap.js"; +export * from "./examples.js"; diff --git a/src/processors/metrics.ts b/src/processors/metrics.ts new file mode 100644 index 0000000..f9aad61 --- /dev/null +++ b/src/processors/metrics.ts @@ -0,0 +1,41 @@ +import { toError } from "../helpers/errors.js"; +import { + persistVars, + type ArtilleryContext, + type ArtilleryEvents, + type Done, +} from "../helpers/artillery.js"; + +/** + * Utility step: mute metric emissions (counters/histograms) for subsequent steps. + * Useful to run init-like steps without polluting Artillery summaries. + */ +export async function muteMetrics( + context: ArtilleryContext, + _events: ArtilleryEvents, + done?: Done +): Promise { + try { + // Policy: muted => emit only error counters, drop ok counters + histograms. + persistVars(context, { __muteMetrics: true }); + done?.(); + } catch (err) { + done?.(toError(err)); + } +} + +/** + * Utility step: unmute metric emissions. 
+ */ +export async function unmuteMetrics( + context: ArtilleryContext, + _events: ArtilleryEvents, + done?: Done +): Promise { + try { + persistVars(context, { __muteMetrics: false }); + done?.(); + } catch (err) { + done?.(toError(err)); + } +} diff --git a/src/processors/msp-unauth.ts b/src/processors/msp-unauth.ts index 121cc3e..98430ca 100644 --- a/src/processors/msp-unauth.ts +++ b/src/processors/msp-unauth.ts @@ -1,75 +1,57 @@ import { MspClient } from "@storagehub-sdk/msp-client"; import type { HttpClientConfig } from "@storagehub-sdk/core"; -import type { Logger } from "pino"; import { getLogger } from "../log.js"; import { readEnv } from "../config.js"; import { NETWORKS } from "../networks.js"; -import type { Env } from "../config.js"; - -type ArtilleryEvents = Readonly<{ - emit: (type: string, name: string, value: number) => void; -}>; - -type ArtilleryContext = Readonly<{ - vars?: Record; -}>; - -function sleep(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -function buildHttpConfig(env: Env): HttpClientConfig { - const network = NETWORKS[env.network]; - const base: { baseUrl: string; timeoutMs?: number } = { - baseUrl: network.msp.baseUrl, - }; - if (typeof network.msp.timeoutMs === "number") { - base.timeoutMs = network.msp.timeoutMs; - } - const overrideRaw = process.env.MSP_TIMEOUT_MS; - if (overrideRaw && overrideRaw.length > 0) { - const n = Number.parseInt(overrideRaw, 10); - if (Number.isFinite(n)) { - base.timeoutMs = n; - } - } - return base; -} - -async function connectUnauth(env: Env, logger: Logger): Promise { - const config = buildHttpConfig(env); - logger.info({ baseUrl: config.baseUrl }, "msp unauth connect"); - return await MspClient.connect(config, async () => undefined); -} - -export async function mspUnauthLoad( +import { createEmitter } from "../helpers/metrics.js"; +import type { + ArtilleryContext, + ArtilleryEvents, +} from "../helpers/artillery.js"; +import { buildMspHttpClientConfig } 
from "../sdk/mspHttpConfig.js"; + +export async function getHealth( context: ArtilleryContext, events: ArtilleryEvents ): Promise<void> { + const m = createEmitter(context, events); const logger = getLogger(); const env = readEnv(); + const network = NETWORKS[env.network]; - const client = await connectUnauth(env, logger); + const config = buildMspHttpClientConfig(network); + const client = await MspClient.connect(config); const healthStart = Date.now(); try { await client.info.getHealth(); - events.emit("counter", "msp.health.ok", 1); - events.emit("histogram", "msp.health.ms", Date.now() - healthStart); + m.counter("msp.health.ok", 1); + m.histogram("msp.health.ms", Date.now() - healthStart); } catch (err) { - events.emit("counter", "msp.req.err", 1); + m.counter("msp.req.err", 1); logger.debug({ err }, "msp unauth request error"); } +} + +export async function getInfo( + context: ArtilleryContext, + events: ArtilleryEvents +): Promise<void> { + const m = createEmitter(context, events); + const logger = getLogger(); + const env = readEnv(); + const network = NETWORKS[env.network]; - await sleep(1000); + const config = buildMspHttpClientConfig(network); + const client = await MspClient.connect(config); const infoStart = Date.now(); try { await client.info.getInfo(); - events.emit("counter", "msp.info.ok", 1); - events.emit("histogram", "msp.info.ms", Date.now() - infoStart); + m.counter("msp.info.ok", 1); + m.histogram("msp.info.ms", Date.now() - infoStart); } catch (err) { - events.emit("counter", "msp.req.err", 1); + m.counter("msp.req.err", 1); logger.debug({ err }, "msp unauth request error"); } } diff --git a/src/processors/siwe-bootstrap.ts b/src/processors/siwe-bootstrap.ts new file mode 100644 index 0000000..fda563b --- /dev/null +++ b/src/processors/siwe-bootstrap.ts @@ -0,0 +1,76 @@ +import { getLogger } from "../log.js"; +import { + type AccountIndexSelection, + cacheAccountIndex, + selectAccountIndex, +} from "../helpers/accountIndex.js"; +import {
deriveAccountFromMnemonic } from "../helpers/accounts.js"; +import { toError } from "../helpers/errors.js"; +import { readBoolEnv, readRequiredEnv } from "../helpers/env.js"; + +type Done = (error?: Error) => void; + +type ArtilleryEvents = Readonly<{ + emit: (type: string, name: string, value: number) => void; +}>; + +type ArtilleryContext = { + vars?: Record<string, unknown>; +}; + +/** + * Phase 1 test function: derive account from mnemonic + selected index and print: + * - index + * - full derivation path + * - address + * - private key (ONLY if PRINT_DERIVED_PRIVATE_KEY=true) + */ +export async function deriveAndPrint( + context: ArtilleryContext, + _events: ArtilleryEvents, + done?: Done +): Promise<void> { + try { + const logger = getLogger(); + if (!context.vars) { + context.vars = {}; + } + const vars = context.vars; + + const mnemonic = readRequiredEnv("TEST_MNEMONIC"); + const selection: AccountIndexSelection = selectAccountIndex(vars); + cacheAccountIndex(vars, selection); + + const derived = deriveAccountFromMnemonic(mnemonic, selection.index); + vars.__accountAddress = derived.account.address; + vars.__derivationPath = derived.derivation.path; + + const printPk = readBoolEnv("PRINT_DERIVED_PRIVATE_KEY"); + if (printPk) { + logger.info( + { + index: selection.index, + path: derived.derivation.path, + address: derived.account.address, + privateKey: derived.privateKey, + }, + "derived account (PRINT_DERIVED_PRIVATE_KEY enabled)" + ); + } else { + logger.info( + { + index: selection.index, + path: derived.derivation.path, + address: derived.account.address, + }, + "derived account" + ); + } + + if (typeof done === "function") done(); + } catch (err) { + const error = toError(err); + if (typeof done === "function") return done(error); + throw error; + } +} diff --git a/src/sdk/msp.ts b/src/sdk/msp.ts deleted file mode 100644 index 5eee65d..0000000 --- a/src/sdk/msp.ts +++ /dev/null @@ -1,76 +0,0 @@ -import type { HttpClientConfig } from "@storagehub-sdk/core"; -import {
MspClient, type Session } from "@storagehub-sdk/msp-client"; -import type { Env } from "../config.js"; -import type { Logger } from "pino"; -import { NETWORKS } from "../networks.js"; -import type { WalletClient } from "viem"; - -export type MspConnection = Readonly<{ - client: MspClient; - setSession: (s: Readonly<Session>) => void; -}>; - -export type MspAuthResult = Readonly<{ - session: Readonly<Session>; - address: `0x${string}`; -}>; - -export async function connectMsp( - env: Env, - logger?: Logger -): Promise<MspConnection> { - const network = NETWORKS[env.network]; - const { msp } = network; - - const config = { - baseUrl: msp.baseUrl, - ...(typeof msp.timeoutMs === "number" ? { timeoutMs: msp.timeoutMs } : {}), - } satisfies HttpClientConfig; - - logger?.info({ baseUrl: config.baseUrl }, "msp connect"); - - let session: Readonly<Session> | undefined; - const sessionProvider = async () => session; - const client = await MspClient.connect(config, sessionProvider); - - const setSession = (s: Readonly<Session>): void => { - session = s; - }; - - return { client, setSession }; -} - -export async function validateMspConnection( - conn: MspConnection, - logger?: Logger -): Promise<void> { - logger?.debug("msp getHealth"); - const health = await conn.client.info.getHealth(); - logger?.debug({ health }, "msp health response"); - logger?.info("msp health ok"); -} - -export async function authenticateWithSiwe( - conn: MspConnection, - env: Env, - walletClient: WalletClient, - logger?: Logger -): Promise<MspAuthResult> { - const network = NETWORKS[env.network]; - const address = (await walletClient.getAddresses())[0]; - if (!address) { - throw new Error("WalletClient has no address"); - } - - logger?.info({ address }, "msp siwe start"); - const session = await conn.client.auth.SIWE( - walletClient, - network.msp.siweDomain, - network.msp.siweUri - ); - - conn.setSession(session); - - logger?.info({ address }, "msp siwe ok"); - return { session, address }; -} diff --git a/src/sdk/mspHttpConfig.ts b/src/sdk/mspHttpConfig.ts new file mode
100644 index 0000000..783b6ce --- /dev/null +++ b/src/sdk/mspHttpConfig.ts @@ -0,0 +1,17 @@ +import type { HttpClientConfig } from "@storagehub-sdk/core"; +import type { NetworkConfig } from "../networks.js"; + +/** + * Build the HttpClientConfig for connecting to the MSP service for a given network. + * Intentionally tiny so processors can stay "ultra clean". + */ +export function buildMspHttpClientConfig( + network: NetworkConfig +): HttpClientConfig { + return { + baseUrl: network.msp.baseUrl, + ...(typeof network.msp.timeoutMs === "number" + ? { timeoutMs: network.msp.timeoutMs } + : {}), + } satisfies HttpClientConfig; +} diff --git a/src/sdk/viemWallet.ts b/src/sdk/viemWallet.ts new file mode 100644 index 0000000..51e6106 --- /dev/null +++ b/src/sdk/viemWallet.ts @@ -0,0 +1,38 @@ +import { + createWalletClient, + defineChain, + http, + type Chain, + type WalletClient, +} from "viem"; +import type { Account } from "viem/accounts"; +import type { NetworkConfig } from "../networks.js"; + +export type ViemChainAndTransport = Readonly<{ + chain: Chain; + transportUrl: string; +}>; + +export function toViemChain(network: NetworkConfig): ViemChainAndTransport { + const chain = defineChain({ + id: network.chain.id, + name: network.chain.name, + network: network.name, + nativeCurrency: network.chain.nativeCurrency, + rpcUrls: { default: { http: [network.chain.evmRpcUrl] } }, + }); + + return { chain, transportUrl: network.chain.evmRpcUrl }; +} + +export function createViemWallet( + network: NetworkConfig, + account: Account +): WalletClient { + const { chain, transportUrl } = toViemChain(network); + return createWalletClient({ + chain, + account, + transport: http(transportUrl), + }); +} diff --git a/src/sdk/wallet.ts b/src/sdk/wallet.ts deleted file mode 100644 index 7476560..0000000 --- a/src/sdk/wallet.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { - createWalletClient, - defineChain, - http, - type Chain, - type WalletClient, -} from "viem"; -import { 
privateKeyToAccount } from "viem/accounts"; -import type { NetworkConfig } from "../networks.js"; - -export type WalletInitResult = Readonly<{ - chain: Chain; - walletClient: WalletClient; - address: `0x${string}`; -}>; - -export function to0xPrivateKey(raw: string): `0x${string}` { - return raw.startsWith("0x") - ? (raw as `0x${string}`) - : (`0x${raw}` as `0x${string}`); -} - -export function initWalletFromPrivateKey( - network: NetworkConfig, - privateKey: `0x${string}` -): WalletInitResult { - const chain = defineChain({ - id: network.chain.id, - name: network.chain.name, - network: network.name, - nativeCurrency: { name: "Token", symbol: "TOKEN", decimals: 18 }, - rpcUrls: { default: { http: [network.chain.evmRpcUrl] } }, - }); - - const account = privateKeyToAccount(privateKey); - const walletClient = createWalletClient({ - chain, - account, - transport: http(network.chain.evmRpcUrl), - }); - - return { chain, walletClient, address: account.address }; -} diff --git a/src/sdk/walletPool.ts b/src/sdk/walletPool.ts deleted file mode 100644 index 79ab65d..0000000 --- a/src/sdk/walletPool.ts +++ /dev/null @@ -1,75 +0,0 @@ -import type { NetworkConfig } from "../networks.js"; -import { loadPrivateKeys } from "../privateKeys.js"; -import { - initWalletFromPrivateKey, - to0xPrivateKey, - type WalletInitResult, -} from "./wallet.js"; - -export type WalletPool = Readonly<{ - sourcePath: string; - wallets: ReadonlyArray<WalletInitResult>; - size: number; -}>; - -function readPoolSizeLimit(): number | undefined { - const raw = process.env.WALLET_POOL_SIZE; - if (!raw) { - return undefined; - } - const n = Number.parseInt(raw, 10); - if (!Number.isFinite(n) || n <= 0) { - throw new Error( - `Invalid WALLET_POOL_SIZE: ${raw} (expected positive integer)` - ); - } - return n; -} - -function buildPoolKey(network: NetworkConfig, sourcePath: string): string { - return `${network.name}::${sourcePath}`; -} - -const pools = new Map<string, { pool: WalletPool; nextIdx: number }>(); - -export function getWalletPool(network: NetworkConfig):
WalletPool { - const { keys, sourcePath } = loadPrivateKeys(); - const key = buildPoolKey(network, sourcePath); - const existing = pools.get(key); - if (existing) { - return existing.pool; - } - - const limit = readPoolSizeLimit(); - const selected = limit ? keys.slice(0, Math.min(limit, keys.length)) : keys; - const wallets = selected.map((k) => - initWalletFromPrivateKey(network, to0xPrivateKey(k)) - ); - - const pool: WalletPool = { - sourcePath, - wallets, - size: wallets.length, - }; - - pools.set(key, { pool, nextIdx: 0 }); - return pool; -} - -export function nextWalletFromPool(network: NetworkConfig): WalletInitResult { - const pool = getWalletPool(network); - const key = buildPoolKey(network, pool.sourcePath); - const entry = pools.get(key); - if (!entry) { - // Should never happen because getWalletPool creates it - throw new Error("Wallet pool not initialized"); - } - - const idx = entry.nextIdx % pool.wallets.length; - const wallet = pool.wallets[idx]; - if (!wallet) { - throw new Error(`Wallet pool is empty (source: ${pool.sourcePath})`); - } - entry.nextIdx += 1; - return wallet; -}