diff --git a/examples/api.js b/examples/api.js index e5211ab8..40fe1c39 100644 --- a/examples/api.js +++ b/examples/api.js @@ -1,4 +1,4 @@ -import { cidFromBytes } from "./common.js"; +import { cidFromBytes } from "./cid_dag_metadata.js"; import { Binary } from '@polkadot-api/substrate-bindings'; export async function authorizeAccount(typedApi, sudoSigner, who, transactions, bytes) { diff --git a/examples/authorize_and_store.js b/examples/authorize_and_store.js index 3581c5e3..431efa06 100644 --- a/examples/authorize_and_store.js +++ b/examples/authorize_and_store.js @@ -2,7 +2,8 @@ import { ApiPromise, WsProvider } from '@polkadot/api'; import { Keyring } from '@polkadot/keyring'; import { cryptoWaitReady } from '@polkadot/util-crypto'; import { create } from 'ipfs-http-client'; -import { waitForNewBlock, cidFromBytes } from './common.js'; +import { waitForNewBlock } from './common.js'; +import { cidFromBytes } from "./cid_dag_metadata.js"; async function authorizeAccount(api, pair, who, transactions, bytes) { const tx = api.tx.transactionStorage.authorizeAccount(who, transactions, bytes); diff --git a/examples/authorize_and_store_papi.js b/examples/authorize_and_store_papi.js index e9a6d015..494999b5 100644 --- a/examples/authorize_and_store_papi.js +++ b/examples/authorize_and_store_papi.js @@ -3,7 +3,8 @@ import { createClient } from 'polkadot-api'; import { getWsProvider } from 'polkadot-api/ws-provider'; import { cryptoWaitReady } from '@polkadot/util-crypto'; import { authorizeAccount, fetchCid, store} from './api.js'; -import { setupKeyringAndSigners, cidFromBytes } from './common.js'; +import { setupKeyringAndSigners } from './common.js'; +import { cidFromBytes } from "./cid_dag_metadata.js"; import { bulletin } from './.papi/descriptors/dist/index.mjs'; const NODE_WS = 'ws://localhost:10000'; diff --git a/examples/authorize_and_store_papi_smoldot.js b/examples/authorize_and_store_papi_smoldot.js index 13ab30d5..bf805ac8 100644 --- 
a/examples/authorize_and_store_papi_smoldot.js +++ b/examples/authorize_and_store_papi_smoldot.js @@ -5,7 +5,8 @@ import { createClient } from 'polkadot-api'; import { getSmProvider } from 'polkadot-api/sm-provider'; import { cryptoWaitReady } from '@polkadot/util-crypto'; import { authorizeAccount, fetchCid, store } from './api.js'; -import { setupKeyringAndSigners, cidFromBytes } from './common.js'; +import { setupKeyringAndSigners } from './common.js'; +import { cidFromBytes } from "./cid_dag_metadata.js"; import { bulletin } from './.papi/descriptors/dist/index.mjs'; // Constants diff --git a/examples/cid_dag_metadata.js b/examples/cid_dag_metadata.js new file mode 100644 index 00000000..67c5c724 --- /dev/null +++ b/examples/cid_dag_metadata.js @@ -0,0 +1,78 @@ +import { blake2AsU8a } from '@polkadot/util-crypto'; +import * as multihash from 'multiformats/hashes/digest'; +import { CID } from 'multiformats/cid'; +import * as dagPB from '@ipld/dag-pb'; +import { UnixFS } from 'ipfs-unixfs'; + +/** + * Build a UnixFS DAG-PB file node from raw chunks. 
+ * + * (Default mhCode 0x12 = sha2-256 — NOTE(review): cidFromBytes below only handles 0xb220 (blake2b-256) and throws for anything else, so the default will fail; pass 0xb220 or extend cidFromBytes before relying on the default) + */ +export async function buildUnixFSDagPB(chunks, mhCode = 0x12) { + if (!chunks?.length) { + throw new Error('❌ buildUnixFSDag: chunks[] is empty') + } + + // UnixFS blockSizes = sizes of child blocks + const blockSizes = chunks.map(c => c.len) + + console.log(`🧩 Building UnixFS DAG from chunks: + • totalChunks: ${chunks.length} + • blockSizes: ${blockSizes.join(', ')}`) + + // Build UnixFS file metadata (no inline data here) + const fileData = new UnixFS({ + type: 'file', + blockSizes + }) + + // DAG-PB node: our file with chunk links + const dagNode = dagPB.prepare({ + Data: fileData.marshal(), + Links: chunks.map(c => ({ + Name: '', + Tsize: c.len, + Hash: c.cid + })) + }) + + // Encode DAG-PB + const dagBytes = dagPB.encode(dagNode) + + // Hash DAG to produce CIDv1 + const rootCid = await cidFromBytes(dagBytes, dagPB.code, mhCode) + + console.log(`✅ DAG root CID: ${rootCid.toString()}`) + + return { rootCid, dagBytes } +} + +/** + * Create CID for data. + * Default to `0x55 (raw)` with blake2b_256 hash. 
+ * + * 0xb220: + * - 0xb2 = the multihash algorithm family for BLAKE2b + * - 0x20 = the digest length in bytes (32 bytes = 256 bits) + * + * See: https://github.com/multiformats/multicodec/blob/master/table.csv + */ +export async function cidFromBytes(bytes, cidCodec = 0x55, mhCode = 0xb220) { + console.log(`[CID]: Using cidCodec: ${cidCodec} and mhCode: ${mhCode}`); + let mh; + switch (mhCode) { + case 0xb220: // blake2b-256 + mh = multihash.create(mhCode, blake2AsU8a(bytes)); + break; + + default: + throw new Error("Unhandled multihash code: " + mhCode) + } + return CID.createV1(cidCodec, mh) +} + +export function convertCid(cid, cidCodec) { + const mh = cid.multihash; + return CID.createV1(cidCodec, mh); +} diff --git a/examples/common.js b/examples/common.js index 00365827..60a8da30 100644 --- a/examples/common.js +++ b/examples/common.js @@ -1,10 +1,5 @@ -import { blake2AsU8a } from '@polkadot/util-crypto' -import * as multihash from 'multiformats/hashes/digest' -import { CID } from 'multiformats/cid' import { Keyring } from '@polkadot/keyring'; import { getPolkadotSigner } from '@polkadot-api/signer'; -import * as dagPB from '@ipld/dag-pb' -import { UnixFS } from 'ipfs-unixfs' import { createCanvas } from "canvas"; import fs from "fs"; import assert from "assert"; @@ -15,35 +10,6 @@ export async function waitForNewBlock() { return new Promise(resolve => setTimeout(resolve, 7000)) } -/** - * Create CID for data. - * Default to `0x55 (raw)` with blake2b_256 hash. 
- * - * 0xb220: - * - 0xb2 = the multihash algorithm family for BLAKE2b - * - 0x20 = the digest length in bytes (32 bytes = 256 bits) - * - * See: https://github.com/multiformats/multicodec/blob/master/table.csv - */ -export async function cidFromBytes(bytes, cidCodec = 0x55, mhCode = 0xb220) { - console.log(`[CID]: Using cidCodec: ${cidCodec} and mhCode: ${mhCode}`); - let mh; - switch (mhCode) { - case 0xb220: // blake2b-256 - mh = multihash.create(mhCode, blake2AsU8a(bytes)); - break; - - default: - throw new Error("Unhandled multihash code: " + mhCode) - } - return CID.createV1(cidCodec, mh) -} - -export function convertCid(cid, cidCodec) { - const mh = cid.multihash; - return CID.createV1(cidCodec, mh); -} - /** * Creates a PAPI-compatible signer from a Keyring account */ @@ -70,50 +36,6 @@ export function setupKeyringAndSigners(sudoSeed, accountSeed) { }; } -/** - * Build a UnixFS DAG-PB file node from raw chunks. - * - * (By default with SHA2 multihash) - */ -export async function buildUnixFSDagPB(chunks, mhCode = 0x12) { - if (!chunks?.length) { - throw new Error('❌ buildUnixFSDag: chunks[] is empty') - } - - // UnixFS blockSizes = sizes of child blocks - const blockSizes = chunks.map(c => c.len) - - console.log(`🧩 Building UnixFS DAG from chunks: - • totalChunks: ${chunks.length} - • blockSizes: ${blockSizes.join(', ')}`) - - // Build UnixFS file metadata (no inline data here) - const fileData = new UnixFS({ - type: 'file', - blockSizes - }) - - // DAG-PB node: our file with chunk links - const dagNode = dagPB.prepare({ - Data: fileData.marshal(), - Links: chunks.map(c => ({ - Name: '', - Tsize: c.len, - Hash: c.cid - })) - }) - - // Encode DAG-PB - const dagBytes = dagPB.encode(dagNode) - - // Hash DAG to produce CIDv1 - const rootCid = await cidFromBytes(dagBytes, dagPB.code, mhCode) - - console.log(`✅ DAG root CID: ${rootCid.toString()}`) - - return { rootCid, dagBytes } -} - /** * Generates (dynamic) images based on the input text. 
*/ @@ -173,3 +95,26 @@ export function filesAreEqual(path1, path2) { assert.deepStrictEqual(data1[i], data2[i]) } } + +export async function fileToDisk(outputPath, fullBuffer) { + await new Promise((resolve, reject) => { + const ws = fs.createWriteStream(outputPath); + ws.write(fullBuffer); + ws.end(); + ws.on('finish', resolve); + ws.on('error', reject); + }); + console.log(`💾 File saved to: ${outputPath}`); +} + +export class NonceManager { + constructor(initialNonce) { + this.nonce = initialNonce; // BN instance from api.query.system.account + } + + getAndIncrement() { + const current = this.nonce; + this.nonce = this.nonce.addn(1); // increment BN + return current; + } +} diff --git a/examples/store_chunked_data.js b/examples/store_chunked_data.js index c77bd4ea..a72d55e3 100644 --- a/examples/store_chunked_data.js +++ b/examples/store_chunked_data.js @@ -7,8 +7,9 @@ import { create } from 'ipfs-http-client' import * as dagPB from '@ipld/dag-pb' import { TextDecoder } from 'util' import assert from "assert"; -import { waitForNewBlock, cidFromBytes, buildUnixFSDagPB, convertCid, generateTextImage, filesAreEqual } from './common.js' +import { waitForNewBlock, generateTextImage, filesAreEqual, fileToDisk, NonceManager } from './common.js' import { fetchCid } from "./api.js"; +import { buildUnixFSDagPB, cidFromBytes, convertCid } from "./cid_dag_metadata.js"; // ---- CONFIG ---- const WS_ENDPOINT = 'ws://127.0.0.1:10000' // Bulletin node @@ -72,17 +73,6 @@ async function retrieveMetadata(ipfs, metadataCid) { return metadataJson; } -async function fileToDisk(outputPath, fullBuffer) { - await new Promise((resolve, reject) => { - const ws = fs.createWriteStream(outputPath); - ws.write(fullBuffer); - ws.end(); - ws.on('finish', resolve); - ws.on('error', reject); - }); - console.log(`💾 File saved to: ${outputPath}`); -} - /** * Fetches all chunks listed in metdataJson, concatenates into a single file, * and saves to disk (or returns as Buffer). 
@@ -210,18 +200,6 @@ async function storeProof(api, sudoPair, pair, rootCID, dagFileBytes, nonceMgr, return { rawDagCid } } -class NonceManager { - constructor(initialNonce) { - this.nonce = initialNonce; // BN instance from api.query.system.account - } - - getAndIncrement() { - const current = this.nonce; - this.nonce = this.nonce.addn(1); // increment BN - return current; - } -} - async function authorizeStorage(api, sudoPair, pair, nonceMgr) { // Ensure enough quota. const auth = await api.query.transactionStorage.authorizations({ "Account": pair.address});