Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion examples/api.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { cidFromBytes } from "./common.js";
import { cidFromBytes } from "./cid_dag_metadata.js";
import { Binary } from '@polkadot-api/substrate-bindings';

export async function authorizeAccount(typedApi, sudoSigner, who, transactions, bytes) {
Expand Down
3 changes: 2 additions & 1 deletion examples/authorize_and_store.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@ import { ApiPromise, WsProvider } from '@polkadot/api';
import { Keyring } from '@polkadot/keyring';
import { cryptoWaitReady } from '@polkadot/util-crypto';
import { create } from 'ipfs-http-client';
import { waitForNewBlock, cidFromBytes } from './common.js';
import { waitForNewBlock } from './common.js';
import { cidFromBytes } from "./cid_dag_metadata.js";

async function authorizeAccount(api, pair, who, transactions, bytes) {
const tx = api.tx.transactionStorage.authorizeAccount(who, transactions, bytes);
Expand Down
3 changes: 2 additions & 1 deletion examples/authorize_and_store_papi.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@ import { createClient } from 'polkadot-api';
import { getWsProvider } from 'polkadot-api/ws-provider';
import { cryptoWaitReady } from '@polkadot/util-crypto';
import { authorizeAccount, fetchCid, store} from './api.js';
import { setupKeyringAndSigners, cidFromBytes } from './common.js';
import { setupKeyringAndSigners } from './common.js';
import { cidFromBytes } from "./cid_dag_metadata.js";
import { bulletin } from './.papi/descriptors/dist/index.mjs';

const NODE_WS = 'ws://localhost:10000';
Expand Down
3 changes: 2 additions & 1 deletion examples/authorize_and_store_papi_smoldot.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@ import { createClient } from 'polkadot-api';
import { getSmProvider } from 'polkadot-api/sm-provider';
import { cryptoWaitReady } from '@polkadot/util-crypto';
import { authorizeAccount, fetchCid, store } from './api.js';
import { setupKeyringAndSigners, cidFromBytes } from './common.js';
import { setupKeyringAndSigners } from './common.js';
import { cidFromBytes } from "./cid_dag_metadata.js";
import { bulletin } from './.papi/descriptors/dist/index.mjs';

// Constants
Expand Down
78 changes: 78 additions & 0 deletions examples/cid_dag_metadata.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
import { blake2AsU8a } from '@polkadot/util-crypto';
import * as multihash from 'multiformats/hashes/digest';
import { CID } from 'multiformats/cid';
import * as dagPB from '@ipld/dag-pb';
import { UnixFS } from 'ipfs-unixfs';

/**
* Build a UnixFS DAG-PB file node from raw chunks.
*
* (By default with SHA2 multihash)
*/
/**
 * Build a UnixFS DAG-PB file node from raw chunks.
 *
 * Each chunk is `{ cid, len }` — the CID of an already-stored child block and
 * its size in bytes. The chunks are linked, in order, under a single UnixFS
 * `file` node, which is then DAG-PB encoded and hashed into the root CID.
 *
 * (By default with SHA2 multihash — NOTE(review): the default `mhCode = 0x12`
 * only works if `cidFromBytes` handles 0x12; confirm that before relying on
 * the default.)
 *
 * @param {{cid: object, len: number}[]} chunks - child blocks, in file order
 * @param {number} mhCode - multihash code for the root CID (default 0x12)
 * @returns {Promise<{rootCid: object, dagBytes: Uint8Array}>} root CID and encoded DAG bytes
 * @throws {Error} when chunks is missing or empty
 */
export async function buildUnixFSDagPB(chunks, mhCode = 0x12) {
  if (!chunks?.length) {
    // Fixed: message previously said "buildUnixFSDag", not the real name.
    throw new Error('❌ buildUnixFSDagPB: chunks[] is empty')
  }

  // UnixFS blockSizes = sizes of child blocks
  const blockSizes = chunks.map(c => c.len)

  console.log(`🧩 Building UnixFS DAG from chunks:
  • totalChunks: ${chunks.length}
  • blockSizes: ${blockSizes.join(', ')}`)

  // Build UnixFS file metadata (no inline data here)
  const fileData = new UnixFS({
    type: 'file',
    blockSizes
  })

  // DAG-PB node: our file with chunk links
  const dagNode = dagPB.prepare({
    Data: fileData.marshal(),
    Links: chunks.map(c => ({
      Name: '',
      Tsize: c.len,
      Hash: c.cid
    }))
  })

  // Encode DAG-PB
  const dagBytes = dagPB.encode(dagNode)

  // Hash DAG to produce CIDv1
  const rootCid = await cidFromBytes(dagBytes, dagPB.code, mhCode)

  console.log(`✅ DAG root CID: ${rootCid.toString()}`)

  return { rootCid, dagBytes }
}

/**
* Create CID for data.
* Default to `0x55 (raw)` with blake2b_256 hash.
*
* 0xb220:
* - 0xb2 = the multihash algorithm family for BLAKE2b
* - 0x20 = the digest length in bytes (32 bytes = 256 bits)
*
* See: https://github.com/multiformats/multicodec/blob/master/table.csv
*/
/**
 * Create CID for data.
 * Default to `0x55 (raw)` with blake2b_256 hash.
 *
 * Supported multihash codes:
 * - 0xb220: BLAKE2b-256
 *   - 0xb2 = the multihash algorithm family for BLAKE2b
 *   - 0x20 = the digest length in bytes (32 bytes = 256 bits)
 * - 0x12: SHA2-256 — required because `buildUnixFSDagPB` defaults to
 *   `mhCode = 0x12`; previously this case always threw.
 *
 * See: https://github.com/multiformats/multicodec/blob/master/table.csv
 *
 * @param {Uint8Array} bytes - raw content to hash
 * @param {number} cidCodec - CID codec (default 0x55 = raw)
 * @param {number} mhCode - multihash code (default 0xb220 = blake2b-256)
 * @returns {Promise<object>} a CIDv1 over the digest
 * @throws {Error} for multihash codes other than 0xb220 and 0x12
 */
export async function cidFromBytes(bytes, cidCodec = 0x55, mhCode = 0xb220) {
  console.log(`[CID]: Using cidCodec: ${cidCodec} and mhCode: ${mhCode}`);
  let mh;
  switch (mhCode) {
    case 0xb220: // blake2b-256
      mh = multihash.create(mhCode, blake2AsU8a(bytes));
      break;

    case 0x12: { // sha2-256 — the default used by buildUnixFSDagPB
      const { createHash } = await import('node:crypto');
      const digest = createHash('sha256').update(bytes).digest();
      mh = multihash.create(mhCode, new Uint8Array(digest));
      break;
    }

    default:
      throw new Error("Unhandled multihash code: " + mhCode)
  }
  return CID.createV1(cidCodec, mh)
}

/**
 * Re-wrap an existing CID under a different codec, reusing its multihash.
 *
 * @param {object} cid - source CID whose digest is reused
 * @param {number} cidCodec - codec for the new CIDv1
 * @returns {object} a CIDv1 with the same multihash but the given codec
 */
export function convertCid(cid, cidCodec) {
  return CID.createV1(cidCodec, cid.multihash);
}
101 changes: 23 additions & 78 deletions examples/common.js
Original file line number Diff line number Diff line change
@@ -1,10 +1,5 @@
import { blake2AsU8a } from '@polkadot/util-crypto'
import * as multihash from 'multiformats/hashes/digest'
import { CID } from 'multiformats/cid'
import { Keyring } from '@polkadot/keyring';
import { getPolkadotSigner } from '@polkadot-api/signer';
import * as dagPB from '@ipld/dag-pb'
import { UnixFS } from 'ipfs-unixfs'
import { createCanvas } from "canvas";
import fs from "fs";
import assert from "assert";
Expand All @@ -15,35 +10,6 @@ export async function waitForNewBlock() {
return new Promise(resolve => setTimeout(resolve, 7000))
}

/**
* Create CID for data.
* Default to `0x55 (raw)` with blake2b_256 hash.
*
* 0xb220:
* - 0xb2 = the multihash algorithm family for BLAKE2b
* - 0x20 = the digest length in bytes (32 bytes = 256 bits)
*
* See: https://github.com/multiformats/multicodec/blob/master/table.csv
*/
export async function cidFromBytes(bytes, cidCodec = 0x55, mhCode = 0xb220) {
console.log(`[CID]: Using cidCodec: ${cidCodec} and mhCode: ${mhCode}`);
let mh;
switch (mhCode) {
case 0xb220: // blake2b-256
mh = multihash.create(mhCode, blake2AsU8a(bytes));
break;

default:
throw new Error("Unhandled multihash code: " + mhCode)
}
return CID.createV1(cidCodec, mh)
}

export function convertCid(cid, cidCodec) {
const mh = cid.multihash;
return CID.createV1(cidCodec, mh);
}

/**
* Creates a PAPI-compatible signer from a Keyring account
*/
Expand All @@ -70,50 +36,6 @@ export function setupKeyringAndSigners(sudoSeed, accountSeed) {
};
}

/**
* Build a UnixFS DAG-PB file node from raw chunks.
*
* (By default with SHA2 multihash)
*/
export async function buildUnixFSDagPB(chunks, mhCode = 0x12) {
if (!chunks?.length) {
throw new Error('❌ buildUnixFSDag: chunks[] is empty')
}

// UnixFS blockSizes = sizes of child blocks
const blockSizes = chunks.map(c => c.len)

console.log(`🧩 Building UnixFS DAG from chunks:
• totalChunks: ${chunks.length}
• blockSizes: ${blockSizes.join(', ')}`)

// Build UnixFS file metadata (no inline data here)
const fileData = new UnixFS({
type: 'file',
blockSizes
})

// DAG-PB node: our file with chunk links
const dagNode = dagPB.prepare({
Data: fileData.marshal(),
Links: chunks.map(c => ({
Name: '',
Tsize: c.len,
Hash: c.cid
}))
})

// Encode DAG-PB
const dagBytes = dagPB.encode(dagNode)

// Hash DAG to produce CIDv1
const rootCid = await cidFromBytes(dagBytes, dagPB.code, mhCode)

console.log(`✅ DAG root CID: ${rootCid.toString()}`)

return { rootCid, dagBytes }
}

/**
* Generates (dynamic) images based on the input text.
*/
Expand Down Expand Up @@ -173,3 +95,26 @@ export function filesAreEqual(path1, path2) {
assert.deepStrictEqual(data1[i], data2[i])
}
}

/**
 * Persist a buffer to disk at `outputPath` (overwriting any existing file).
 *
 * Uses `fs.promises.writeFile` instead of the previous hand-rolled write
 * stream: the original attached 'finish'/'error' listeners only after calling
 * `end()`, which works solely because Node emits those events asynchronously —
 * the one-call promise API is simpler and propagates errors naturally.
 *
 * @param {string} outputPath - destination file path
 * @param {Buffer|Uint8Array} fullBuffer - bytes to write
 * @returns {Promise<void>} resolves once the file is fully written
 */
export async function fileToDisk(outputPath, fullBuffer) {
  await fs.promises.writeFile(outputPath, fullBuffer);
  console.log(`💾 File saved to: ${outputPath}`);
}

/**
 * Hands out strictly increasing nonces for signing a sequence of
 * transactions.
 *
 * Wraps a BN counter (as returned by `api.query.system.account`); the only
 * operation used is `.addn(n)`.
 */
export class NonceManager {
  /** @param {object} initialNonce - starting nonce (BN instance) */
  constructor(initialNonce) {
    this.nonce = initialNonce;
  }

  /**
   * Return the current nonce, then advance the counter by one.
   * @returns {object} the nonce (BN) to use for the next transaction
   */
  getAndIncrement() {
    const issued = this.nonce;
    this.nonce = issued.addn(1);
    return issued;
  }
}
26 changes: 2 additions & 24 deletions examples/store_chunked_data.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,9 @@ import { create } from 'ipfs-http-client'
import * as dagPB from '@ipld/dag-pb'
import { TextDecoder } from 'util'
import assert from "assert";
import { waitForNewBlock, cidFromBytes, buildUnixFSDagPB, convertCid, generateTextImage, filesAreEqual } from './common.js'
import { waitForNewBlock, generateTextImage, filesAreEqual, fileToDisk, NonceManager } from './common.js'
import { fetchCid } from "./api.js";
import { buildUnixFSDagPB, cidFromBytes, convertCid } from "./cid_dag_metadata.js";

// ---- CONFIG ----
const WS_ENDPOINT = 'ws://127.0.0.1:10000' // Bulletin node
Expand Down Expand Up @@ -72,17 +73,6 @@ async function retrieveMetadata(ipfs, metadataCid) {
return metadataJson;
}

async function fileToDisk(outputPath, fullBuffer) {
await new Promise((resolve, reject) => {
const ws = fs.createWriteStream(outputPath);
ws.write(fullBuffer);
ws.end();
ws.on('finish', resolve);
ws.on('error', reject);
});
console.log(`💾 File saved to: ${outputPath}`);
}

/**
 * Fetches all chunks listed in metadataJson, concatenates into a single file,
* and saves to disk (or returns as Buffer).
Expand Down Expand Up @@ -210,18 +200,6 @@ async function storeProof(api, sudoPair, pair, rootCID, dagFileBytes, nonceMgr,
return { rawDagCid }
}

class NonceManager {
constructor(initialNonce) {
this.nonce = initialNonce; // BN instance from api.query.system.account
}

getAndIncrement() {
const current = this.nonce;
this.nonce = this.nonce.addn(1); // increment BN
return current;
}
}

async function authorizeStorage(api, sudoPair, pair, nonceMgr) {
// Ensure enough quota.
const auth = await api.query.transactionStorage.authorizations({ "Account": pair.address});
Expand Down