diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml index 9f7611e7..07088068 100644 --- a/.github/workflows/integration-test.yml +++ b/.github/workflows/integration-test.yml @@ -72,3 +72,7 @@ jobs: - name: Run authorize and store (PAPI, RPC node) working-directory: examples run: just run-authorize-and-store "ws" + + - name: Run store chunked data + DAG-PB (PJS-API, RPC node) + working-directory: examples + run: just run-store-chunked-data diff --git a/examples/api.js b/examples/api.js index 44113399..e5211ab8 100644 --- a/examples/api.js +++ b/examples/api.js @@ -1,7 +1,5 @@ import { cidFromBytes } from "./common.js"; import { Binary } from '@polkadot-api/substrate-bindings'; -import * as multihash from "multiformats/hashes/digest"; -import {blake2AsU8a} from "@polkadot/util-crypto"; export async function authorizeAccount(typedApi, sudoSigner, who, transactions, bytes) { console.log('Authorizing account...'); @@ -23,6 +21,7 @@ export async function store(typedApi, signer, data) { console.log('⬆️ Storing data with length=', data.length); const cid = await cidFromBytes(data); + // Convert data to Uint8Array then wrap in Binary for PAPI typed API const dataBytes = typeof data === 'string' ? new Uint8Array(Buffer.from(data)) : new Uint8Array(data); diff --git a/examples/authorize_and_store_papi.js b/examples/authorize_and_store_papi.js index 52f2b0c0..e9a6d015 100644 --- a/examples/authorize_and_store_papi.js +++ b/examples/authorize_and_store_papi.js @@ -51,7 +51,9 @@ async function main() { downloadedContent.toString(), '❌ dataToStore does not match downloadedContent!' ); - console.log(`✅ Verified content - test passed!`); + console.log(`✅ Verified content!`); + + console.log(`\n\n\n✅✅✅ Test passed! ✅✅✅`); resultCode = 0; } catch (error) { console.error("❌ Error:", error); diff --git a/examples/authorize_and_store_papi_smoldot.js b/examples/authorize_and_store_papi_smoldot.js index b4f091c9..13ab30d5 100644 --- a/examples/authorize_and_store_papi_smoldot.js +++ b/examples/authorize_and_store_papi_smoldot.js @@ -94,7 +94,9 @@ async function main() { downloadedContent.toString(), '❌ dataToStore does not match downloadedContent!' ); - console.log(`✅ Verified content - test passed!`); + console.log(`✅ Verified content!`); + + console.log(`\n\n\n✅✅✅ Test passed! ✅✅✅`); resultCode = 0; } catch (error) { console.error("❌ Error:", error); diff --git a/examples/common.js b/examples/common.js index 2b40f864..00365827 100644 --- a/examples/common.js +++ b/examples/common.js @@ -3,6 +3,11 @@ import * as multihash from 'multiformats/hashes/digest' import { CID } from 'multiformats/cid' import { Keyring } from '@polkadot/keyring'; import { getPolkadotSigner } from '@polkadot-api/signer'; +import * as dagPB from '@ipld/dag-pb' +import { UnixFS } from 'ipfs-unixfs' +import { createCanvas } from "canvas"; +import fs from "fs"; +import assert from "assert"; export async function waitForNewBlock() { // TODO: wait for a new block. @@ -64,3 +69,107 @@ export function setupKeyringAndSigners(sudoSeed, accountSeed) { whoAddress: whoAccount.address }; } + +/** + * Build a UnixFS DAG-PB file node from raw chunks. 
+ *
+ * Defaults to the SHA2-256 multihash (code 0x12).
+ */
+export async function buildUnixFSDagPB(chunks, mhCode = 0x12) {
+  if (!chunks?.length) {
+    throw new Error('❌ buildUnixFSDagPB: chunks[] is empty')
+  }
+
+  // UnixFS blockSizes = sizes of the child blocks (must be BigInt for marshalling)
+  const blockSizes = chunks.map(c => BigInt(c.len))
+
+  console.log(`🧩 Building UnixFS DAG from chunks:
+  • totalChunks: ${chunks.length}
+  • blockSizes: ${blockSizes.join(', ')}`)
+
+  // Build UnixFS file metadata (no inline data here)
+  const fileData = new UnixFS({
+    type: 'file',
+    blockSizes
+  })
+
+  // DAG-PB node: our file with chunk links
+  const dagNode = dagPB.prepare({
+    Data: fileData.marshal(),
+    Links: chunks.map(c => ({
+      Name: '',
+      Tsize: c.len,
+      Hash: c.cid
+    }))
+  })
+
+  // Encode DAG-PB
+  const dagBytes = dagPB.encode(dagNode)
+
+  // Hash DAG to produce CIDv1
+  const rootCid = await cidFromBytes(dagBytes, dagPB.code, mhCode)
+
+  console.log(`✅ DAG root CID: ${rootCid.toString()}`)
+
+  return { rootCid, dagBytes }
+}
+
+/**
+ * Generates a random image containing the given text and writes it to `file` as a JPEG.
+ */
+export function generateTextImage(file, text, width = 800, height = 600) {
+  const canvas = createCanvas(width, height);
+  const ctx = canvas.getContext("2d");
+
+  // 🎨 Background
+  ctx.fillStyle = randomColor();
+  ctx.fillRect(0, 0, width, height);
+
+  // 🟠 Random shapes
+  for (let i = 0; i < 15; i++) {
+    ctx.beginPath();
+    ctx.fillStyle = randomColor();
+    ctx.arc(
+      Math.random() * width,
+      Math.random() * height,
+      Math.random() * 120,
+      0,
+      Math.PI * 2
+    );
+    ctx.fill();
+  }
+
+  // ✍️ Draw the text
+  ctx.font = "bold 40px Sans";
+  ctx.fillStyle = "white";
+  ctx.textAlign = "center";
+  ctx.textBaseline = "middle";
+
+  // Add a shadow behind the text for readability
+  ctx.shadowColor = "black";
+  ctx.shadowBlur = 8;
+
+  ctx.fillText(text, width / 2, height / 2);
+
+  const jpegBytes = canvas.toBuffer("image/jpeg");
+  fs.writeFileSync(file, jpegBytes);
+  console.log("Saved to file:", file);
+}
+
+function randomColor() {
+  return `rgb(${rand255()}, ${rand255()}, ${rand255()})`;
+}
+
+function rand255() {
+  return Math.floor(Math.random() * 256);
+}
+
+export function filesAreEqual(path1, path2) {
+  const data1 = fs.readFileSync(path1);
+  const data2 = fs.readFileSync(path2);
+  assert.strictEqual(data1.length, data2.length, `❌ ${path1} and ${path2} differ in size`)
+  assert.deepStrictEqual(data1, data2, `❌ ${path1} and ${path2} differ in content`)
+}
diff --git a/examples/justfile b/examples/justfile
index 6190f3a7..af252ea8 100644
--- a/examples/justfile
+++ b/examples/justfile
@@ -260,3 +260,26 @@ run-authorize-and-store mode="ws": build npm-install
     just teardown-services
     exit $EXAMPLE_EXIT

+# Run store chunked data example with Docker IPFS
+run-store-chunked-data: build npm-install
+    #!/usr/bin/env bash
+    set -e
+
+    echo "🚀 Starting store chunked data + DAG-PB workflow test ..."
+    echo ""
+
+    just setup-services
+    # Guard with `||` so a failure is captured instead of aborting via `set -e` before teardown.
+    EXAMPLE_EXIT=0
+    node store_chunked_data.js || EXAMPLE_EXIT=$?
+
+    echo ""
+    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
+    if [ $EXAMPLE_EXIT -eq 0 ]; then
+        echo "✅ Example completed successfully!"
+ else + echo "❌ Example failed with exit code $EXAMPLE_EXIT" + fi + + just teardown-services + exit $EXAMPLE_EXIT diff --git a/examples/package.json b/examples/package.json index 59846fba..4bad5202 100644 --- a/examples/package.json +++ b/examples/package.json @@ -11,10 +11,11 @@ "@polkadot/keyring": "^13.5.8", "@polkadot/util": "^13.5.8", "@polkadot/util-crypto": "^13.5.8", + "canvas": "^3.2.0", "fs": "^0.0.1-security", "ipfs-http-client": "^60.0.1", "multiformats": "^13.4.1", - "polkadot-api": "^1.20.6", + "polkadot-api": "^1.22.0", "smoldot": "^2.0.40", "ws": "^8.18.0" }, diff --git a/examples/store_chunked_data.js b/examples/store_chunked_data.js index 417c36a8..c77bd4ea 100644 --- a/examples/store_chunked_data.js +++ b/examples/store_chunked_data.js @@ -1,24 +1,22 @@ import fs from 'fs' import { ApiPromise, WsProvider } from '@polkadot/api' import { Keyring } from '@polkadot/keyring' -import { cryptoWaitReady, blake2AsU8a } from '@polkadot/util-crypto' -import * as multihash from 'multiformats/hashes/digest' +import { cryptoWaitReady } from '@polkadot/util-crypto' import { CID } from 'multiformats/cid' import { create } from 'ipfs-http-client' import * as dagPB from '@ipld/dag-pb' -import * as sha256 from 'multiformats/hashes/sha2'; -import { UnixFS } from 'ipfs-unixfs' import { TextDecoder } from 'util' import assert from "assert"; -import { waitForNewBlock, cidFromBytes } from './common.js' +import { waitForNewBlock, cidFromBytes, buildUnixFSDagPB, convertCid, generateTextImage, filesAreEqual } from './common.js' +import { fetchCid } from "./api.js"; // ---- CONFIG ---- const WS_ENDPOINT = 'ws://127.0.0.1:10000' // Bulletin node const IPFS_API = 'http://127.0.0.1:5001' // Local IPFS daemon const HTTP_IPFS_API = 'http://127.0.0.1:8080' // Local IPFS HTTP gateway -const FILE_PATH = './picture.svg' -const OUT_PATH = './retrieved_picture.bin' -const OUT_PATH2 = './retrieved_picture.bin2' +const FILE_PATH = './random_picture.jpg' +const OUT_1_PATH = './retrieved_random_picture1.jpg' +const OUT_2_PATH = './retrieved_random_picture2.jpg' const CHUNK_SIZE = 4 * 1024 // 4 KB // ----------------- @@ -45,7 +43,7 @@ async function storeChunkedFile(api, pair, filePath, nonceMgr) { const chunks = [] for (let i = 0; i < fileData.length; i += CHUNK_SIZE) { const chunk = fileData.subarray(i, i + CHUNK_SIZE) - const cid = cidFromBytes(chunk) + const cid = await cidFromBytes(chunk) chunks.push({cid, bytes: to_hex(chunk), len: chunk.length}) } console.log(`✂️ Split into ${chunks.length} chunks`) @@ -101,7 +99,7 @@ async function retrieveFileForMetadata(ipfs, metadataJson, outputPath) { const buffers = []; for (const chunk of metadataJson.chunks) { const chunkCid = CID.parse(chunk.cid); - console.log(`⬇️ Fetching chunk: ${chunkCid.toString()} (len: ${chunk.length})`); + console.log(`⬇️ Fetching chunk: ${chunkCid.toString()} (len: ${chunk.len})`); const block = await ipfs.block.get(chunkCid); buffers.push(block); } @@ -132,7 +130,7 @@ export async function storeMetadata(api, pair, chunks, nonceMgr) { chunks: chunks.map((c, i) => ({ index: i, cid: c.cid.toString(), - length: c.len + len: c.len })) }; @@ -140,7 +138,7 @@ export async function storeMetadata(api, pair, chunks, nonceMgr) { console.log(`🧾 Metadata size: ${jsonBytes.length} bytes`) // 2️⃣ Compute CID manually (same as store() function) - const metadataCid = cidFromBytes(jsonBytes) + const metadataCid = await cidFromBytes(jsonBytes) console.log('🧩 Metadata CID:', metadataCid.toString()) // 3️⃣ Store JSON bytes in Bulletin @@ -156,36 +154,12 @@ 
export async function storeMetadata(api, pair, chunks, nonceMgr) { * @param {Object} metadataJson - JSON object containing chunks [{ cid, length }] * @returns {Promise<{ rootCid: CID, dagBytes: Uint8Array }>} */ -export async function buildUnixFSDag(metadataJson) { +async function buildUnixFSDag(metadataJson, mhCode = 0x12) { // Extract chunk info const chunks = metadataJson.chunks || [] if (!chunks.length) throw new Error('❌ metadataJson.chunks is empty') - // Prepare UnixFS file metadata - const blockSizes = chunks.map(c => BigInt(c.length)) - const fileData = new UnixFS({ type: 'file', blockSizes }) - - console.log(`\n🧩 Building UnixFS DAG: - • totalChunks: ${chunks.length} - • blockSizes: ${blockSizes.join(', ')}`) - - // Prepare DAG-PB node - const dagNode = dagPB.prepare({ - Data: fileData.marshal(), - Links: chunks.map(c => ({ - Name: '', - Tsize: c.length, - Hash: c.cid - })) - }) - - // Encode and hash to create dag root CID. - const dagBytes = dagPB.encode(dagNode) - const dagHash = await sha256.sha256.digest(dagBytes) - const rootCid = CID.createV1(dagPB.code, dagHash) - - console.log(`✅ Built DAG root CID: ${rootCid.toString()}`) - return { rootCid, dagBytes } + return await buildUnixFSDagPB(chunks, mhCode); } /** @@ -195,7 +169,7 @@ export async function buildUnixFSDag(metadataJson) { * @param {CID|string} proofCid - CID of the stored DAG-PB node * @returns {Promise<{ dagNode: any, rootCid: CID }>} */ -export async function reconstructDagFromProof(ipfs, proofCid, expectedRootCid) { +export async function reconstructDagFromProof(ipfs, expectedRootCid, proofCid, mhCode = 0x12) { console.log(`📦 Fetching DAG bytes for proof CID: ${proofCid.toString()}`); // 1️⃣ Read the raw block bytes from IPFS @@ -207,8 +181,7 @@ export async function reconstructDagFromProof(ipfs, proofCid, expectedRootCid) { console.log('📄 Decoded DAG node:', dagNode); // 3️⃣ Recalculate root CID (same as IPFS does) - const hash = await sha256.sha256.digest(dagBytes); - const rootCid = CID.createV1(dagPB.code, hash); + const rootCid = await cidFromBytes(dagBytes, dagPB.code, mhCode); assert.strictEqual( rootCid.toString(), @@ -221,7 +194,7 @@ export async function reconstructDagFromProof(ipfs, proofCid, expectedRootCid) { async function storeProof(api, sudoPair, pair, rootCID, dagFileBytes, nonceMgr, sudoNonceMgr) { console.log(`🧩 Storing proof for rootCID: ${rootCID.toString()} to the Bulletin`); // Compute CID manually (same as store() function) - const proofCid = cidFromBytes(dagFileBytes) + const rawDagCid = await cidFromBytes(dagFileBytes) // Store DAG bytes in Bulletin const storeTx = api.tx.transactionStorage.store(to_hex(dagFileBytes)); @@ -229,12 +202,12 @@ async function storeProof(api, sudoPair, pair, rootCID, dagFileBytes, nonceMgr, console.log('📤 DAG proof "bytes" stored in Bulletin:', storeResult.toHuman?.()) // This can be a serious pallet, this is just a demonstration. 
- const proof = `ProofCid: ${proofCid.toString()} -> rootCID: ${rootCID.toString()}`; + const proof = `ProofCid: ${rawDagCid.toString()} -> rootCID: ${rootCID.toString()}`; const proofTx = api.tx.system.remark(proof); const sudoTx = api.tx.sudo.sudo(proofTx); const proofResult = await sudoTx.signAndSend(sudoPair, { nonce: sudoNonceMgr.getAndIncrement()}); console.log(`📤 DAG proof - "${proof}" - stored in Bulletin:`, proofResult.toHuman?.()) - return { proofCid } + return { rawDagCid } } class NonceManager { @@ -249,18 +222,6 @@ class NonceManager { } } -function filesAreEqual(path1, path2) { - const data1 = fs.readFileSync(path1); - const data2 = fs.readFileSync(path2); - - if (data1.length !== data2.length) return false; - - for (let i = 0; i < data1.length; i++) { - if (data1[i] !== data2[i]) return false; - } - return true; -} - async function authorizeStorage(api, sudoPair, pair, nonceMgr) { // Ensure enough quota. const auth = await api.query.transactionStorage.authorizations({ "Account": pair.address}); @@ -287,84 +248,99 @@ async function authorizeStorage(api, sudoPair, pair, nonceMgr) { async function main() { await cryptoWaitReady() - if (fs.existsSync(OUT_PATH)) { - fs.unlinkSync(OUT_PATH); - console.log(`File ${OUT_PATH} removed.`); - } - if (fs.existsSync(OUT_PATH2)) { - fs.unlinkSync(OUT_PATH2); - console.log(`File ${OUT_PATH2} removed.`); - } - - console.log('🛰 Connecting to Bulletin node...') - const provider = new WsProvider(WS_ENDPOINT) - const api = await ApiPromise.create({ provider }) - await api.isReady - const ipfs = create({ url: IPFS_API }); - console.log('✅ Connected to Bulletin node') - - const keyring = new Keyring({ type: 'sr25519' }) - const pair = keyring.addFromUri('//Alice') - const sudoPair = keyring.addFromUri('//Alice') - let { nonce } = await api.query.system.account(pair.address); - const nonceMgr = new NonceManager(nonce); - console.log(`💳 Using account: ${pair.address}, nonce: ${nonce}`) - - // Make sure an account can store data. - await authorizeStorage(api, sudoPair, pair, nonceMgr); - - // Read the file, chunk it, store in Bulletin and return CIDs. - let { chunks} = await storeChunkedFile(api, pair, FILE_PATH, nonceMgr); - // Store metadata file with all the CIDs to the Bulletin. - const { metadataCid} = await storeMetadata(api, pair, chunks, nonceMgr); - await waitForNewBlock(); - //////////////////////////////////////////////////////////////////////////////////// - // 1. example manually retrieve the picture (no IPFS DAG feature) - const metadataJson = await retrieveMetadata(ipfs, metadataCid) - await retrieveFileForMetadata(ipfs, metadataJson, OUT_PATH); - filesAreEqual(FILE_PATH, OUT_PATH); - - //////////////////////////////////////////////////////////////////////////////////// - // 2. example download picture by rootCID with IPFS DAG feature and HTTP gateway. - // Demonstrates how to download chunked content by one root CID. - // Basically, just take the `metadataJson` with already stored chunks and convert it to the DAG-PB format. - const { rootCid, dagBytes } = await buildUnixFSDag(metadataJson) - - // Store DAG proof to the Bulletin. - let {proofCid} = await storeProof(api, sudoPair, pair, rootCid, Buffer.from(dagBytes), nonceMgr, nonceMgr); - await waitForNewBlock(); - await reconstructDagFromProof(ipfs, proofCid, rootCid); - - // Store DAG into IPFS. 
-  // (Alternative: ipfs.dag.put(dagNode, {storeCodec: 'dag-pb', hashAlg: 'sha2-256', pin: true }))
-  const dagCid = await ipfs.block.put(dagBytes, {
-    format: 'dag-pb',
-    mhtype: 'sha2-256'
-  })
-  assert.strictEqual(
-    rootCid.toString(),
-    dagCid.toString(),
-    '❌ DAG CID does not match expected root CID'
-  );
-  console.log('🧱 DAG stored on IPFS with CID:', dagCid.toString())
-  console.log('\n🌐 Try opening in browser:')
-  console.log(`   http://127.0.0.1:8080/ipfs/${rootCid.toString()}`)
-  console.log('   (You’ll see binary content since this is an image)')
-
-  // Download the content from IPFS HTTP gateway
-  const contentUrl = `${HTTP_IPFS_API}/ipfs/${dagCid.toString()}`;
-  console.log('⬇️ Downloading the full content (no chunking) by rootCID from url: ', contentUrl);
-  const res = await fetch(contentUrl);
-  if (!res.ok) throw new Error(`HTTP error ${res.status}`);
-  const fullBuffer = Buffer.from(await res.arrayBuffer());
-  console.log(`✅ Reconstructed file size: ${fullBuffer.length} bytes`);
-  await fileToDisk(OUT_PATH2, fullBuffer);
-  filesAreEqual(FILE_PATH, OUT_PATH2);
-  filesAreEqual(OUT_PATH2, OUT_PATH);
-
-  console.log(`\n\n\n✅✅✅ Passed all tests ✅✅✅`);
-  await api.disconnect()
+  let api, resultCode;
+  try {
+    if (fs.existsSync(OUT_1_PATH)) {
+      fs.unlinkSync(OUT_1_PATH);
+      console.log(`File ${OUT_1_PATH} removed.`);
+    }
+    if (fs.existsSync(OUT_2_PATH)) {
+      fs.unlinkSync(OUT_2_PATH);
+      console.log(`File ${OUT_2_PATH} removed.`);
+    }
+    if (fs.existsSync(FILE_PATH)) {
+      fs.unlinkSync(FILE_PATH);
+      console.log(`File ${FILE_PATH} removed.`);
+    }
+    generateTextImage(FILE_PATH, "Hello, Bulletin with PJS-API - " + new Date().toString(), 250, 250);
+
+    console.log('🛰 Connecting to Bulletin node...')
+    const provider = new WsProvider(WS_ENDPOINT)
+    api = await ApiPromise.create({ provider })
+    await api.isReady
+    const ipfs = create({ url: IPFS_API });
+    console.log('✅ Connected to Bulletin node')
+
+    const keyring = new Keyring({ type: 'sr25519' })
+    const pair = keyring.addFromUri('//Alice')
+    const sudoPair = keyring.addFromUri('//Alice')
+    let { nonce } = await api.query.system.account(pair.address);
+    const nonceMgr = new NonceManager(nonce);
+    console.log(`💳 Using account: ${pair.address}, nonce: ${nonce}`)
+
+    // Make sure the account is allowed to store data.
+    await authorizeStorage(api, sudoPair, pair, nonceMgr);
+
+    // Read the file, chunk it, store the chunks in the Bulletin and return their CIDs.
+    let { chunks } = await storeChunkedFile(api, pair, FILE_PATH, nonceMgr);
+    // Store the metadata file with all the CIDs in the Bulletin.
+    const { metadataCid } = await storeMetadata(api, pair, chunks, nonceMgr);
+    await waitForNewBlock();
+
+    ////////////////////////////////////////////////////////////////////////////////////
+    // Example 1: manually retrieve the picture (without the IPFS DAG feature).
+    const metadataJson = await retrieveMetadata(ipfs, metadataCid)
+    await retrieveFileForMetadata(ipfs, metadataJson, OUT_1_PATH);
+    filesAreEqual(FILE_PATH, OUT_1_PATH);
+
+    ////////////////////////////////////////////////////////////////////////////////////
+    // Example 2: download the picture by root CID using the IPFS DAG feature and the HTTP gateway.
+    // Demonstrates how to download chunked content via a single root CID:
+    // take the `metadataJson` describing the already-stored chunks and convert it to the DAG-PB format.
+    const { rootCid, dagBytes } = await buildUnixFSDag(metadataJson, 0xb220)
+
+    // Store the DAG and its proof in the Bulletin.
+    let { rawDagCid } = await storeProof(api, sudoPair, pair, rootCid, Buffer.from(dagBytes), nonceMgr, nonceMgr);
+    await waitForNewBlock();
+    await reconstructDagFromProof(ipfs, rootCid, rawDagCid, 0xb220);
+
+    // The DAG bytes are already stored via the Bulletin; verify that they hash to the expected DAG-PB root CID.
+    assert.strictEqual(
+      rootCid.toString(),
+      convertCid(rawDagCid, dagPB.code).toString(),
+      '❌ DAG CID does not match expected root CID'
+    );
+    console.log('🧱 DAG stored on IPFS with CID:', rawDagCid.toString())
+    console.log('\n🌐 Try opening in browser:')
+    console.log(`   http://127.0.0.1:8080/ipfs/${rootCid.toString()}`)
+    console.log('   (You’ll see binary content since this is an image)')
+    console.log(`   http://127.0.0.1:8080/ipfs/${rawDagCid.toString()}`)
+    console.log('   (You’ll see the encoded DAG descriptor content)')
+
+    // Download the content from the IPFS HTTP gateway.
+    const fullBuffer = await fetchCid(HTTP_IPFS_API, rootCid);
+    console.log(`✅ Reconstructed file size: ${fullBuffer.length} bytes`);
+    await fileToDisk(OUT_2_PATH, fullBuffer);
+    filesAreEqual(FILE_PATH, OUT_2_PATH);
+    filesAreEqual(OUT_1_PATH, OUT_2_PATH);
+
+    // Download the raw DAG descriptor file itself.
+    const downloadedDagBytes = await fetchCid(HTTP_IPFS_API, rawDagCid);
+    console.log(`✅ Downloaded DAG raw descriptor file size: ${downloadedDagBytes.length} bytes`);
+    assert.deepStrictEqual(downloadedDagBytes, Buffer.from(dagBytes));
+    const dagNode = dagPB.decode(downloadedDagBytes);
+    console.log('📄 Decoded DAG node:', dagNode);
+
+    console.log(`\n\n\n✅✅✅ Test passed! ✅✅✅`);
+    resultCode = 0;
+  } catch (error) {
+    console.error("❌ Error:", error);
+    resultCode = 1;
+  } finally {
+    if (api) await api.disconnect();
+    process.exit(resultCode);
+  }
 }

-main().catch(console.error)
+await main();
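
Note: store_chunked_data.js now imports fetchCid from ./api.js, but this diff does not include its implementation. A plausible sketch, mirroring the inline gateway download that the old main() performed (hypothetical; the real helper in api.js may differ):

export async function fetchCid(gatewayUrl, cid) {
  // Download the full content for `cid` from the IPFS HTTP gateway.
  const contentUrl = `${gatewayUrl}/ipfs/${cid.toString()}`;
  console.log('⬇️ Downloading content from url:', contentUrl);
  const res = await fetch(contentUrl);
  if (!res.ok) throw new Error(`HTTP error ${res.status}`);
  return Buffer.from(await res.arrayBuffer());
}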
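Similarly, the diff calls cidFromBytes(bytes, codec, mhCode) with multihash codes 0x12 (sha2-256) and 0xb220 (blake2b-256), and imports a convertCid helper, neither of which appears in the common.js hunk. A minimal sketch of what they presumably look like, inferred from the call sites; the default codec, default multihash code, and the blake2AsU8a path (which api.js imported before this change) are assumptions:

import { CID } from 'multiformats/cid'
import { sha256 } from 'multiformats/hashes/sha2'
import * as multihash from 'multiformats/hashes/digest'
import { blake2AsU8a } from '@polkadot/util-crypto'

const RAW_CODEC = 0x55 // multicodec code for raw bytes

export async function cidFromBytes(bytes, codec = RAW_CODEC, mhCode = 0x12) {
  // Hash the bytes with the requested multihash and wrap the digest as a CIDv1.
  if (mhCode === 0x12) {
    return CID.createV1(codec, await sha256.digest(bytes))
  }
  if (mhCode === 0xb220) {
    // blake2b-256 (0xb220), matching the code passed at the call sites above.
    return CID.createV1(codec, multihash.create(0xb220, blake2AsU8a(bytes, 256)))
  }
  throw new Error(`Unsupported multihash code: ${mhCode}`)
}

export function convertCid(cid, codec) {
  // Re-interpret an existing CID's multihash under a different codec (e.g. raw -> dag-pb).
  return CID.createV1(codec, cid.multihash)
}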