Skip to content

Commit 02e09b3

Browse files
committed
merged with main
2 parents a56f58e + aa69187 commit 02e09b3

File tree

8 files changed

+255
-140
lines changed

8 files changed

+255
-140
lines changed

.github/workflows/integration-test.yml

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,4 +89,8 @@ jobs:
8989

9090
- name: Run authorize and store (PAPI, RPC node)
9191
working-directory: examples
92-
run: just run-authorize-and-store "ws" "${{ matrix.runtime.package }}"
92+
run: just run-authorize-and-store "ws"
93+
94+
- name: Run store chunked data + DAG-PB (PJS-API, RPC node)
95+
working-directory: examples
96+
run: just run-store-chunked-data

examples/api.js

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,5 @@
11
import { cidFromBytes } from "./common.js";
22
import { Binary } from '@polkadot-api/substrate-bindings';
3-
import * as multihash from "multiformats/hashes/digest";
4-
import {blake2AsU8a} from "@polkadot/util-crypto";
53

64
export async function authorizeAccount(typedApi, sudoSigner, who, transactions, bytes) {
75
console.log('Authorizing account...');
@@ -23,6 +21,7 @@ export async function store(typedApi, signer, data) {
2321
console.log('⬆️ Storing data with length=', data.length);
2422
const cid = await cidFromBytes(data);
2523

24+
// Convert data to Uint8Array then wrap in Binary for PAPI typed API
2625
const dataBytes = typeof data === 'string' ?
2726
new Uint8Array(Buffer.from(data)) :
2827
new Uint8Array(data);

examples/authorize_and_store_papi.js

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,9 @@ async function main() {
5151
downloadedContent.toString(),
5252
'❌ dataToStore does not match downloadedContent!'
5353
);
54-
console.log(`βœ… Verified content - test passed!`);
54+
console.log(`βœ… Verified content!`);
55+
56+
console.log(`\n\n\nβœ…βœ…βœ… Test passed! βœ…βœ…βœ…`);
5557
resultCode = 0;
5658
} catch (error) {
5759
console.error("❌ Error:", error);

examples/authorize_and_store_papi_smoldot.js

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,9 @@ async function main() {
9494
downloadedContent.toString(),
9595
'❌ dataToStore does not match downloadedContent!'
9696
);
97-
console.log(`βœ… Verified content - test passed!`);
97+
console.log(`βœ… Verified content!`);
98+
99+
console.log(`\n\n\nβœ…βœ…βœ… Test passed! βœ…βœ…βœ…`);
98100
resultCode = 0;
99101
} catch (error) {
100102
console.error("❌ Error:", error);

examples/common.js

Lines changed: 109 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,11 @@ import * as multihash from 'multiformats/hashes/digest'
33
import { CID } from 'multiformats/cid'
44
import { Keyring } from '@polkadot/keyring';
55
import { getPolkadotSigner } from '@polkadot-api/signer';
6+
import * as dagPB from '@ipld/dag-pb'
7+
import { UnixFS } from 'ipfs-unixfs'
8+
import { createCanvas } from "canvas";
9+
import fs from "fs";
10+
import assert from "assert";
611

712
export async function waitForNewBlock() {
813
// TODO: wait for a new block.
@@ -64,3 +69,107 @@ export function setupKeyringAndSigners(sudoSeed, accountSeed) {
6469
whoAddress: whoAccount.address
6570
};
6671
}
72+
73+
/**
74+
* Build a UnixFS DAG-PB file node from raw chunks.
75+
*
76+
* (By default with SHA2 multihash)
77+
*/
78+
/**
 * Build a UnixFS DAG-PB "file" node that links to a set of raw chunks.
 *
 * Each entry of `chunks` is expected to carry `len` (chunk byte length)
 * and `cid` (the chunk's content identifier). The multihash code defaults
 * to 0x12 (SHA2-256).
 *
 * @param {Array<{len: number, cid: object}>} chunks - Raw chunk descriptors.
 * @param {number} [mhCode=0x12] - Multihash code used for the root CID.
 * @returns {Promise<{rootCid: object, dagBytes: Uint8Array}>} Root CID and encoded DAG-PB bytes.
 * @throws {Error} If `chunks` is missing or empty.
 */
export async function buildUnixFSDagPB(chunks, mhCode = 0x12) {
  if (!chunks?.length) {
    throw new Error('❌ buildUnixFSDag: chunks[] is empty')
  }

  // Collect the per-chunk sizes (UnixFS blockSizes) and the DAG-PB links
  // in a single pass over the chunk list.
  const blockSizes = []
  const links = []
  for (const chunk of chunks) {
    blockSizes.push(chunk.len)
    links.push({
      Name: '',
      Tsize: chunk.len,
      Hash: chunk.cid
    })
  }

  console.log(`🧩 Building UnixFS DAG from chunks:
  β€’ totalChunks: ${chunks.length}
  β€’ blockSizes: ${blockSizes.join(', ')}`)

  // UnixFS file metadata only — the chunk payloads live behind the links,
  // not inline in this node.
  const unixFsMeta = new UnixFS({
    type: 'file',
    blockSizes
  })

  // Assemble and encode the DAG-PB node holding the links to every chunk.
  const dagNode = dagPB.prepare({
    Data: unixFsMeta.marshal(),
    Links: links
  })
  const dagBytes = dagPB.encode(dagNode)

  // Hash the encoded node to obtain the root CIDv1 of the whole file.
  const rootCid = await cidFromBytes(dagBytes, dagPB.code, mhCode)
  console.log(`βœ… DAG root CID: ${rootCid.toString()}`)

  return { rootCid, dagBytes }
}
116+
117+
/**
118+
* Generates (dynamic) images based on the input text.
119+
*/
120+
/**
 * Render `text` centered on a randomly decorated canvas and save it as a
 * JPEG file — useful for generating unique test fixtures per run.
 *
 * @param {string} file - Destination path for the JPEG output.
 * @param {string} text - Text drawn in the middle of the image.
 * @param {number} [width=800] - Canvas width in pixels.
 * @param {number} [height=600] - Canvas height in pixels.
 */
export function generateTextImage(file, text, width = 800, height = 600) {
  const canvas = createCanvas(width, height);
  const ctx = canvas.getContext("2d");

  // Randomly colored background fill.
  ctx.fillStyle = randomColor();
  ctx.fillRect(0, 0, width, height);

  // Scatter 15 circles of random color, position and radius (up to 120px).
  for (let shape = 0; shape < 15; shape++) {
    ctx.beginPath();
    ctx.fillStyle = randomColor();
    const cx = Math.random() * width;
    const cy = Math.random() * height;
    const radius = Math.random() * 120;
    ctx.arc(cx, cy, radius, 0, Math.PI * 2);
    ctx.fill();
  }

  // Centered white text; a black shadow keeps it readable on any background.
  ctx.font = "bold 40px Sans";
  ctx.fillStyle = "white";
  ctx.textAlign = "center";
  ctx.textBaseline = "middle";
  ctx.shadowColor = "black";
  ctx.shadowBlur = 8;
  ctx.fillText(text, width / 2, height / 2);

  // Encode as JPEG and persist to disk.
  const jpegBytes = canvas.toBuffer("image/jpeg");
  fs.writeFileSync(file, jpegBytes);
  console.log("Saved to file:", file);
}
158+
159+
// Produce a random CSS color string of the form `rgb(r, g, b)`.
function randomColor() {
  const [r, g, b] = [rand255(), rand255(), rand255()];
  return `rgb(${r}, ${g}, ${b})`;
}

// Uniform random integer in the inclusive range [0, 255].
function rand255() {
  return Math.trunc(Math.random() * 256);
}
166+
167+
/**
 * Assert that two files on disk have byte-identical contents.
 *
 * @param {string} path1 - Path of the first file.
 * @param {string} path2 - Path of the second file.
 * @throws {assert.AssertionError} If the files differ in size or content.
 */
export function filesAreEqual(path1, path2) {
  const data1 = fs.readFileSync(path1);
  const data2 = fs.readFileSync(path2);
  // Check sizes first so a truncated file yields a clear length mismatch.
  assert.strictEqual(data1.length, data2.length);
  // Single native O(n) comparison; the previous per-byte assert loop built
  // one assertion per byte, which is extremely slow for large files.
  assert.ok(data1.equals(data2), `files ${path1} and ${path2} differ`);
}

examples/justfile

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -289,3 +289,25 @@ run-authorize-and-store mode="ws" runtime="bulletin-polkadot-runtime": (build ru
289289
just teardown-services
290290
exit $EXAMPLE_EXIT
291291

292+
# Run store chunked data example with Docker IPFS
293+
run-store-chunked-data: build npm-install
294+
#!/usr/bin/env bash
295+
set -e
296+
297+
echo "πŸš€ Starting store chunked data + DAG-PB workflow test ..."
298+
echo ""
299+
300+
just setup-services
301+
node store_chunked_data.js
302+
EXAMPLE_EXIT=$?
303+
304+
echo ""
305+
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
306+
if [ $EXAMPLE_EXIT -eq 0 ]; then
307+
echo "βœ… Example completed successfully!"
308+
else
309+
echo "❌ Example failed with exit code $EXAMPLE_EXIT"
310+
fi
311+
312+
just teardown-services
313+
exit $EXAMPLE_EXIT

examples/package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,10 +11,11 @@
1111
"@polkadot/keyring": "^13.5.8",
1212
"@polkadot/util": "^13.5.8",
1313
"@polkadot/util-crypto": "^13.5.8",
14+
"canvas": "^3.2.0",
1415
"fs": "^0.0.1-security",
1516
"ipfs-http-client": "^60.0.1",
1617
"multiformats": "^13.4.1",
17-
"polkadot-api": "^1.20.6",
18+
"polkadot-api": "^1.22.0",
1819
"smoldot": "^2.0.40",
1920
"ws": "^8.18.0"
2021
},

0 commit comments

Comments (0)