Skip to content

Commit b4cd5cf

Browse files
committed
Last nits
1 parent 8b0420e commit b4cd5cf

File tree

2 files changed

+30
-33
lines changed

2 files changed

+30
-33
lines changed

.github/workflows/integration-test.yml

Lines changed: 0 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -160,13 +160,6 @@ jobs:
160160
working-directory: examples
161161
run: just run-test-store-chunked-data "${{ env.TEST_DIR }}"
162162

163-
# - name: Restart IPFS before store-big-data (Westend parachain)
164-
# working-directory: examples
165-
# run: |
166-
# echo "🔄 Restarting IPFS to clear state..."
167-
# just ipfs-shutdown "${{ env.TEST_DIR }}"
168-
# just ipfs-start "${{ env.TEST_DIR }}"
169-
170163
- name: Test store-big-data (Westend parachain)
171164
working-directory: examples
172165
run: just run-test-store-big-data "${{ env.TEST_DIR }}" "big32"
@@ -202,13 +195,6 @@ jobs:
202195
working-directory: examples
203196
run: just run-test-store-chunked-data "${{ env.TEST_DIR }}"
204197

205-
# - name: Restart IPFS before store-big-data (Polkadot solochain)
206-
# working-directory: examples
207-
# run: |
208-
# echo "🔄 Restarting IPFS to clear state..."
209-
# just ipfs-shutdown "${{ env.TEST_DIR }}"
210-
# just ipfs-start "${{ env.TEST_DIR }}"
211-
212198
- name: Test store-big-data (Polkadot solochain)
213199
working-directory: examples
214200
run: just run-test-store-big-data "${{ env.TEST_DIR }}" "big32"

examples/store_big_data.js

Lines changed: 30 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ import fs from 'fs'
44
import os from "os";
55
import path from "path";
66
import assert from "assert";
7-
import { authorizeAccount, store, fetchCid, TX_MODE_FINALIZED_BLOCK } from "./api.js";
7+
import {authorizeAccount, store, fetchCid, TX_MODE_FINALIZED_BLOCK, TX_MODE_IN_BLOCK} from "./api.js";
88
import { buildUnixFSDagPB, cidFromBytes } from "./cid_dag_metadata.js";
99
import {
1010
setupKeyringAndSigners,
@@ -154,40 +154,51 @@ async function printStatistics(dataSize, typedApi) {
154154
// Get block hash - either from our stored hashes or query the chain
155155
let blockHash = stats.blockHashes[blockNum];
156156
if (!blockHash) {
157-
blockHash = await typedApi.query.System.BlockHash.getValue(blockNum);
157+
const queriedHash = await typedApi.query.System.BlockHash.getValue(blockNum);
158+
// Handle different hash formats (string, Binary, Uint8Array)
159+
// PAPI Binary objects have asHex() method, fall back to toString()
160+
const hashStr = typeof queriedHash === 'string'
161+
? queriedHash
162+
: (queriedHash?.asHex?.() || queriedHash?.toHex?.() || queriedHash?.toString?.() || '');
163+
// Check if hash is not empty (all zeros means pruned/unavailable)
164+
if (hashStr && !hashStr.match(/^(0x)?0+$/)) {
165+
blockHash = queriedHash;
166+
}
158167
}
159168
if (blockHash) {
160169
const timestamp = await typedApi.query.Timestamp.Now.getValue({ at: blockHash });
161170
blockTimestamps[blockNum] = timestamp;
162171
}
163172
} catch (e) {
164-
// Timestamp not available for this block
173+
console.error(`Failed to fetch timestamp for block #${blockNum}:`, e.message);
165174
}
166175
}
167176

168177
console.log('\n');
169-
console.log('═══════════════════════════════════════════════════════════════════════════════════════════════════');
170-
console.log(' 📊 STORAGE STATISTICS ');
171-
console.log('═══════════════════════════════════════════════════════════════════════════════════════════════════');
172-
console.log(`| File size | ${formatBytes(dataSize).padEnd(20)} |`);
173-
console.log(`| Chunk/TX size | ${formatBytes(CHUNK_SIZE).padEnd(20)} |`);
174-
console.log(`| Number of chunks | ${numTxs.toString().padEnd(20)} |`);
175-
console.log(`| Avg txs per block | ${`${avgTxsPerBlock} (${numTxs}/${totalBlocksInRange})`.padEnd(20)} |`);
176-
console.log(`| Time elapsed | ${formatDuration(elapsed).padEnd(20)} |`);
177-
console.log(`| Blocks elapsed | ${`${blocksElapsed} (#${startBlock} → #${endBlock})`.padEnd(20)} |`);
178-
console.log(`| Throughput | ${formatBytes(dataSize / (elapsed / 1000)).padEnd(20)} /s |`);
179-
console.log('═══════════════════════════════════════════════════════════════════════════════════════════════════');
180-
console.log(' 📦 TRANSACTIONS PER BLOCK ');
181-
console.log('═══════════════════════════════════════════════════════════════════════════════════════════════════');
178+
console.log('════════════════════════════════════════════════════════════════════════════════════════════════════════');
179+
console.log(' 📊 STORAGE STATISTICS ');
180+
console.log('════════════════════════════════════════════════════════════════════════════════════════════════════════');
181+
console.log(`│ File size │ ${formatBytes(dataSize).padEnd(25)} │`);
182+
console.log(`│ Chunk/TX size │ ${formatBytes(CHUNK_SIZE).padEnd(25)} │`);
183+
console.log(`│ Number of chunks │ ${numTxs.toString().padEnd(25)} │`);
184+
console.log(`│ Avg txs per block │ ${`${avgTxsPerBlock} (${numTxs}/${totalBlocksInRange})`.padEnd(25)} │`);
185+
console.log(`│ Time elapsed │ ${formatDuration(elapsed).padEnd(25)} │`);
186+
console.log(`│ Blocks elapsed │ ${`${blocksElapsed} (#${startBlock} → #${endBlock})`.padEnd(25)} │`);
187+
console.log(`│ Throughput │ ${formatBytes(dataSize / (elapsed / 1000)).padEnd(22)} /s │`);
188+
console.log('════════════════════════════════════════════════════════════════════════════════════════════════════════');
189+
console.log(' 📦 TRANSACTIONS PER BLOCK ');
190+
console.log('════════════════════════════════════════════════════════════════════════════════════════════════════════');
191+
console.log('│ Block │ Time │ TXs │ Size │ Bar │');
192+
console.log('├─────────────┼─────────────────────────┼─────┼──────────────┼──────────────────────┤');
182193
for (let blockNum = startBlock; blockNum <= endBlock; blockNum++) {
183194
const count = txsPerBlock[blockNum] || 0;
184195
const size = count > 0 ? formatBytes(count * CHUNK_SIZE) : '-';
185196
const bar = count > 0 ? '█'.repeat(Math.min(count, 20)) : '';
186197
const timestamp = blockTimestamps[blockNum];
187198
const timeStr = timestamp ? new Date(Number(timestamp)).toISOString().replace('T', ' ').replace('Z', '') : '-';
188-
console.log(`| Block #${blockNum.toString().padEnd(8)} | ${timeStr.padEnd(23)} | ${count.toString().padStart(3)} txs | ${size.padEnd(12)} | ${bar}`);
199+
console.log(`#${blockNum.toString().padEnd(10)} ${timeStr.padEnd(23)} ${count.toString().padStart(3)} ${size.padEnd(12)} ${bar.padEnd(20)}`);
189200
}
190-
console.log('═══════════════════════════════════════════════════════════════════════════════════════════════════');
201+
console.log('════════════════════════════════════════════════════════════════════════════════════════════════════════');
191202
console.log('\n');
192203
}
193204

@@ -296,7 +307,7 @@ async function main() {
296307
dagBytes,
297308
0x70, // dag-pb codec
298309
0xb220, // blake2b-256
299-
TX_MODE_FINALIZED_BLOCK
310+
TX_MODE_IN_BLOCK
300311
);
301312
console.log(`Downloading...${cid} / ${rootCid}`);
302313
assert.deepStrictEqual(cid, rootCid, '❌ CID mismatch between stored and computed DAG root');

0 commit comments

Comments (0)