@@ -4,7 +4,7 @@ import fs from 'fs'
 import os from "os";
 import path from "path";
 import assert from "assert";
-import { authorizeAccount, store, fetchCid, TX_MODE_FINALIZED_BLOCK } from "./api.js";
+import { authorizeAccount, store, fetchCid, TX_MODE_FINALIZED_BLOCK, TX_MODE_IN_BLOCK } from "./api.js";
 import { buildUnixFSDagPB, cidFromBytes } from "./cid_dag_metadata.js";
 import {
   setupKeyringAndSigners,
@@ -154,40 +154,51 @@ async function printStatistics(dataSize, typedApi) {
       // Get block hash - either from our stored hashes or query the chain
       let blockHash = stats.blockHashes[blockNum];
       if (!blockHash) {
-        blockHash = await typedApi.query.System.BlockHash.getValue(blockNum);
+        const queriedHash = await typedApi.query.System.BlockHash.getValue(blockNum);
+        // Handle different hash formats (string, Binary, Uint8Array):
+        // PAPI Binary objects have an asHex() method; fall back to toString()
+        const hashStr = typeof queriedHash === 'string'
+          ? queriedHash
+          : (queriedHash?.asHex?.() || queriedHash?.toHex?.() || queriedHash?.toString?.() || '');
+        // Check that the hash is non-empty (all zeros means pruned/unavailable)
+        if (hashStr && !hashStr.match(/^(0x)?0+$/)) {
+          blockHash = queriedHash;
+        }
       }
       if (blockHash) {
         const timestamp = await typedApi.query.Timestamp.Now.getValue({ at: blockHash });
         blockTimestamps[blockNum] = timestamp;
       }
     } catch (e) {
-      // Timestamp not available for this block
+      console.error(`Failed to fetch timestamp for block #${blockNum}:`, e.message);
     }
   }
 
   console.log('\n');
-  console.log('═══════════════════════════════════════════════════════════════════════════════════════════════════');
-  console.log('                                        📊 STORAGE STATISTICS');
-  console.log('═══════════════════════════════════════════════════════════════════════════════════════════════════');
-  console.log(`| File size          | ${formatBytes(dataSize).padEnd(20)} |`);
-  console.log(`| Chunk/TX size      | ${formatBytes(CHUNK_SIZE).padEnd(20)} |`);
-  console.log(`| Number of chunks   | ${numTxs.toString().padEnd(20)} |`);
-  console.log(`| Avg txs per block  | ${`${avgTxsPerBlock} (${numTxs}/${totalBlocksInRange})`.padEnd(20)} |`);
-  console.log(`| Time elapsed       | ${formatDuration(elapsed).padEnd(20)} |`);
-  console.log(`| Blocks elapsed     | ${`${blocksElapsed} (#${startBlock} → #${endBlock})`.padEnd(20)} |`);
-  console.log(`| Throughput         | ${formatBytes(dataSize / (elapsed / 1000)).padEnd(20)} /s |`);
-  console.log('═══════════════════════════════════════════════════════════════════════════════════════════════════');
-  console.log('                                      📦 TRANSACTIONS PER BLOCK');
-  console.log('═══════════════════════════════════════════════════════════════════════════════════════════════════');
+  console.log('════════════════════════════════════════════════════════════════════════════════════════════════════════');
+  console.log('                                          📊 STORAGE STATISTICS');
+  console.log('════════════════════════════════════════════════════════════════════════════════════════════════════════');
+  console.log(`│ File size          │ ${formatBytes(dataSize).padEnd(25)} │`);
+  console.log(`│ Chunk/TX size      │ ${formatBytes(CHUNK_SIZE).padEnd(25)} │`);
+  console.log(`│ Number of chunks   │ ${numTxs.toString().padEnd(25)} │`);
+  console.log(`│ Avg txs per block  │ ${`${avgTxsPerBlock} (${numTxs}/${totalBlocksInRange})`.padEnd(25)} │`);
+  console.log(`│ Time elapsed       │ ${formatDuration(elapsed).padEnd(25)} │`);
+  console.log(`│ Blocks elapsed     │ ${`${blocksElapsed} (#${startBlock} → #${endBlock})`.padEnd(25)} │`);
+  console.log(`│ Throughput         │ ${formatBytes(dataSize / (elapsed / 1000)).padEnd(22)} /s │`);
+  console.log('════════════════════════════════════════════════════════════════════════════════════════════════════════');
+  console.log('                                        📦 TRANSACTIONS PER BLOCK');
+  console.log('════════════════════════════════════════════════════════════════════════════════════════════════════════');
+  console.log('│ Block       │ Time                    │ TXs │ Size         │ Bar                  │');
+  console.log('├─────────────┼─────────────────────────┼─────┼──────────────┼──────────────────────┤');
   for (let blockNum = startBlock; blockNum <= endBlock; blockNum++) {
     const count = txsPerBlock[blockNum] || 0;
     const size = count > 0 ? formatBytes(count * CHUNK_SIZE) : '-';
     const bar = count > 0 ? '█'.repeat(Math.min(count, 20)) : '';
     const timestamp = blockTimestamps[blockNum];
     const timeStr = timestamp ? new Date(Number(timestamp)).toISOString().replace('T', ' ').replace('Z', '') : '-';
-    console.log(`| Block #${blockNum.toString().padEnd(8)} | ${timeStr.padEnd(23)} | ${count.toString().padStart(3)} txs | ${size.padEnd(12)} | ${bar}`);
+    console.log(`│ #${blockNum.toString().padEnd(10)} │ ${timeStr.padEnd(23)} │ ${count.toString().padStart(3)} │ ${size.padEnd(12)} │ ${bar.padEnd(20)} │`);
   }
-  console.log('═══════════════════════════════════════════════════════════════════════════════════════════════════');
+  console.log('════════════════════════════════════════════════════════════════════════════════════════════════════════');
   console.log('\n');
 }
 
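Aside: the normalization added in the hunk above could live in a small helper rather than inline. A minimal sketch, not part of this diff — normalizeBlockHash is a hypothetical name, and the asHex()/toHex() duck-typing simply mirrors what the hunk assumes about PAPI Binary values:

// Returns the queried hash when it looks usable, undefined when the chain
// reports an all-zeros hash (pruned or unavailable block).
function normalizeBlockHash(queriedHash) {
  const hashStr = typeof queriedHash === 'string'
    ? queriedHash
    : (queriedHash?.asHex?.() || queriedHash?.toHex?.() || queriedHash?.toString?.() || '');
  return hashStr && !/^(0x)?0+$/.test(hashStr) ? queriedHash : undefined;
}
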
@@ -296,7 +307,7 @@ async function main() {
     dagBytes,
     0x70,   // dag-pb codec
     0xb220, // blake2b-256
-    TX_MODE_FINALIZED_BLOCK
+    TX_MODE_IN_BLOCK
   );
   console.log(`Downloading...${cid} / ${rootCid}`);
   assert.deepStrictEqual(cid, rootCid, '❌ CID mismatch between stored and computed DAG root');
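
A note on the mode change above: judging by the names exported from ./api.js, TX_MODE_IN_BLOCK should resolve once the transaction is included in a block, while TX_MODE_FINALIZED_BLOCK waits for that block to be finalized — slower, but safe against re-orgs, matching the usual Substrate transaction-status stages. A hedged sketch of how a caller might choose between them (WAIT_FOR_FINALITY is a hypothetical flag; the other store() arguments are elided exactly as in the hunk):

// Trade latency for certainty: in-block is enough for benchmarks,
// finalized is the safer choice for data that must not be lost to a re-org.
const txMode = process.env.WAIT_FOR_FINALITY
  ? TX_MODE_FINALIZED_BLOCK
  : TX_MODE_IN_BLOCK;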