@@ -56,6 +56,7 @@ const stats = {
5656 startTime : null ,
5757 endTime : null ,
5858 blockNumbers : [ ] , // Track all block numbers where txs were included
59+ blockHashes : { } , // Map block number -> block hash for timestamp lookups
5960} ;
6061
6162function waitForQueueLength ( targetLength , timeoutMs = 300000 ) {
@@ -105,6 +106,9 @@ async function processJob(typedApi, workerId, signer, chunk) {
105106 pushToResultQueue ( { cid, blockNumber } ) ;
106107 if ( blockNumber !== undefined ) {
107108 stats . blockNumbers . push ( blockNumber ) ;
109+ if ( blockHash && ! stats . blockHashes [ blockNumber ] ) {
110+ stats . blockHashes [ blockNumber ] = blockHash ;
111+ }
108112 }
109113 console . log ( `Worker ${ workerId } tx included in block #${ blockNumber } with CID: ${ cid } ` ) ;
110114}
@@ -126,7 +130,7 @@ function formatDuration(ms) {
126130 return ms + ' ms' ;
127131}
128132
129- function printStatistics ( dataSize ) {
133+ async function printStatistics ( dataSize , typedApi ) {
130134 const numTxs = stats . blockNumbers . length ;
131135 const elapsed = stats . endTime - stats . startTime ;
132136
@@ -141,29 +145,50 @@ function printStatistics(dataSize) {
141145 txsPerBlock [ blockNum ] = ( txsPerBlock [ blockNum ] || 0 ) + 1 ;
142146 }
143147 const numBlocksWithTxs = Object . keys ( txsPerBlock ) . length ;
144- const avgTxsPerBlock = numBlocksWithTxs > 0 ? ( numTxs / numBlocksWithTxs ) . toFixed ( 2 ) : 'N/A' ;
148+ const totalBlocksInRange = blocksElapsed + 1 ;
149+ const avgTxsPerBlock = totalBlocksInRange > 0 ? ( numTxs / totalBlocksInRange ) . toFixed ( 2 ) : 'N/A' ;
150+
151+ // Fetch block timestamps for all blocks in range
152+ const blockTimestamps = { } ;
153+ for ( let blockNum = startBlock ; blockNum <= endBlock ; blockNum ++ ) {
154+ try {
155+ // Resolve the block hash — prefer the hash we recorded at inclusion time, otherwise query the chain
156+ let blockHash = stats . blockHashes [ blockNum ] ;
157+ if ( ! blockHash ) {
158+ blockHash = await typedApi . query . System . BlockHash . getValue ( blockNum ) ;
159+ }
160+ if ( blockHash ) {
161+ const timestamp = await typedApi . query . Timestamp . Now . getValue ( { at : blockHash } ) ;
162+ blockTimestamps [ blockNum ] = timestamp ;
163+ }
164+ } catch ( e ) {
165+ // Best-effort lookup: the timestamp may be unavailable for this block (e.g. pruned state); leave it blank
166+ }
167+ }
145168
146169 console . log ( '\n' ) ;
147- console . log ( '═══════════════════════════════════════════════════════════════════════════════' ) ;
148- console . log ( ' 📊 STORAGE STATISTICS ' ) ;
149- console . log ( '═══════════════════════════════════════════════════════════════════════════════' ) ;
170+ console . log ( '═══════════════════════════════════════════════════════════════════════════════════════════════════ ' ) ;
171+ console . log ( ' 📊 STORAGE STATISTICS ' ) ;
172+ console . log ( '═══════════════════════════════════════════════════════════════════════════════════════════════════ ' ) ;
150173 console . log ( `| File size | ${ formatBytes ( dataSize ) . padEnd ( 20 ) } |` ) ;
151174 console . log ( `| Chunk/TX size | ${ formatBytes ( CHUNK_SIZE ) . padEnd ( 20 ) } |` ) ;
152175 console . log ( `| Number of chunks | ${ numTxs . toString ( ) . padEnd ( 20 ) } |` ) ;
153- console . log ( `| Avg txs per block | ${ avgTxsPerBlock . toString ( ) . padEnd ( 20 ) } |` ) ;
176+ console . log ( `| Avg txs per block | ${ ` ${ avgTxsPerBlock } ( ${ numTxs } / ${ totalBlocksInRange } )` . padEnd ( 20 ) } |` ) ;
154177 console . log ( `| Time elapsed | ${ formatDuration ( elapsed ) . padEnd ( 20 ) } |` ) ;
155178 console . log ( `| Blocks elapsed | ${ `${ blocksElapsed } (#${ startBlock } → #${ endBlock } )` . padEnd ( 20 ) } |` ) ;
156179 console . log ( `| Throughput | ${ formatBytes ( dataSize / ( elapsed / 1000 ) ) . padEnd ( 20 ) } /s |` ) ;
157- console . log ( '═══════════════════════════════════════════════════════════════════════════════' ) ;
158- console . log ( ' 📦 TRANSACTIONS PER BLOCK ' ) ;
159- console . log ( '═══════════════════════════════════════════════════════════════════════════════' ) ;
180+ console . log ( '═══════════════════════════════════════════════════════════════════════════════════════════════════ ' ) ;
181+ console . log ( ' 📦 TRANSACTIONS PER BLOCK ' ) ;
182+ console . log ( '═══════════════════════════════════════════════════════════════════════════════════════════════════ ' ) ;
160183 for ( let blockNum = startBlock ; blockNum <= endBlock ; blockNum ++ ) {
161184 const count = txsPerBlock [ blockNum ] || 0 ;
162185 const size = count > 0 ? formatBytes ( count * CHUNK_SIZE ) : '-' ;
163- const bar = count > 0 ? '█' . repeat ( count ) : '' ;
164- console . log ( `| Block #${ blockNum . toString ( ) . padEnd ( 10 ) } | ${ count . toString ( ) . padStart ( 3 ) } txs | ${ size . padEnd ( 12 ) } | ${ bar } ` ) ;
186+ const bar = count > 0 ? '█' . repeat ( Math . min ( count , 20 ) ) : '' ;
187+ const timestamp = blockTimestamps [ blockNum ] ;
188+ const timeStr = timestamp ? new Date ( Number ( timestamp ) ) . toISOString ( ) . replace ( 'T' , ' ' ) . replace ( 'Z' , '' ) : '-' ;
189+ console . log ( `| Block #${ blockNum . toString ( ) . padEnd ( 8 ) } | ${ timeStr . padEnd ( 23 ) } | ${ count . toString ( ) . padStart ( 3 ) } txs | ${ size . padEnd ( 12 ) } | ${ bar } ` ) ;
165190 }
166- console . log ( '═══════════════════════════════════════════════════════════════════════════════' ) ;
191+ console . log ( '═══════════════════════════════════════════════════════════════════════════════════════════════════ ' ) ;
167192 console . log ( '\n' ) ;
168193}
169194
@@ -305,7 +330,7 @@ async function main() {
305330 ) ;
306331
307332 // Print storage statistics
308- printStatistics ( dataSize ) ;
333+ await printStatistics ( dataSize , bulletinAPI ) ;
309334
310335 logTestResult ( true , 'Store Big Data Test' ) ;
311336 resultCode = 0 ;
0 commit comments