@@ -1,10 +1,5 @@
-import { blake2AsU8a } from '@polkadot/util-crypto'
-import * as multihash from 'multiformats/hashes/digest'
-import { CID } from 'multiformats/cid'
 import { Keyring } from '@polkadot/keyring';
 import { getPolkadotSigner } from '@polkadot-api/signer';
-import * as dagPB from '@ipld/dag-pb'
-import { UnixFS } from 'ipfs-unixfs'
 import { createCanvas } from "canvas";
 import fs from "fs";
 import assert from "assert";
@@ -15,35 +10,6 @@ export async function waitForNewBlock() {
   return new Promise(resolve => setTimeout(resolve, 7000))
 }

-/**
- * Create CID for data.
- * Default to `0x55 (raw)` with blake2b_256 hash.
- *
- * 0xb220:
- * - 0xb2 = the multihash algorithm family for BLAKE2b
- * - 0x20 = the digest length in bytes (32 bytes = 256 bits)
- *
- * See: https://github.com/multiformats/multicodec/blob/master/table.csv
- */
-export async function cidFromBytes(bytes, cidCodec = 0x55, mhCode = 0xb220) {
-  console.log(`[CID]: Using cidCodec: ${cidCodec} and mhCode: ${mhCode}`);
-  let mh;
-  switch (mhCode) {
-    case 0xb220: // blake2b-256
-      mh = multihash.create(mhCode, blake2AsU8a(bytes));
-      break;
-
-    default:
-      throw new Error("Unhandled multihash code: " + mhCode)
-  }
-  return CID.createV1(cidCodec, mh)
-}
-
-export function convertCid(cid, cidCodec) {
-  const mh = cid.multihash;
-  return CID.createV1(cidCodec, mh);
-}
-
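// A minimal usage sketch for the two removed helpers above, assuming the
// multiformats imports removed at the top of this diff (`payload` is a
// hypothetical input; 0x70 is the dag-pb codec from the multicodec table):
const payload = new TextEncoder().encode('hello world');
const rawCid = await cidFromBytes(payload);    // defaults: 0x55 (raw) + blake2b-256
const dagPbCid = convertCid(rawCid, 0x70);     // same digest, re-wrapped as dag-pb
console.log(rawCid.toString(), dagPbCid.toString());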
 /**
  * Creates a PAPI-compatible signer from a Keyring account
  */
@@ -70,50 +36,6 @@ export function setupKeyringAndSigners(sudoSeed, accountSeed) {
   };
 }

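// A minimal sketch of the PAPI-compatible signer described above, assuming an
// sr25519 Keyring pair; `signerFromPair` is a hypothetical name, since the
// actual implementation is elided between these hunks:
export function signerFromPair(pair) {
  return getPolkadotSigner(pair.publicKey, 'Sr25519', (input) => pair.sign(input));
}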
-/**
- * Build a UnixFS DAG-PB file node from raw chunks.
- *
- * (By default with SHA2 multihash)
- */
-export async function buildUnixFSDagPB(chunks, mhCode = 0x12) {
-  if (!chunks?.length) {
-    throw new Error('❌ buildUnixFSDag: chunks[] is empty')
-  }
-
-  // UnixFS blockSizes = sizes of child blocks
-  const blockSizes = chunks.map(c => c.len)
-
-  console.log(`🧩 Building UnixFS DAG from chunks:
-  • totalChunks: ${chunks.length}
-  • blockSizes: ${blockSizes.join(', ')}`)
-
-  // Build UnixFS file metadata (no inline data here)
-  const fileData = new UnixFS({
-    type: 'file',
-    blockSizes
-  })
-
-  // DAG-PB node: our file with chunk links
-  const dagNode = dagPB.prepare({
-    Data: fileData.marshal(),
-    Links: chunks.map(c => ({
-      Name: '',
-      Tsize: c.len,
-      Hash: c.cid
-    }))
-  })
-
-  // Encode DAG-PB
-  const dagBytes = dagPB.encode(dagNode)
-
-  // Hash DAG to produce CIDv1
-  const rootCid = await cidFromBytes(dagBytes, dagPB.code, mhCode)
-
-  console.log(`✅ DAG root CID: ${rootCid.toString()}`)
-
-  return { rootCid, dagBytes }
-}
-
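// Sketch of how the removed builder was meant to be fed, assuming `buffer` is
// a Uint8Array and a hypothetical 256 KiB chunk size; 0xb220 is passed
// explicitly because cidFromBytes above only handles blake2b-256, so the
// 0x12 (sha2-256) default would throw:
const CHUNK_SIZE = 256 * 1024;
const chunks = [];
for (let i = 0; i < buffer.length; i += CHUNK_SIZE) {
  const slice = buffer.subarray(i, i + CHUNK_SIZE);
  chunks.push({ cid: await cidFromBytes(slice), len: slice.length });
}
const { rootCid, dagBytes } = await buildUnixFSDagPB(chunks, 0xb220);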
 /**
  * Generates (dynamic) images based on the input text.
  */
@@ -173,3 +95,26 @@ export function filesAreEqual(path1, path2) {
     assert.deepStrictEqual(data1[i], data2[i])
   }
 }
+
+/**
+ * Writes a full in-memory buffer to disk via a write stream.
+ */
+export async function fileToDisk(outputPath, fullBuffer) {
+  await new Promise((resolve, reject) => {
+    const ws = fs.createWriteStream(outputPath);
+    ws.write(fullBuffer);
+    ws.end();
+    ws.on('finish', resolve);
+    ws.on('error', reject);
+  });
+  console.log(`💾 File saved to: ${outputPath}`);
+}
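// Example call (path and contents are hypothetical):
await fileToDisk('./out.bin', Buffer.from('example bytes'));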
+
+export class NonceManager {
+  constructor(initialNonce) {
+    this.nonce = initialNonce; // BN instance from api.query.system.account
+  }
+
+  getAndIncrement() {
+    const current = this.nonce;
+    this.nonce = this.nonce.addn(1); // increment BN
+    return current;
+  }
+}
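// Intended use, assuming a polkadot-js `api` and an `address` (both
// hypothetical here): fetch the on-chain nonce once, then hand out
// sequential nonces for back-to-back transactions without re-querying.
const { nonce } = await api.query.system.account(address);
const nonces = new NonceManager(nonce);
const firstNonce = nonces.getAndIncrement();
const secondNonce = nonces.getAndIncrement(); // firstNonce + 1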