 import { rootPgPool, sequelize } from '@/config/database';
 import { getRequiredEnvString } from '@/utils/helpers';
 import { processPayload, saveBlock } from './streaming';
+import { Transaction } from 'sequelize';
 
 const SYNC_BASE_URL = getRequiredEnvString('SYNC_BASE_URL');
 const NETWORK_ID = getRequiredEnvString('SYNC_NETWORK');
@@ -185,6 +186,11 @@ export async function fillChainGapsBeforeDefiningCanonicalBaseline({
     const fromHeight = rows[0].height + 1;
     const toHeight = lastHeight - 1;
 
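+    // Nothing to fetch when the highest stored block already reaches lastHeight - 1.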
+    if (fromHeight > toHeight) {
+      console.info(`[INFO][SYNC][MISSING] No gaps to fill for chain ${chainId}`);
+      return;
+    }
+
     const url = `${SYNC_BASE_URL}/${NETWORK_ID}/chain/${chainId}/block/branch?minheight=${fromHeight}&maxheight=${toHeight}`;
 
     const res = await fetch(url, {
@@ -210,133 +216,9 @@ export async function fillChainGapsBeforeDefiningCanonicalBaseline({
     console.info(`[INFO][SYNC][MISSING] Initial chain gaps filled:`, chainId, fromHeight, toHeight);
   } catch (error) {
     console.error(
-      `[ERROR][SYNC][SYNC_TIMEOUT] Error filling chain ${chainId} gaps before defining canonical baseline:`,
-      error,
-    );
-  }
-}
-
-export async function checkCanonicalPathForAllChains() {
-  const chainsSynced = [];
-  const chains = Array.from({ length: 20 }, (_, i) => i.toString());
-
-  try {
-    for (const chainId of chains) {
-      const query = `
-        SELECT hash
-        FROM "Blocks"
-        WHERE "chainId" = $1 AND height = (SELECT MAX(height) FROM "Blocks" WHERE "chainId" = $1)
-      `;
-      const { rows } = await rootPgPool.query(query, [chainId]);
-      const blockHash = rows?.[0]?.hash;
-      const isSynced = await checkCanonicalPathStartingFromSpecificBlock(blockHash);
-
-      if (isSynced) {
-        chainsSynced.push(chainId);
-      }
-    }
-
-    if (chainsSynced.length === 0) {
-      console.info('[INFO][SYNC][MISSING] No chains to sync');
-    } else {
-      console.info(
-        `[INFO][SYNC][MISSING] Successfully synced ${chainsSynced.length} chains: ${chainsSynced.join(
-          ', ',
-        )}`,
-      );
-    }
-  } catch (error) {
-    console.error(
-      `[ERROR][SYNC][SYNC_TIMEOUT] Error checking canonical path for all chains:`,
+      `[FATAL][SYNC][MISSING] Error filling chain ${chainId} gaps before defining canonical baseline:`,
       error,
     );
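+    // Treat an unfilled gap as fatal rather than continuing toward an incomplete canonical baseline.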
+    process.exit(1);
   }
 }
-
-async function checkCanonicalPathStartingFromSpecificBlock(
-  blockHash: string,
-  maxAttempts = 20,
-): Promise<boolean> {
-  let attempts = 0;
-  let ancestors = await findCanonicalBaseline(blockHash);
-
-  while (ancestors.length < CANONICAL_BASE_LINE_LENGTH && attempts < maxAttempts) {
-    console.info(
-      `[INFO][SYNC][MISSING] Attempt ${attempts + 1}: Found ${ancestors.length} blocks, need ${CANONICAL_BASE_LINE_LENGTH}`,
-    );
-
-    // Get the lowest block we have
-    const lowestBlock = ancestors[ancestors.length - 1];
-
-    // Fetch and save the parent block
-    await fetchAndSaveBlocks(lowestBlock.chainId, lowestBlock.height - 1);
-
-    // Recalculate ancestors
-    ancestors = await findCanonicalBaseline(blockHash);
-    attempts++;
-  }
-
-  if (ancestors.length < CANONICAL_BASE_LINE_LENGTH) {
-    throw new Error(
-      `[ERROR][SYNC][SYNC_TIMEOUT] Failed to build complete canonical path after ${maxAttempts} attempts. Only found ${ancestors.length} blocks.`,
-    );
-  }
-
-  return attempts > 0;
-}
-
-async function findCanonicalBaseline(hash: string) {
-  const query = `
-    WITH RECURSIVE BlockAncestors AS (
-      SELECT hash, parent, 1 AS depth, height, "chainId"
-      FROM "Blocks"
-      WHERE hash = $1
-      UNION ALL
-      SELECT b.hash, b.parent, d.depth + 1 AS depth, b.height, b."chainId"
-      FROM BlockAncestors d
-      JOIN "Blocks" b ON d.parent = b.hash
-      WHERE d.depth < $2
-    )
-    SELECT parent as hash, height, "chainId"
-    FROM BlockAncestors
-    ORDER BY height DESC
-  `;
-
-  const { rows } = await rootPgPool.query(query, [hash, CANONICAL_BASE_LINE_LENGTH]);
-  return rows;
-}
-
-async function fetchAndSaveBlocks(chainId: number, height: number) {
-  const url = `${SYNC_BASE_URL}/${NETWORK_ID}/chain/${chainId}/block?minheight=${height}&maxheight=${height}`;
-  const res = await fetch(url, {
-    method: 'GET',
-    headers: {
-      'Content-Type': 'application/json',
-    },
-  });
-
-  const data = await res.json();
-
-  const tx = await sequelize.transaction();
-  try {
-    const promises = data.items.map(async (item: any) => {
-      const payload = processPayload(item.payloadWithOutputs);
-      const block = await Block.findOne({ where: { hash: item.header.hash } });
-      if (block) {
-        return Promise.resolve();
-      } else {
-        return saveBlock({ header: item.header, payload, canonical: true }, tx);
-      }
-    });
-
-    await Promise.all(promises);
-    await tx.commit();
-    console.info(
-      `[INFO][SYNC][MISSING] Successfully synced blocks at height ${height} for chain ${chainId}`,
-    );
-  } catch (err) {
-    await tx.rollback();
-    throw err;
-  }
-}
-