|
14 | 14 | * - Optimized batch retrieval through DataLoader patterns |
15 | 15 | */ |
16 | 16 |
|
17 | | -import { FindOptions, Op, QueryTypes } from 'sequelize'; |
| 17 | +import { Op, QueryTypes } from 'sequelize'; |
18 | 18 | import { rootPgPool, sequelize } from '../../../../config/database'; |
19 | | -import BlockModel, { BlockAttributes } from '../../../../models/block'; |
| 19 | +import BlockModel from '../../../../models/block'; |
20 | 20 | import BlockRepository, { |
21 | 21 | BlockOutput, |
22 | 22 | GetBlocksBetweenHeightsParams, |
23 | 23 | GetBlocksFromDepthParams, |
24 | 24 | GetCompletedBlocksParams, |
25 | 25 | GetLatestBlocksParams, |
| 26 | + UpdateCanonicalStatusParams, |
26 | 27 | } from '../../application/block-repository'; |
27 | 28 | import { getPageInfo, getPaginationParams } from '../../pagination'; |
28 | 29 | import { blockValidator } from '../schema-validator/block-schema-validator'; |
@@ -53,14 +54,12 @@ export default class BlockDbRepository implements BlockRepository { |
53 | 54 | * @returns Promise resolving to the block data if found |
54 | 55 | * @throws Error if the block is not found |
55 | 56 | */ |
56 | | - async getBlockByHash(hash: string) { |
| 57 | + async getBlockByHash(hash: string): Promise<BlockOutput | null> { |
57 | 58 | const block = await BlockModel.findOne({ |
58 | 59 | where: { hash }, |
59 | 60 | }); |
60 | 61 |
|
61 | | - if (!block) { |
62 | | - throw new Error('Block not found.'); |
63 | | - } |
| 62 | + if (!block) return null; |
64 | 63 |
|
65 | 64 | return blockValidator.mapFromSequelize(block); |
66 | 65 | } |
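
Note on the hunk above: `getBlockByHash` no longer throws on a miss, it resolves to `null`, so call sites that relied on the thrown `Error` now need an explicit check. A minimal caller-side sketch, using a hypothetical helper (not part of this diff) for resolvers that still want not-found to be an error:

```ts
// Hypothetical caller; `repository` is a BlockDbRepository instance.
// getBlockByHash now resolves to null instead of throwing, so the
// not-found case must be handled (or re-thrown) explicitly.
async function requireBlockByHash(
  repository: BlockDbRepository,
  hash: string,
): Promise<BlockOutput> {
  const block = await repository.getBlockByHash(hash);
  if (block === null) {
    throw new Error(`Block not found for hash ${hash}.`);
  }
  return block;
}
```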
@@ -314,7 +313,8 @@ export default class BlockDbRepository implements BlockRepository { |
314 | 313 | b.target as "target", |
315 | 314 | b.coinbase as "coinbase", |
316 | 315 | b.adjacents as "adjacents", |
317 | | - b.parent as "parent" |
| 316 | + b.parent as "parent", |
| 317 | + b.canonical as "canonical" |
318 | 318 | FROM "Blocks" b |
319 | 319 | WHERE b.height >= $2 |
320 | 320 | ${conditions} |
@@ -594,6 +594,7 @@ export default class BlockDbRepository implements BlockRepository { |
594 | 594 | b.coinbase as "coinbase", |
595 | 595 | b.adjacents as "adjacents", |
596 | 596 | b.parent as "parent", |
| 597 | + b.canonical as "canonical", |
597 | 598 | t.id as "transactionId" |
598 | 599 | FROM "Blocks" b |
599 | 600 | JOIN "Transactions" t ON b.id = t."blockId" |
@@ -641,7 +642,8 @@ export default class BlockDbRepository implements BlockRepository { |
641 | 642 | b.target as "target", |
642 | 643 | b.coinbase as "coinbase", |
643 | 644 | b.adjacents as "adjacents", |
644 | | - b.parent as "parent" |
| 645 | + b.parent as "parent", |
| 646 | + b.canonical as "canonical" |
645 | 647 | FROM "Blocks" b |
646 | 648 | WHERE b.hash = ANY($1::text[])`, |
647 | 649 | [hashes], |
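
The hash-keyed batch query in this hunk lines up with the "DataLoader patterns" note in the file header. A minimal sketch of how it could be wired into a loader, assuming the `dataloader` package, a hypothetical `getBlocksByHashes(hashes)` wrapper around the query above, and that `BlockOutput` exposes a `hash` field; the batch function must return one entry per requested key, in the same order:

```ts
import DataLoader from 'dataloader';

// Hypothetical wrapper around the `WHERE b.hash = ANY($1::text[])` query above.
declare function getBlocksByHashes(hashes: readonly string[]): Promise<BlockOutput[]>;

const blockByHashLoader = new DataLoader<string, BlockOutput | null>(async hashes => {
  const blocks = await getBlocksByHashes(hashes);
  // Re-order the batch result to match the requested keys; DataLoader requires
  // one entry per key, so unmatched hashes map to null.
  const byHash = new Map(blocks.map(block => [block.hash, block]));
  return hashes.map(hash => byHash.get(hash) ?? null);
});

// Individual .load() calls within a tick are coalesced into one batched query:
// const block = await blockByHashLoader.load(someHash);
```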
@@ -923,4 +925,84 @@ export default class BlockDbRepository implements BlockRepository { |
923 | 925 |
|
924 | 926 | return Object.assign({}, ...maxHeightsArray); |
925 | 927 | } |
| 928 | + |
| 929 | + async getBlocksWithSameHeight(height: number, chainId: string): Promise<BlockOutput[]> { |
| 930 | + const query = ` |
| 931 | + SELECT b.* |
| 932 | + FROM "Blocks" b |
| 933 | + WHERE b."height" = $1 AND b."chainId" = $2 |
| 934 | + `; |
| 935 | + |
| 936 | + const { rows } = await rootPgPool.query(query, [height, chainId]); |
| 937 | + |
| 938 | + return rows.map(row => blockValidator.validate(row)); |
| 939 | + } |
| 940 | + |
| 941 | + async getBlockNParent(depth: number, hash: string): Promise<string | undefined> { |
| 942 | + const query = ` |
| 943 | + WITH RECURSIVE BlockAncestors AS ( |
| 944 | + SELECT hash, parent, 1 AS depth, height |
| 945 | + FROM "Blocks" |
| 946 | + WHERE hash = $1 |
| 947 | + UNION ALL |
| 948 | + SELECT b.hash, b.parent, d.depth + 1 AS depth, b.height |
| 949 | + FROM BlockAncestors d |
| 950 | + JOIN "Blocks" b ON d.parent = b.hash |
| 951 | + WHERE d.depth < $2 |
| 952 | + ) |
| 953 | + SELECT parent as hash, depth |
| 954 | + FROM BlockAncestors |
| 955 | + ORDER BY depth DESC |
| 956 | + LIMIT 1; |
| 957 | + `; |
| 958 | + const { rows } = await rootPgPool.query(query, [hash, depth]); |
| 959 | + |
| 960 | + return rows?.[0]?.hash; |
| 961 | + } |
| 962 | + |
| 963 | + async getBlocksWithHeightHigherThan(height: number, chainId: string): Promise<BlockOutput[]> { |
| 964 | + const query = ` |
| 965 | + SELECT b.* |
| 966 | + FROM "Blocks" b |
| 967 | + WHERE b.height > $1 AND b."chainId" = $2; |
| 968 | + `; |
| 969 | + |
| 970 | + const { rows } = await rootPgPool.query(query, [height, chainId]); |
| 971 | + |
| 972 | + return rows.map(row => blockValidator.validate(row)); |
| 973 | + } |
| 974 | + |
| 975 | + async updateCanonicalStatus(params: UpdateCanonicalStatusParams) { |
| 976 | + const canonicalHashes = params.blocks |
| 977 | + .filter(change => change.canonical) |
| 978 | + .map(change => change.hash); |
| 979 | + const nonCanonicalHashes = params.blocks |
| 980 | + .filter(change => !change.canonical) |
| 981 | + .map(change => change.hash); |
| 982 | + |
| 983 | + await rootPgPool.query('BEGIN'); |
| 984 | + try { |
| 985 | + if (canonicalHashes.length > 0) { |
| 986 | + const canonicalQuery = ` |
| 987 | + UPDATE "Blocks" |
| 988 | + SET "canonical" = true |
| 989 | + WHERE hash = ANY($1) |
| 990 | + `; |
| 991 | + await rootPgPool.query(canonicalQuery, [canonicalHashes]); |
| 992 | + } |
| 993 | + |
| 994 | + if (nonCanonicalHashes.length > 0) { |
| 995 | + const nonCanonicalQuery = ` |
| 996 | + UPDATE "Blocks" |
| 997 | + SET "canonical" = false |
| 998 | + WHERE hash = ANY($1) |
| 999 | + `; |
| 1000 | + await rootPgPool.query(nonCanonicalQuery, [nonCanonicalHashes]); |
| 1001 | + } |
| 1002 | + await rootPgPool.query('COMMIT'); |
| 1003 | + } catch (error) { |
| 1004 | + await rootPgPool.query('ROLLBACK'); |
| 1005 | + throw error; |
| 1006 | + } |
| 1007 | + } |
926 | 1008 | } |
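
One caveat on `updateCanonicalStatus`: if `rootPgPool` is a plain node-postgres `Pool` (an assumption; it may be a wrapper that pins a connection), issuing `BEGIN`, the `UPDATE`s, and `COMMIT` through `pool.query` can dispatch each statement to a different pooled connection, so they would not share a transaction. A sketch of the same logic on a single checked-out client, under that assumption:

```ts
import { Pool } from 'pg';

// Sketch only: assumes rootPgPool is (or wraps) a pg Pool. Checking out one
// client keeps BEGIN, both UPDATEs, and COMMIT on the same connection.
async function updateCanonicalStatusOnOneClient(
  pool: Pool,
  canonicalHashes: string[],
  nonCanonicalHashes: string[],
): Promise<void> {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    if (canonicalHashes.length > 0) {
      await client.query(
        'UPDATE "Blocks" SET "canonical" = true WHERE hash = ANY($1)',
        [canonicalHashes],
      );
    }
    if (nonCanonicalHashes.length > 0) {
      await client.query(
        'UPDATE "Blocks" SET "canonical" = false WHERE hash = ANY($1)',
        [nonCanonicalHashes],
      );
    }
    await client.query('COMMIT');
  } catch (error) {
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}
```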