Skip to content

Commit a26ea97

Browse files
committed
refactor!: BlockHash type cleanup
As Santiago proposed [here](#19899 (review)), it makes sense to have `BlockHash` extend from `Fr` instead of from `Buffer32`, because the hash is the output of the Poseidon2 hash and hence natively a field. Also introduced branding to avoid collisions with `Fr`, and cleaned unused functions out of the type.
1 parent 7291e66 commit a26ea97

36 files changed

+156
-230
lines changed

yarn-project/archiver/src/modules/data_source_base.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ import type { EthAddress } from '@aztec/foundation/eth-address';
44
import { isDefined } from '@aztec/foundation/types';
55
import type { FunctionSelector } from '@aztec/stdlib/abi';
66
import type { AztecAddress } from '@aztec/stdlib/aztec-address';
7-
import { CheckpointedL2Block, CommitteeAttestation, L2Block, type L2Tips } from '@aztec/stdlib/block';
7+
import { type BlockHash, CheckpointedL2Block, CommitteeAttestation, L2Block, type L2Tips } from '@aztec/stdlib/block';
88
import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
99
import type { ContractClassPublic, ContractDataSource, ContractInstanceWithAddress } from '@aztec/stdlib/contract';
1010
import { type L1RollupConstants, getSlotRangeForEpoch } from '@aztec/stdlib/epoch-helpers';
@@ -121,7 +121,7 @@ export abstract class ArchiverDataSourceBase
121121
return this.store.getCheckpointedBlocks(from, limit);
122122
}
123123

124-
public getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
124+
public getBlockHeaderByHash(blockHash: BlockHash): Promise<BlockHeader | undefined> {
125125
return this.store.getBlockHeaderByHash(blockHash);
126126
}
127127

@@ -347,15 +347,15 @@ export abstract class ArchiverDataSourceBase
347347
return this.store.getBlocks(from, limit);
348348
}
349349

350-
public getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
350+
public getCheckpointedBlockByHash(blockHash: BlockHash): Promise<CheckpointedL2Block | undefined> {
351351
return this.store.getCheckpointedBlockByHash(blockHash);
352352
}
353353

354354
public getCheckpointedBlockByArchive(archive: Fr): Promise<CheckpointedL2Block | undefined> {
355355
return this.store.getCheckpointedBlockByArchive(archive);
356356
}
357357

358-
public async getL2BlockByHash(blockHash: Fr): Promise<L2Block | undefined> {
358+
public async getL2BlockByHash(blockHash: BlockHash): Promise<L2Block | undefined> {
359359
const checkpointedBlock = await this.store.getCheckpointedBlockByHash(blockHash);
360360
return checkpointedBlock?.block;
361361
}

yarn-project/archiver/src/store/block_store.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -351,7 +351,7 @@ export class BlockStore {
351351
}
352352

353353
private async addBlockToDatabase(block: L2Block, checkpointNumber: number, indexWithinCheckpoint: number) {
354-
const blockHash = BlockHash.fromField(await block.hash());
354+
const blockHash = await block.hash();
355355

356356
await this.#blocks.set(block.number, {
357357
header: block.header.toBuffer(),
@@ -624,7 +624,7 @@ export class BlockStore {
624624
}
625625
}
626626

627-
async getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
627+
async getCheckpointedBlockByHash(blockHash: BlockHash): Promise<CheckpointedL2Block | undefined> {
628628
const blockNumber = await this.#blockHashIndex.getAsync(blockHash.toString());
629629
if (blockNumber === undefined) {
630630
return undefined;

yarn-project/archiver/src/store/kv_archiver_store.test.ts

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -335,7 +335,7 @@ describe('KVArchiverDataStore', () => {
335335
await store.addCheckpoints(publishedCheckpoints);
336336
const lastCheckpoint = publishedCheckpoints[publishedCheckpoints.length - 1];
337337
const lastBlock = lastCheckpoint.checkpoint.blocks[0];
338-
const blockHash = (await lastBlock.header.hash()).toField();
338+
const blockHash = await lastBlock.header.hash();
339339
const archive = lastBlock.archive.root;
340340

341341
// Verify block and header exist before removing
@@ -639,7 +639,7 @@ describe('KVArchiverDataStore', () => {
639639
// Check each block by its hash
640640
for (let i = 0; i < checkpoint.checkpoint.blocks.length; i++) {
641641
const block = checkpoint.checkpoint.blocks[i];
642-
const blockHash = (await block.header.hash()).toField();
642+
const blockHash = await block.header.hash();
643643
const retrievedBlock = await store.getCheckpointedBlockByHash(blockHash);
644644

645645
expect(retrievedBlock).toBeDefined();
@@ -797,8 +797,8 @@ describe('KVArchiverDataStore', () => {
797797
await store.addProposedBlocks([block1, block2]);
798798

799799
// getBlockByHash should work for uncheckpointed blocks
800-
const hash1 = (await block1.header.hash()).toField();
801-
const hash2 = (await block2.header.hash()).toField();
800+
const hash1 = await block1.header.hash();
801+
const hash2 = await block2.header.hash();
802802

803803
const retrieved1 = await store.getBlockByHash(hash1);
804804
expect(retrieved1!.equals(block1)).toBe(true);
@@ -874,7 +874,7 @@ describe('KVArchiverDataStore', () => {
874874
});
875875
await store.addProposedBlocks([block1]);
876876

877-
const hash = (await block1.header.hash()).toField();
877+
const hash = await block1.header.hash();
878878

879879
// getCheckpointedBlockByHash should return undefined
880880
expect(await store.getCheckpointedBlockByHash(hash)).toBeUndefined();
@@ -1666,15 +1666,15 @@ describe('KVArchiverDataStore', () => {
16661666
it('retrieves a block by its hash', async () => {
16671667
const expectedCheckpoint = publishedCheckpoints[5];
16681668
const expectedBlock = expectedCheckpoint.checkpoint.blocks[0];
1669-
const blockHash = (await expectedBlock.header.hash()).toField();
1669+
const blockHash = await expectedBlock.header.hash();
16701670
const retrievedBlock = await store.getCheckpointedBlockByHash(blockHash);
16711671

16721672
expect(retrievedBlock).toBeDefined();
16731673
expectCheckpointedBlockEquals(retrievedBlock!, expectedBlock, expectedCheckpoint);
16741674
});
16751675

16761676
it('returns undefined for non-existent block hash', async () => {
1677-
const nonExistentHash = Fr.random();
1677+
const nonExistentHash = BlockHash.random();
16781678
await expect(store.getCheckpointedBlockByHash(nonExistentHash)).resolves.toBeUndefined();
16791679
});
16801680
});
@@ -1707,15 +1707,15 @@ describe('KVArchiverDataStore', () => {
17071707

17081708
it('retrieves a block header by its hash', async () => {
17091709
const expectedBlock = publishedCheckpoints[7].checkpoint.blocks[0];
1710-
const blockHash = (await expectedBlock.header.hash()).toField();
1710+
const blockHash = await expectedBlock.header.hash();
17111711
const retrievedHeader = await store.getBlockHeaderByHash(blockHash);
17121712

17131713
expect(retrievedHeader).toBeDefined();
17141714
expect(retrievedHeader!.equals(expectedBlock.header)).toBe(true);
17151715
});
17161716

17171717
it('returns undefined for non-existent block hash', async () => {
1718-
const nonExistentHash = Fr.random();
1718+
const nonExistentHash = BlockHash.random();
17191719
await expect(store.getBlockHeaderByHash(nonExistentHash)).resolves.toBeUndefined();
17201720
});
17211721
});
@@ -3320,7 +3320,7 @@ describe('KVArchiverDataStore', () => {
33203320
await store.addProposedBlocks([block1, block2]);
33213321

33223322
// Verify block2 is retrievable by hash and archive before removal
3323-
const block2Hash = (await block2.header.hash()).toField();
3323+
const block2Hash = await block2.header.hash();
33243324
const block2Archive = block2.archive.root;
33253325

33263326
expect(await store.getBlockByHash(block2Hash)).toBeDefined();
@@ -3346,7 +3346,7 @@ describe('KVArchiverDataStore', () => {
33463346
}
33473347

33483348
// Verify block1's data is still intact
3349-
const block1Hash = (await block1.header.hash()).toField();
3349+
const block1Hash = await block1.header.hash();
33503350
const block1Archive = block1.archive.root;
33513351

33523352
expect(await store.getBlockByHash(block1Hash)).toBeDefined();

yarn-project/archiver/src/store/kv_archiver_store.ts

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -291,7 +291,7 @@ export class KVArchiverDataStore implements ContractDataSource {
291291
* Returns the block for the given hash, or undefined if not exists.
292292
* @param blockHash - The block hash to return.
293293
*/
294-
getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
294+
getCheckpointedBlockByHash(blockHash: BlockHash): Promise<CheckpointedL2Block | undefined> {
295295
return this.#blockStore.getCheckpointedBlockByHash(blockHash);
296296
}
297297
/**
@@ -312,8 +312,8 @@ export class KVArchiverDataStore implements ContractDataSource {
312312
* Returns the block for the given hash, or undefined if not exists.
313313
* @param blockHash - The block hash to return.
314314
*/
315-
getBlockByHash(blockHash: Fr): Promise<L2Block | undefined> {
316-
return this.#blockStore.getBlockByHash(BlockHash.fromField(blockHash));
315+
getBlockByHash(blockHash: BlockHash): Promise<L2Block | undefined> {
316+
return this.#blockStore.getBlockByHash(blockHash);
317317
}
318318
/**
319319
* Returns the block for the given archive root, or undefined if not exists.
@@ -357,8 +357,8 @@ export class KVArchiverDataStore implements ContractDataSource {
357357
* Returns the block header for the given hash, or undefined if not exists.
358358
* @param blockHash - The block hash to return.
359359
*/
360-
getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
361-
return this.#blockStore.getBlockHeaderByHash(BlockHash.fromField(blockHash));
360+
getBlockHeaderByHash(blockHash: BlockHash): Promise<BlockHeader | undefined> {
361+
return this.#blockStore.getBlockHeaderByHash(blockHash);
362362
}
363363

364364
/**

yarn-project/archiver/src/store/log_store.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -271,7 +271,7 @@ export class LogStore {
271271
});
272272
}
273273

274-
#packWithBlockHash(blockHash: Fr, data: Buffer<ArrayBufferLike>[]): Buffer<ArrayBufferLike> {
274+
#packWithBlockHash(blockHash: BlockHash, data: Buffer<ArrayBufferLike>[]): Buffer<ArrayBufferLike> {
275275
return Buffer.concat([blockHash.toBuffer(), ...data]);
276276
}
277277

@@ -282,7 +282,7 @@ export class LogStore {
282282
throw new Error('Failed to read block hash from log entry buffer');
283283
}
284284

285-
return BlockHash.fromField(blockHash);
285+
return new BlockHash(blockHash);
286286
}
287287

288288
deleteLogs(blocks: L2Block[]): Promise<boolean> {

yarn-project/archiver/src/test/mock_l2_block_source.ts

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -195,7 +195,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
195195
return checkpoint;
196196
}
197197

198-
public async getCheckpointedBlockByHash(blockHash: Fr): Promise<CheckpointedL2Block | undefined> {
198+
public async getCheckpointedBlockByHash(blockHash: BlockHash): Promise<CheckpointedL2Block | undefined> {
199199
for (const block of this.l2Blocks) {
200200
const hash = await block.hash();
201201
if (hash.equals(blockHash)) {
@@ -225,7 +225,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
225225
);
226226
}
227227

228-
public async getL2BlockByHash(blockHash: Fr): Promise<L2Block | undefined> {
228+
public async getL2BlockByHash(blockHash: BlockHash): Promise<L2Block | undefined> {
229229
for (const block of this.l2Blocks) {
230230
const hash = await block.hash();
231231
if (hash.equals(blockHash)) {
@@ -240,7 +240,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
240240
return Promise.resolve(block);
241241
}
242242

243-
public async getBlockHeaderByHash(blockHash: Fr): Promise<BlockHeader | undefined> {
243+
public async getBlockHeaderByHash(blockHash: BlockHash): Promise<BlockHeader | undefined> {
244244
for (const block of this.l2Blocks) {
245245
const hash = await block.hash();
246246
if (hash.equals(blockHash)) {
@@ -322,7 +322,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
322322
return {
323323
data: txEffect,
324324
l2BlockNumber: block.number,
325-
l2BlockHash: BlockHash.fromField(await block.hash()),
325+
l2BlockHash: await block.hash(),
326326
txIndexInBlock: block.body.txEffects.indexOf(txEffect),
327327
};
328328
}
@@ -343,7 +343,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource {
343343
TxExecutionResult.SUCCESS,
344344
undefined,
345345
txEffect.transactionFee.toBigInt(),
346-
BlockHash.fromField(await block.hash()),
346+
await block.hash(),
347347
block.number,
348348
);
349349
}

yarn-project/aztec-node/src/aztec-node/server.ts

Lines changed: 11 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -570,8 +570,8 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable {
570570
* @returns The requested block.
571571
*/
572572
public async getBlock(block: BlockParameter): Promise<L2Block | undefined> {
573-
if (BlockHash.isL2BlockHash(block)) {
574-
return this.getBlockByHash(Fr.fromBuffer(block.toBuffer()));
573+
if (BlockHash.isBlockHash(block)) {
574+
return this.getBlockByHash(block);
575575
}
576576
const blockNumber = block === 'latest' ? await this.getBlockNumber() : (block as BlockNumber);
577577
if (blockNumber === BlockNumber.ZERO) {
@@ -585,9 +585,9 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable {
585585
* @param blockHash - The block hash being requested.
586586
* @returns The requested block.
587587
*/
588-
public async getBlockByHash(blockHash: Fr): Promise<L2Block | undefined> {
588+
public async getBlockByHash(blockHash: BlockHash): Promise<L2Block | undefined> {
589589
const initialBlockHash = await this.#getInitialHeaderHash();
590-
if (blockHash.equals(Fr.fromBuffer(initialBlockHash.toBuffer()))) {
590+
if (blockHash.equals(initialBlockHash)) {
591591
return this.buildInitialBlock();
592592
}
593593
return await this.blockSource.getL2BlockByHash(blockHash);
@@ -697,8 +697,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable {
697697
if (referenceBlock) {
698698
const initialBlockHash = await this.#getInitialHeaderHash();
699699
if (!referenceBlock.equals(initialBlockHash)) {
700-
const blockHashFr = Fr.fromBuffer(referenceBlock.toBuffer());
701-
const header = await this.blockSource.getBlockHeaderByHash(blockHashFr);
700+
const header = await this.blockSource.getBlockHeaderByHash(referenceBlock);
702701
if (!header) {
703702
throw new Error(
704703
`Block ${referenceBlock.toString()} not found in the node. This might indicate a reorg has occurred.`,
@@ -718,8 +717,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable {
718717
if (referenceBlock) {
719718
const initialBlockHash = await this.#getInitialHeaderHash();
720719
if (!referenceBlock.equals(initialBlockHash)) {
721-
const blockHashFr = Fr.fromBuffer(referenceBlock.toBuffer());
722-
const header = await this.blockSource.getBlockHeaderByHash(blockHashFr);
720+
const header = await this.blockSource.getBlockHeaderByHash(referenceBlock);
723721
if (!header) {
724722
throw new Error(
725723
`Block ${referenceBlock.toString()} not found in the node. This might indicate a reorg has occurred.`,
@@ -915,7 +913,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable {
915913
}
916914
return {
917915
l2BlockNumber: BlockNumber(Number(blockNumber)),
918-
l2BlockHash: BlockHash.fromField(blockHash),
916+
l2BlockHash: new BlockHash(blockHash),
919917
data: index,
920918
};
921919
});
@@ -1118,14 +1116,13 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable {
11181116
}
11191117

11201118
public async getBlockHeader(block: BlockParameter = 'latest'): Promise<BlockHeader | undefined> {
1121-
if (BlockHash.isL2BlockHash(block)) {
1119+
if (BlockHash.isBlockHash(block)) {
11221120
const initialBlockHash = await this.#getInitialHeaderHash();
11231121
if (block.equals(initialBlockHash)) {
11241122
// Block source doesn't handle initial header so we need to handle the case separately.
11251123
return this.worldStateSynchronizer.getCommitted().getInitialHeader();
11261124
}
1127-
const blockHashFr = Fr.fromBuffer(block.toBuffer());
1128-
return this.blockSource.getBlockHeaderByHash(blockHashFr);
1125+
return this.blockSource.getBlockHeaderByHash(block);
11291126
} else {
11301127
// Block source doesn't handle initial header so we need to handle the case separately.
11311128
const blockNumber = block === 'latest' ? await this.getBlockNumber() : (block as BlockNumber);
@@ -1442,15 +1439,14 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable {
14421439
return this.worldStateSynchronizer.getCommitted();
14431440
}
14441441

1445-
if (BlockHash.isL2BlockHash(block)) {
1442+
if (BlockHash.isBlockHash(block)) {
14461443
const initialBlockHash = await this.#getInitialHeaderHash();
14471444
if (block.equals(initialBlockHash)) {
14481445
// Block source doesn't handle initial header so we need to handle the case separately.
14491446
return this.worldStateSynchronizer.getSnapshot(BlockNumber.ZERO);
14501447
}
14511448

1452-
const blockHashFr = Fr.fromBuffer(block.toBuffer());
1453-
const header = await this.blockSource.getBlockHeaderByHash(blockHashFr);
1449+
const header = await this.blockSource.getBlockHeaderByHash(block);
14541450
if (!header) {
14551451
throw new Error(
14561452
`Block hash ${block.toString()} not found when querying world state. If the node API has been queried with anchor block hash possibly a reorg has occurred.`,

yarn-project/end-to-end/src/e2e_simple.test.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -59,12 +59,12 @@ describe('e2e_simple', () => {
5959
const initialBlockByHash = await aztecNode.getBlock(initialHeaderHash);
6060
expect(initialBlockByHash).toBeDefined();
6161
const initialBlockHash = await initialBlockByHash!.hash();
62-
expect(initialBlockHash.equals(initialHeaderHash.toField())).toBeTrue();
62+
expect(initialBlockHash.equals(initialHeaderHash)).toBeTrue();
6363
expect(initialBlockByHash?.body.txEffects.length).toBe(0);
6464
const initialBlockByNumber = await aztecNode.getBlock(BlockNumber.ZERO);
6565
expect(initialBlockByNumber).toBeDefined();
6666
const initialBlockByNumberHash = await initialBlockByNumber!.hash();
67-
expect(initialBlockByNumberHash.equals(initialHeaderHash.toField())).toBeTrue();
67+
expect(initialBlockByNumberHash.equals(initialHeaderHash)).toBeTrue();
6868
expect(initialBlockByNumber?.body.txEffects.length).toBe(0);
6969
});
7070

yarn-project/end-to-end/src/e2e_snapshot_sync.test.ts

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,9 @@ describe('e2e_snapshot_sync', () => {
113113

114114
log.warn(`Checking for L2 block ${L2_TARGET_BLOCK_NUM} with hash ${blockHash} on both nodes`);
115115
const getBlockHashLeafIndex = (node: AztecNode) =>
116-
node.findLeavesIndexes(BlockNumber(L2_TARGET_BLOCK_NUM), MerkleTreeId.ARCHIVE, [blockHash]).then(([i]) => i);
116+
node
117+
.findLeavesIndexes(BlockNumber(L2_TARGET_BLOCK_NUM), MerkleTreeId.ARCHIVE, [blockHash.toFr()])
118+
.then(([i]) => i);
117119
expect(await getBlockHashLeafIndex(context.aztecNode)).toBeDefined();
118120
expect(await getBlockHashLeafIndex(node)).toBeDefined();
119121

@@ -228,7 +230,9 @@ describe('e2e_snapshot_sync', () => {
228230

229231
log.warn(`Checking for L2 block ${L2_TARGET_BLOCK_NUM} with hash ${blockHash} on both nodes`);
230232
const getBlockHashLeafIndex = (node: AztecNode) =>
231-
node.findLeavesIndexes(BlockNumber(L2_TARGET_BLOCK_NUM), MerkleTreeId.ARCHIVE, [blockHash]).then(([i]) => i);
233+
node
234+
.findLeavesIndexes(BlockNumber(L2_TARGET_BLOCK_NUM), MerkleTreeId.ARCHIVE, [blockHash.toFr()])
235+
.then(([i]) => i);
232236
expect(await getBlockHashLeafIndex(context.aztecNode)).toBeDefined();
233237
expect(await getBlockHashLeafIndex(node)).toBeDefined();
234238

yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -322,7 +322,7 @@ describe('KV TX pool', () => {
322322
// modify tx1 to return no archive indices
323323
tx1.data.constants.anchorBlockHeader.globalVariables.blockNumber = BlockNumber(1);
324324
const tx1HeaderHash = await tx1.data.constants.anchorBlockHeader.hash();
325-
const tx1HeaderHashFr = tx1HeaderHash.toField();
325+
const tx1HeaderHashFr = tx1HeaderHash;
326326
db.findLeafIndices.mockImplementation((tree, leaves) => {
327327
if (tree === MerkleTreeId.ARCHIVE) {
328328
return Promise.resolve((leaves as Fr[]).map(l => (l.equals(tx1HeaderHashFr) ? undefined : 1n)));

0 commit comments

Comments
 (0)