
Commit 87161d4

AztecBot committed: Merge branch 'next' into merge-train/barretenberg
2 parents: 5b74774 + 65ce9a8

4 files changed: +165 −10 lines

Lines changed: 142 additions & 0 deletions
@@ -0,0 +1,142 @@
+import { type BlockBlobData, type CheckpointBlobData, makeBlockEndBlobData } from '@aztec/blob-lib/encoding';
+import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types';
+import { Fr } from '@aztec/foundation/curves/bn254';
+import { Body, CommitteeAttestation } from '@aztec/stdlib/block';
+import { L1PublishedData } from '@aztec/stdlib/checkpoint';
+import { CheckpointHeader } from '@aztec/stdlib/rollup';
+
+import { type RetrievedCheckpoint, retrievedToPublishedCheckpoint } from './data_retrieval.js';
+
+describe('data_retrieval', () => {
+  describe('retrievedToPublishedCheckpoint', () => {
+    it('handles multi-block checkpoint', async () => {
+      // Create 3 different bodies with distinct transaction effects
+      const body1 = await Body.random({ txsPerBlock: 2 });
+      const body2 = await Body.random({ txsPerBlock: 2 });
+      const body3 = await Body.random({ txsPerBlock: 2 });
+
+      // Convert to BlockBlobData
+      const block1BlobData = makeBlockBlobDataFromBody(body1, BlockNumber(1), true, 1000);
+      const block2BlobData = makeBlockBlobDataFromBody(body2, BlockNumber(2), false, 2000);
+      const block3BlobData = makeBlockBlobDataFromBody(body3, BlockNumber(3), false, 3000);
+
+      // Calculate total blob fields for checkpoint end marker
+      const numBlobFields = 100; // Approximate, doesn't need to be exact for this test
+
+      const checkpointBlobData: CheckpointBlobData = {
+        blocks: [block1BlobData, block2BlobData, block3BlobData],
+        checkpointEndMarker: { numBlobFields },
+      };
+
+      const archiveRoot = Fr.random();
+
+      const retrievedCheckpoint: RetrievedCheckpoint = {
+        checkpointNumber: CheckpointNumber(1),
+        archiveRoot,
+        header: CheckpointHeader.random(),
+        checkpointBlobData,
+        l1: new L1PublishedData(1n, 1000n, '0x1234'),
+        chainId: new Fr(1),
+        version: new Fr(1),
+        attestations: [CommitteeAttestation.empty()],
+      };
+
+      const publishedCheckpoint = await retrievedToPublishedCheckpoint(retrievedCheckpoint);
+
+      // Verify we got 3 blocks
+      expect(publishedCheckpoint.checkpoint.blocks).toHaveLength(3);
+
+      // Verify each block has the correct number of txs
+      for (const block of publishedCheckpoint.checkpoint.blocks) {
+        expect(block.body.txEffects).toHaveLength(2);
+      }
+
+      // The critical assertion: each block should have the tx hashes from its corresponding body
+      const reconstructedBlock1 = publishedCheckpoint.checkpoint.blocks[0];
+      const reconstructedBlock2 = publishedCheckpoint.checkpoint.blocks[1];
+      const reconstructedBlock3 = publishedCheckpoint.checkpoint.blocks[2];
+
+      // Block 1 should have body1's tx hashes
+      expect(reconstructedBlock1.body.txEffects.map(tx => tx.txHash.toString())).toEqual(
+        body1.txEffects.map(tx => tx.txHash.toString()),
+      );
+
+      // Block 2 should have body2's tx hashes
+      expect(reconstructedBlock2.body.txEffects.map(tx => tx.txHash.toString())).toEqual(
+        body2.txEffects.map(tx => tx.txHash.toString()),
+      );
+
+      // Block 3 should have body3's tx hashes
+      expect(reconstructedBlock3.body.txEffects.map(tx => tx.txHash.toString())).toEqual(
+        body3.txEffects.map(tx => tx.txHash.toString()),
+      );
+
+      // Also verify blocks are distinct from each other
+      expect(reconstructedBlock1.body.txEffects.map(tx => tx.txHash.toString())).not.toEqual(
+        reconstructedBlock2.body.txEffects.map(tx => tx.txHash.toString())),
+      );
+      expect(reconstructedBlock1.body.txEffects.map(tx => tx.txHash.toString())).not.toEqual(
+        reconstructedBlock3.body.txEffects.map(tx => tx.txHash.toString()),
+      );
+    });
+
+    it('handles single-block checkpoint', async () => {
+      const body1 = await Body.random({ txsPerBlock: 3 });
+      const block1BlobData = makeBlockBlobDataFromBody(body1, BlockNumber(1), true, 5000);
+
+      const checkpointBlobData: CheckpointBlobData = {
+        blocks: [block1BlobData],
+        checkpointEndMarker: { numBlobFields: 50 },
+      };
+
+      const archiveRoot = Fr.random();
+
+      const retrievedCheckpoint: RetrievedCheckpoint = {
+        checkpointNumber: CheckpointNumber(1),
+        archiveRoot,
+        header: CheckpointHeader.random(),
+        checkpointBlobData,
+        l1: new L1PublishedData(1n, 1000n, '0x1234'),
+        chainId: new Fr(1),
+        version: new Fr(1),
+        attestations: [],
+      };
+
+      const publishedCheckpoint = await retrievedToPublishedCheckpoint(retrievedCheckpoint);
+
+      expect(publishedCheckpoint.checkpoint.blocks).toHaveLength(1);
+      expect(publishedCheckpoint.checkpoint.blocks[0].body.txEffects).toHaveLength(3);
+
+      // Verify tx hashes match the original body
+      expect(publishedCheckpoint.checkpoint.blocks[0].body.txEffects.map(tx => tx.txHash.toString())).toEqual(
+        body1.txEffects.map(tx => tx.txHash.toString()),
+      );
+    });
+  });
+});
+
+/**
+ * Helper to create a BlockBlobData from a Body. This ensures the blob data is compatible
+ * with Body.fromTxBlobData.
+ */
+function makeBlockBlobDataFromBody(
+  body: Body,
+  blockNumber: BlockNumber,
+  isFirstBlock: boolean,
+  seed: number,
+): BlockBlobData {
+  const blockEndBlobData = makeBlockEndBlobData({
+    seed,
+    isFirstBlock,
+    blockEndMarker: {
+      numTxs: body.txEffects.length,
+      blockNumber,
+      timestamp: BigInt(1000 + blockNumber),
+    },
+  });
+
+  return {
+    txs: body.toTxBlobData(),
+    ...blockEndBlobData,
+  };
+}

yarn-project/archiver/src/l1/data_retrieval.ts

Lines changed: 1 addition & 1 deletion
@@ -100,7 +100,7 @@ export async function retrievedToPublishedCheckpoint({
       }),
     });

-    const body = Body.fromTxBlobData(checkpointBlobData.blocks[0].txs);
+    const body = Body.fromTxBlobData(blockBlobData.txs);

     const blobFields = encodeBlockBlobData(blockBlobData);
     await spongeBlob.absorb(blobFields);
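To see why this one-line change matters, here is a hedged sketch of the surrounding code, not the actual implementation: the loop over checkpointBlobData.blocks and the trailing comment are assumptions made for illustration, while Body.fromTxBlobData, encodeBlockBlobData, and spongeBlob.absorb come from the hunk above. Before the fix, every block in a multi-block checkpoint was decoded from the first block's tx blob data, so all reconstructed blocks shared block 1's tx effects; the new multi-block test above is the regression test for exactly that.

// Hedged sketch only: assumes the fixed line lives inside a per-block loop in
// retrievedToPublishedCheckpoint; names not shown in the hunk are illustrative.
for (const blockBlobData of checkpointBlobData.blocks) {
  // Old (buggy) line: always decoded the first block's txs, so every block in a
  // multi-block checkpoint ended up with block 1's tx effects.
  // const body = Body.fromTxBlobData(checkpointBlobData.blocks[0].txs);

  // Fixed line: decode this block's own tx blob data.
  const body = Body.fromTxBlobData(blockBlobData.txs);

  const blobFields = encodeBlockBlobData(blockBlobData);
  await spongeBlob.absorb(blobFields);
  // ...the decoded body is then used to assemble this block of the published checkpoint...
}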

yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts

Lines changed: 18 additions & 7 deletions
@@ -364,10 +364,10 @@ describe('e2e_epochs/epochs_mbps', () => {
     await assertMultipleBlocksPerSlot(2, logger);
   });

-  it('builds multiple blocks per slot and non-validator re-executes and stores proposed multi-block slots', async () => {
+  it('builds multiple blocks per slot and non-validators re-execute and sync multi-block slots', async () => {
     await setupTest({ syncChainTip: 'proposed', minTxsPerBlock: 1, maxTxsPerBlock: 1 });

-    logger.warn(`Creating non-validator node`);
+    logger.warn(`Creating non-validator reexecuting node`);
     const nonValidatorNode = await test.createNonValidatorNode({
       alwaysReexecuteBlockProposals: true,
       skipPushProposedBlocksToArchiver: false,
@@ -413,7 +413,7 @@ describe('e2e_epochs/epochs_mbps', () => {
       0.5,
     );

-    // ensure the proposed multi-block slot has valid effects
+    // Ensure the proposed multi-block slot has valid effects
     expect(multiBlockSlotNumber).toBeDefined();
     const blocksInSlot = await nonValidatorArchiver.getBlocksForSlot(SlotNumber(multiBlockSlotNumber!));
     expect(blocksInSlot.length).toBeGreaterThanOrEqual(2);
@@ -424,15 +424,26 @@ describe('e2e_epochs/epochs_mbps', () => {
     const effectsInSlot = await Promise.all(txHashesInSlot.map(txHash => nonValidatorArchiver.getTxEffect(txHash)));
     expect(effectsInSlot.every(effect => effect !== undefined)).toBe(true);

+    // Wait until the node syncs to the checkpointed block successfully
     const maxBlockNumberInSlot = Math.max(...blocksInSlot.map(block => block.number));
     await retryUntil(
-      async () => {
-        const tips = await nonValidatorArchiver.getL2Tips();
-        return tips.checkpointed.block.number >= maxBlockNumberInSlot;
-      },
+      async () => (await nonValidatorArchiver.getL2Tips()).checkpointed.block.number >= maxBlockNumberInSlot!,
       'non-validator node to sync checkpointed block',
       test.L2_SLOT_DURATION_IN_S * 5,
       0.5,
     );
+
+    // Start a new node and make sure it can sync from scratch including the multi-block slot
+    logger.warn(`Creating non-validator syncing node`);
+    const nonValidatorSyncingNode = await test.createNonValidatorNode({
+      alwaysReexecuteBlockProposals: false,
+    });
+    await retryUntil(
+      async () =>
+        (await nonValidatorSyncingNode.getBlockSource().getL2Tips()).checkpointed.block.number >= maxBlockNumberInSlot!,
+      'non-validator syncing node to sync checkpointed block',
+      test.L2_SLOT_DURATION_IN_S * 10,
+      0.5,
+    );
   });
 });

yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts

Lines changed: 4 additions & 2 deletions
@@ -300,7 +300,7 @@ export class ServerWorldStateSynchronizer
    * @returns Whether the block handled was produced by this same node.
    */
   private async handleL2Blocks(l2Blocks: L2Block[]) {
-    this.log.trace(`Handling L2 blocks ${l2Blocks[0].number} to ${l2Blocks.at(-1)!.number}`);
+    this.log.debug(`Handling L2 blocks ${l2Blocks[0].number} to ${l2Blocks.at(-1)!.number}`);

     // Fetch the L1->L2 messages for the first block in a checkpoint.
     const messagesForBlocks = new Map<BlockNumber, Fr[]>();
@@ -341,10 +341,12 @@
    * @returns Whether the block handled was produced by this same node.
    */
   private async handleL2Block(l2Block: L2Block, l1ToL2Messages: Fr[]): Promise<WorldStateStatusFull> {
-    this.log.trace(`Pushing L2 block ${l2Block.number} to merkle tree db `, {
+    this.log.debug(`Pushing L2 block ${l2Block.number} to merkle tree db `, {
       blockNumber: l2Block.number,
       blockHash: await l2Block.hash().then(h => h.toString()),
       l1ToL2Messages: l1ToL2Messages.map(msg => msg.toString()),
+      blockHeader: l2Block.header.toInspect(),
+      blockStats: l2Block.getStats(),
     });
     const result = await this.merkleTreeDb.handleL2BlockAndMessages(l2Block, l1ToL2Messages);
