From e321449e9b06a47495954bdd837163a1ea349b48 Mon Sep 17 00:00:00 2001
From: LHerskind
Date: Thu, 8 Aug 2024 12:46:09 +0000
Subject: [PATCH] chore: try making the archiver less brittle

---
 .../archiver/src/archiver/archiver.ts         | 29 +++++++++++++++----
 .../archiver/src/archiver/archiver_store.ts   |  2 +-
 .../kv_archiver_store/block_body_store.ts     | 17 ++++++-----
 .../kv_archiver_store/kv_archiver_store.ts    |  2 +-
 .../memory_archiver_store.ts                  | 12 +++-----
 .../composed/integration_l1_publisher.test.ts | 13 +++++----
 6 files changed, 47 insertions(+), 28 deletions(-)

diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts
index 3974a8deb9c..594fb8fb062 100644
--- a/yarn-project/archiver/src/archiver/archiver.ts
+++ b/yarn-project/archiver/src/archiver/archiver.ts
@@ -235,6 +235,8 @@ export class Archiver implements ArchiveSource {
     // the metadata
     let retrievedBlocks: DataRetrieval<L2Block>;
     {
+      // @todo @LHerskind Investigate how necessary nextExpectedL2BlockNum really is.
+      // Also, I would expect it to break horribly if we have a reorg.
       const retrievedBlockMetadata = await retrieveBlockMetadataFromRollup(
         this.publicClient,
         this.rollupAddress,
@@ -248,15 +250,27 @@
         ([header]) => header.contentCommitment.txsEffectsHash,
       );
 
+      // @note @LHerskind We will occasionally be hitting this point BEFORE we have actually retrieved the bodies.
+      // The main reason this has not been an issue earlier is that:
+      //   i) the design previously published the body in one tx and the header in another,
+      //      which in an anvil auto-mine world means that they land in separate blocks;
+      //  ii) we have been lucky that latency has been small enough not to matter.
       const blockBodiesFromStore = await this.store.getBlockBodies(retrievedBodyHashes);
 
       if (retrievedBlockMetadata.retrievedData.length !== blockBodiesFromStore.length) {
-        throw new Error('Block headers length does not equal block bodies length');
+        this.log.warn('Block headers length does not equal block bodies length');
       }
 
-      const blocks = retrievedBlockMetadata.retrievedData.map(
-        (blockMetadata, i) => new L2Block(blockMetadata[1], blockMetadata[0], blockBodiesFromStore[i]),
-      );
+      const blocks: L2Block[] = [];
+      for (let i = 0; i < retrievedBlockMetadata.retrievedData.length; i++) {
+        const [header, archive] = retrievedBlockMetadata.retrievedData[i];
+        const blockBody = blockBodiesFromStore[i];
+        if (blockBody) {
+          blocks.push(new L2Block(archive, header, blockBody));
+        } else {
+          this.log.warn(`Block body not found for block ${header.globalVariables.blockNumber.toBigInt()}.`);
+        }
+      }
 
       (blocks.length ? this.log.verbose : this.log.debug)(
         `Retrieved ${blocks.length || 'no'} new L2 blocks between L1 blocks ${
@@ -264,8 +278,13 @@
         } and ${currentL1BlockNumber}.`,
       );
 
+      // Set the `lastProcessedL1BlockNumber` to the smaller of the header and body retrievals.
+      const min = (a: bigint, b: bigint) => (a < b ? a : b);
       retrievedBlocks = {
-        lastProcessedL1BlockNumber: retrievedBlockMetadata.lastProcessedL1BlockNumber,
+        lastProcessedL1BlockNumber: min(
+          retrievedBlockMetadata.lastProcessedL1BlockNumber,
+          retrievedBlockBodies.lastProcessedL1BlockNumber,
+        ),
         retrievedData: blocks,
       };
     }
diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts
index 7e388cbf52f..d2dcd7ae9b1 100644
--- a/yarn-project/archiver/src/archiver/archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/archiver_store.ts
@@ -61,7 +61,7 @@
    * @param txsEffectsHashes - A list of txsEffectsHashes.
    * @returns The requested L2 block bodies
    */
-  getBlockBodies(txsEffectsHashes: Buffer[]): Promise<Body[]>;
+  getBlockBodies(txsEffectsHashes: Buffer[]): Promise<(Body | undefined)[]>;
 
   /**
    * Gets up to `limit` amount of L2 blocks starting from `from`.
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
index 5e7da7fac20..a34317045cc 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
@@ -30,20 +30,21 @@ export class BlockBodyStore {
    * @param txsEffectsHashes - The txsEffectsHashes list that corresponds to the blockBodies we want to retrieve
    * @returns The requested L2 block bodies
    */
-  async getBlockBodies(txsEffectsHashes: Buffer[]): Promise<Body[]> {
+  async getBlockBodies(txsEffectsHashes: Buffer[]): Promise<(Body | undefined)[]> {
     const blockBodiesBuffer = await this.db.transaction(() =>
       txsEffectsHashes.map(txsEffectsHash => this.#blockBodies.get(txsEffectsHash.toString('hex'))),
     );
 
-    if (blockBodiesBuffer.some(bodyBuffer => bodyBuffer === undefined)) {
-      this.log.error(
-        'Block body buffer is undefined',
-        txsEffectsHashes.map(txsEffectsHash => txsEffectsHash.toString('hex')),
-      );
-      throw new Error('Block body buffer is undefined');
+    const blockBodies: (Body | undefined)[] = [];
+    for (let i = 0; i < blockBodiesBuffer.length; i++) {
+      const blockBodyBuffer = blockBodiesBuffer[i];
+      if (blockBodyBuffer === undefined) {
+        this.log.warn(`Block body buffer is undefined for txsEffectsHash: ${txsEffectsHashes[i].toString('hex')}`);
+      }
+      blockBodies.push(blockBodyBuffer ? Body.fromBuffer(blockBodyBuffer) : undefined);
     }
 
-    return blockBodiesBuffer.map(blockBodyBuffer => Body.fromBuffer(blockBodyBuffer!));
+    return blockBodies;
   }
 
   /**
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
index b0a17431953..b5bee9f274e 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
@@ -111,7 +111,7 @@ export class KVArchiverDataStore implements ArchiverDataStore {
    * @param txsEffectsHashes - A list of txsEffectsHashes (body hashes).
    * @returns The requested L2 block bodies
    */
-  getBlockBodies(txsEffectsHashes: Buffer[]): Promise<Body[]> {
+  getBlockBodies(txsEffectsHashes: Buffer[]): Promise<(Body | undefined)[]> {
     return this.#blockBodyStore.getBlockBodies(txsEffectsHashes);
   }
 
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
index 827f0fbd225..cd6bc3278fb 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
@@ -177,14 +177,10 @@ export class MemoryArchiverStore implements ArchiverDataStore {
    * @param txsEffectsHashes - A list of txsEffectsHashes (body hashes).
    * @returns The requested L2 block bodies
    */
-  getBlockBodies(txsEffectsHashes: Buffer[]): Promise<Body[]> {
-    const blockBodies = txsEffectsHashes.map(txsEffectsHash => this.l2BlockBodies.get(txsEffectsHash.toString('hex')));
-
-    if (blockBodies.some(bodyBuffer => bodyBuffer === undefined)) {
-      throw new Error('Block body is undefined');
-    }
-
-    return Promise.resolve(blockBodies as Body[]);
+  getBlockBodies(txsEffectsHashes: Buffer[]): Promise<(Body | undefined)[]> {
+    return Promise.resolve(
+      txsEffectsHashes.map(txsEffectsHash => this.l2BlockBodies.get(txsEffectsHash.toString('hex'))),
+    );
   }
 
   /**
diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts
index c77e40f9617..5f7eeb85fa7 100644
--- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts
+++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts
@@ -108,6 +108,7 @@ describe('L1Publisher integration', () => {
   let feeRecipient: AztecAddress;
 
   let ethCheatCodes: EthCheatCodes;
+  let worldStateSynchronizer: ServerWorldStateSynchronizer;
 
   // To update the test data, run "export AZTEC_GENERATE_TEST_DATA=1" in shell and run the tests again
   // If you have issues with RPC_URL, it is likely that you need to set the RPC_URL in the shell as well
@@ -118,10 +119,6 @@
     const currentTime = (await publicClient.getBlock()).timestamp;
     const currentSlot = await rollup.read.getCurrentSlot();
    const timestamp = (await rollup.read.getTimestampForSlot([currentSlot + 1n])) - BigInt(ETHEREUM_SLOT_DURATION);
-
-    // @todo @LHerskind figure out why we have issues here if we do not entirely ENTER the next slot
-    // My guess would be that it is somewhere because of a bad calculation with global variables or such
-
     if (timestamp > currentTime) {
       await ethCheatCodes.warp(Number(timestamp));
     }
@@ -167,7 +164,7 @@
       l2QueueSize: 10,
       worldStateProvenBlocksOnly: false,
     };
-    const worldStateSynchronizer = new ServerWorldStateSynchronizer(tmpStore, builderDb, blockSource, worldStateConfig);
+    worldStateSynchronizer = new ServerWorldStateSynchronizer(tmpStore, builderDb, blockSource, worldStateConfig);
     await worldStateSynchronizer.start();
     builder = await TxProver.new(config, new NoopTelemetryClient());
     prover = builder.createBlockProver(builderDb.asLatest());
@@ -371,6 +368,9 @@
     let toConsume = await inbox.read.toConsume();
 
     for (let i = 0; i < numberOfConsecutiveBlocks; i++) {
+      // @note Make sure that the state is up to date before we start building.
+      await worldStateSynchronizer.syncImmediate();
+
       const l1ToL2Content = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 128 * i + 1 + 0x400).map(fr);
 
       for (let j = 0; j < l1ToL2Content.length; j++) {
@@ -484,6 +484,9 @@
     const blockNumber = await publicClient.getBlockNumber();
 
     for (let i = 0; i < numberOfConsecutiveBlocks; i++) {
+      // @note Make sure that the state is up to date before we start building.
+      await worldStateSynchronizer.syncImmediate();
+
      const l1ToL2Messages = new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n));
 
       const txs = [makeEmptyProcessedTx(), makeEmptyProcessedTx()];
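
The following is only an illustrative, self-contained TypeScript sketch of the strategy the patch adopts; the `Header`, `Body`, `Block`, `DataRetrieval` shapes and the `pairHeadersWithBodies` helper are simplified stand-ins, not the archiver's real types or API. It shows the two moving parts: headers whose bodies have not been stored yet are skipped with a warning instead of aborting the sync, and the cursor only advances to the smaller of the two `lastProcessedL1BlockNumber` values so the skipped blocks are retried on the next iteration.

interface DataRetrieval<T> {
  lastProcessedL1BlockNumber: bigint;
  retrievedData: T[];
}

interface Header {
  blockNumber: bigint;
}

interface Body {
  txsEffectsHash: string;
}

interface Block {
  header: Header;
  body: Body;
}

const min = (a: bigint, b: bigint) => (a < b ? a : b);

// Pair retrieved headers with their (possibly still missing) bodies. A missing body is
// logged and skipped rather than thrown on, and the returned cursor is clamped to the
// lower of the two retrievals so the skipped blocks are picked up again next time.
function pairHeadersWithBodies(
  headers: DataRetrieval<Header>,
  bodies: DataRetrieval<Body | undefined>,
): DataRetrieval<Block> {
  const blocks: Block[] = [];
  for (let i = 0; i < headers.retrievedData.length; i++) {
    const header = headers.retrievedData[i];
    const body = bodies.retrievedData[i];
    if (body) {
      blocks.push({ header, body });
    } else {
      console.warn(`Block body not found for block ${header.blockNumber}.`);
    }
  }
  return {
    lastProcessedL1BlockNumber: min(headers.lastProcessedL1BlockNumber, bodies.lastProcessedL1BlockNumber),
    retrievedData: blocks,
  };
}

// Example: the body for block 2n has not landed yet, so only block 1n is returned and the
// cursor stays at the body retrieval's position (100n) rather than the header's (105n).
const result = pairHeadersWithBodies(
  { lastProcessedL1BlockNumber: 105n, retrievedData: [{ blockNumber: 1n }, { blockNumber: 2n }] },
  { lastProcessedL1BlockNumber: 100n, retrievedData: [{ txsEffectsHash: '0xaa' }, undefined] },
);
console.log(result.retrievedData.length, result.lastProcessedL1BlockNumber); // 1 100n

Taking the minimum means a late body only delays progress for the affected blocks rather than dropping them, which is what makes the warn-instead-of-throw change safe.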