diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts
index ef8247d0429..627c29badfd 100644
--- a/yarn-project/archiver/src/archiver/archiver.test.ts
+++ b/yarn-project/archiver/src/archiver/archiver.test.ts
@@ -10,7 +10,6 @@ import { EthAddress } from '@aztec/foundation/eth-address';
 import { Fr } from '@aztec/foundation/fields';
 import { sleep } from '@aztec/foundation/sleep';
 import { AvailabilityOracleAbi, type InboxAbi, RollupAbi } from '@aztec/l1-artifacts';
-import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';
 import { type MockProxy, mock } from 'jest-mock-extended';
 
 import {
@@ -25,6 +24,7 @@ import {
 import { Archiver } from './archiver.js';
 import { type ArchiverDataStore } from './archiver_store.js';
+import { type ArchiverInstrumentation } from './instrumentation.js';
 import { MemoryArchiverStore } from './memory_archiver_store/memory_archiver_store.js';
 
 describe('Archiver', () => {
@@ -35,17 +35,31 @@ describe('Archiver', () => {
   const blockNumbers = [1, 2, 3];
 
   let publicClient: MockProxy<PublicClient<HttpTransport, Chain>>;
+  let instrumentation: MockProxy<ArchiverInstrumentation>;
   let archiverStore: ArchiverDataStore;
   let proverId: Fr;
+  let now: number;
+
+  let archiver: Archiver;
 
   beforeEach(() => {
-    publicClient = mock<PublicClient<HttpTransport, Chain>>();
+    now = +new Date();
+    publicClient = mock<PublicClient<HttpTransport, Chain>>({
+      getBlock: ((args: any) => ({
+        timestamp: args.blockNumber * 1000n + BigInt(now),
+      })) as any,
+    });
+    instrumentation = mock<ArchiverInstrumentation>({ isEnabled: () => true });
     archiverStore = new MemoryArchiverStore(1000);
     proverId = Fr.random();
   });
 
+  afterEach(async () => {
+    await archiver?.stop();
+  });
+
   it('can start, sync and stop and handle l1 to l2 messages and logs', async () => {
-    const archiver = new Archiver(
+    archiver = new Archiver(
       publicClient,
       rollupAddress,
       availabilityOracleAddress,
@@ -53,13 +67,14 @@ describe('Archiver', () => {
       registryAddress,
       archiverStore,
       1000,
-      new NoopTelemetryClient(),
+      instrumentation,
     );
 
     let latestBlockNum = await archiver.getBlockNumber();
     expect(latestBlockNum).toEqual(0);
 
     const blocks = blockNumbers.map(x => L2Block.random(x, 4, x, x + 1, 2, 2));
+    blocks.forEach((b, i) => (b.header.globalVariables.timestamp = new Fr(now + 1000 * (i + 1))));
     const publishTxs = blocks.map(block => block.body).map(makePublishTx);
     const rollupTxs = blocks.map(makeRollupTx);
 
@@ -157,12 +172,15 @@ describe('Archiver', () => {
     expect((await archiver.getBlocks(1, 100)).map(b => b.number)).toEqual([1, 2, 3]);
     expect((await archiver.getBlocks(1, 100, true)).map(b => b.number)).toEqual([1]);
 
-    await archiver.stop();
+    // Check instrumentation of proven blocks
+    expect(instrumentation.processProofsVerified).toHaveBeenCalledWith([
+      { delay: 1000n, l1BlockNumber: 102n, l2BlockNumber: 1n, proverId: proverId.toString() },
+    ]);
   }, 10_000);
 
   it('does not sync past current block number', async () => {
     const numL2BlocksInTest = 2;
-    const archiver = new Archiver(
+    archiver = new Archiver(
       publicClient,
       rollupAddress,
       availabilityOracleAddress,
@@ -170,7 +188,7 @@ describe('Archiver', () => {
       registryAddress,
       archiverStore,
       1000,
-      new NoopTelemetryClient(),
+      instrumentation,
    );
 
     let latestBlockNum = await archiver.getBlockNumber();
@@ -207,8 +225,6 @@ describe('Archiver', () => {
 
     latestBlockNum = await archiver.getBlockNumber();
     expect(latestBlockNum).toEqual(numL2BlocksInTest);
-
-    await archiver.stop();
   }, 10_000);
 
   // logs should be created in order of how archiver syncs.
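A quick standalone sketch of the arithmetic behind the `delay: 1000n` assertion above (illustrative names, not code from this PR): the mocked `getBlock` returns `timestamp = blockNumber * 1000n + now`, so if the L2 block lands on L1 at block 101 and its proof is verified at L1 block 102, the recorded delay is one mocked L1 "second" of 1000 ms.

```ts
// Assumes, as the mock above does, that L1 block N has timestamp N * 1000n + now,
// and that L2 block 1 is presumably published at L1 block 101 and proven at 102.
const now = BigInt(Date.now());
const l1Timestamp = (l1BlockNumber: bigint): bigint => l1BlockNumber * 1000n + now;

const publishedAt = l1Timestamp(101n); // L1 block carrying the L2 block body
const provenAt = l1Timestamp(102n); // L1 block carrying the proof
console.log(provenAt - publishedAt); // 1000n, matching `delay` in the expectation
```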
diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts
index 84c9f43dc7e..66b44986cd8 100644
--- a/yarn-project/archiver/src/archiver/archiver.ts
+++ b/yarn-project/archiver/src/archiver/archiver.ts
@@ -24,6 +24,7 @@ import {
 import { createEthereumChain } from '@aztec/ethereum';
 import { type ContractArtifact } from '@aztec/foundation/abi';
 import { type AztecAddress } from '@aztec/foundation/aztec-address';
+import { compactArray, unique } from '@aztec/foundation/collection';
 import { type EthAddress } from '@aztec/foundation/eth-address';
 import { Fr } from '@aztec/foundation/fields';
 import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log';
@@ -46,13 +47,15 @@ import { type Chain, type HttpTransport, type PublicClient, createPublicClient, http } from 'viem';
 
 import { type ArchiverDataStore } from './archiver_store.js';
 import { type ArchiverConfig } from './config.js';
 import {
-  type DataRetrieval,
   retrieveBlockBodiesFromAvailabilityOracle,
   retrieveBlockMetadataFromRollup,
   retrieveL1ToL2Messages,
   retrieveL2ProofVerifiedEvents,
 } from './data_retrieval.js';
+import { getL1BlockTime } from './eth_log_handlers.js';
 import { ArchiverInstrumentation } from './instrumentation.js';
+import { type SingletonDataRetrieval } from './structs/data_retrieval.js';
+import { type L1Published } from './structs/published.js';
 
 /**
  * Helper interface to combine all sources this archiver implementation provides.
@@ -70,9 +73,6 @@ export class Archiver implements ArchiveSource {
    */
   private runningPromise?: RunningPromise;
 
-  /** Capture runtime metrics */
-  private instrumentation: ArchiverInstrumentation;
-
   /**
    * Creates a new instance of the Archiver.
    * @param publicClient - A client for interacting with the Ethereum node.
@@ -91,11 +91,9 @@ export class Archiver implements ArchiveSource {
     private readonly registryAddress: EthAddress,
     private readonly store: ArchiverDataStore,
     private readonly pollingIntervalMs = 10_000,
-    telemetry: TelemetryClient,
+    private readonly instrumentation: ArchiverInstrumentation,
     private readonly log: DebugLogger = createDebugLogger('aztec:archiver'),
-  ) {
-    this.instrumentation = new ArchiverInstrumentation(telemetry);
-  }
+  ) {}
 
   /**
    * Creates a new instance of the Archiver and blocks until it syncs from chain.
@@ -125,7 +123,7 @@ export class Archiver implements ArchiveSource {
       config.l1Contracts.registryAddress,
       archiverStore,
       config.archiverPollingIntervalMS,
-      telemetry,
+      new ArchiverInstrumentation(telemetry),
     );
     await archiver.start(blockUntilSynced);
     return archiver;
@@ -177,17 +175,25 @@ export class Archiver implements ArchiveSource {
      *
     * This code does not handle reorgs.
      */
-    const { blockBodiesSynchedTo, blocksSynchedTo, messagesSynchedTo } = await this.store.getSynchPoint();
+    const { blockBodiesSynchedTo, blocksSynchedTo, messagesSynchedTo, provenLogsSynchedTo } =
+      await this.store.getSynchPoint();
     const currentL1BlockNumber = await this.publicClient.getBlockNumber();
 
     if (
       currentL1BlockNumber <= blocksSynchedTo &&
       currentL1BlockNumber <= messagesSynchedTo &&
-      currentL1BlockNumber <= blockBodiesSynchedTo
+      currentL1BlockNumber <= blockBodiesSynchedTo &&
+      currentL1BlockNumber <= provenLogsSynchedTo
     ) {
       // chain hasn't moved forward
       // or it's been rolled back
-      this.log.debug(`Nothing to sync`, { currentL1BlockNumber, blocksSynchedTo, messagesSynchedTo });
+      this.log.debug(`Nothing to sync`, {
+        currentL1BlockNumber,
+        blocksSynchedTo,
+        messagesSynchedTo,
+        provenLogsSynchedTo,
+        blockBodiesSynchedTo,
+      });
       return;
     }
 
@@ -249,9 +255,9 @@ export class Archiver implements ArchiveSource {
     );
     await this.store.addBlockBodies(retrievedBlockBodies);
 
-    // Now that we have block bodies we will retrieve block metadata and build L2 blocks from the bodies and
-    // the metadata
-    let retrievedBlocks: DataRetrieval<L2Block>;
+    // Now that we have block bodies we will retrieve block metadata and build L2 blocks from the bodies and the metadata
+    let retrievedBlocks: L1Published<L2Block>[];
+    let lastProcessedL1BlockNumber: bigint;
     {
       // @todo @LHerskind Investigate how necessary that nextExpectedL2BlockNum really is.
       // Also, I would expect it to break horribly if we have a reorg.
@@ -265,9 +271,7 @@ export class Archiver implements ArchiveSource {
         nextExpectedL2BlockNum,
       );
 
-      const retrievedBodyHashes = retrievedBlockMetadata.retrievedData.map(
-        ([header]) => header.contentCommitment.txsEffectsHash,
-      );
+      const retrievedBodyHashes = retrievedBlockMetadata.map(([header]) => header.contentCommitment.txsEffectsHash);
 
       // @note @LHerskind We will occasionally be hitting this point BEFORE, we have actually retrieved the bodies.
       // The main reason this have not been an issue earlier is because:
       // i)
       // ii) We have been lucky that latency have been small enough to not matter.
       const blockBodiesFromStore = await this.store.getBlockBodies(retrievedBodyHashes);
 
-      if (retrievedBlockMetadata.retrievedData.length !== blockBodiesFromStore.length) {
+      if (retrievedBlockMetadata.length !== blockBodiesFromStore.length) {
         this.log.warn('Block headers length does not equal block bodies length');
       }
 
-      const blocks: L2Block[] = [];
-      for (let i = 0; i < retrievedBlockMetadata.retrievedData.length; i++) {
-        const [header, archive] = retrievedBlockMetadata.retrievedData[i];
+      const blocks: L1Published<L2Block>[] = [];
+      for (let i = 0; i < retrievedBlockMetadata.length; i++) {
+        const [header, archive, l1] = retrievedBlockMetadata[i];
         const blockBody = blockBodiesFromStore[i];
         if (blockBody) {
-          blocks.push(new L2Block(archive, header, blockBody));
+          blocks.push({ data: new L2Block(archive, header, blockBody), l1 });
         } else {
           this.log.warn(`Block body not found for block ${header.globalVariables.blockNumber.toBigInt()}.`);
         }
       }
@@ -297,62 +301,63 @@
         } and ${currentL1BlockNumber}.`,
       );
 
-      retrievedBlocks = {
-        lastProcessedL1BlockNumber: retrievedBlockMetadata.lastProcessedL1BlockNumber,
-        retrievedData: blocks,
-      };
+      retrievedBlocks = blocks;
+      lastProcessedL1BlockNumber =
+        retrievedBlockMetadata.length > 0
+          ? retrievedBlockMetadata[retrievedBlockMetadata.length - 1][2].blockNumber
+          : blocksSynchedTo;
     }
 
     this.log.debug(
-      `Processing retrieved blocks ${retrievedBlocks.retrievedData
-        .map(b => b.number)
-        .join(',')} with last processed L1 block ${retrievedBlocks.lastProcessedL1BlockNumber}`,
+      `Processing retrieved blocks ${retrievedBlocks
+        .map(b => b.data.number)
+        .join(',')} with last processed L1 block ${lastProcessedL1BlockNumber}`,
     );
 
     await Promise.all(
-      retrievedBlocks.retrievedData.map(block => {
-        const noteEncryptedLogs = block.body.noteEncryptedLogs;
-        const encryptedLogs = block.body.encryptedLogs;
-        const unencryptedLogs = block.body.unencryptedLogs;
-        return this.store.addLogs(noteEncryptedLogs, encryptedLogs, unencryptedLogs, block.number);
+      retrievedBlocks.map(block => {
+        const noteEncryptedLogs = block.data.body.noteEncryptedLogs;
+        const encryptedLogs = block.data.body.encryptedLogs;
+        const unencryptedLogs = block.data.body.unencryptedLogs;
+        return this.store.addLogs(noteEncryptedLogs, encryptedLogs, unencryptedLogs, block.data.number);
       }),
     );
 
     // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
     await Promise.all(
-      retrievedBlocks.retrievedData.map(async block => {
-        const blockLogs = block.body.txEffects
+      retrievedBlocks.map(async block => {
+        const blockLogs = block.data.body.txEffects
           .flatMap(txEffect => (txEffect ? [txEffect.unencryptedLogs] : []))
           .flatMap(txLog => txLog.unrollLogs());
-        await this.storeRegisteredContractClasses(blockLogs, block.number);
-        await this.storeDeployedContractInstances(blockLogs, block.number);
-        await this.storeBroadcastedIndividualFunctions(blockLogs, block.number);
+        await this.storeRegisteredContractClasses(blockLogs, block.data.number);
+        await this.storeDeployedContractInstances(blockLogs, block.data.number);
+        await this.storeBroadcastedIndividualFunctions(blockLogs, block.data.number);
       }),
     );
 
-    if (retrievedBlocks.retrievedData.length > 0) {
+    if (retrievedBlocks.length > 0) {
       const timer = new Timer();
       await this.store.addBlocks(retrievedBlocks);
       this.instrumentation.processNewBlocks(
-        timer.ms() / retrievedBlocks.retrievedData.length,
-        retrievedBlocks.retrievedData,
+        timer.ms() / retrievedBlocks.length,
+        retrievedBlocks.map(b => b.data),
      );
-      const lastL2BlockNumber = retrievedBlocks.retrievedData[retrievedBlocks.retrievedData.length - 1].number;
-      this.log.verbose(`Processed ${retrievedBlocks.retrievedData.length} new L2 blocks up to ${lastL2BlockNumber}`);
+      const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number;
+      this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`);
     }
 
     // Fetch the logs for proven blocks in the block range and update the last proven block number.
-    // Note it's ok to read repeated data here, since we're just using the largest number we see on the logs.
-    await this.updateLastProvenL2Block(blocksSynchedTo, currentL1BlockNumber);
+    if (currentL1BlockNumber > provenLogsSynchedTo) {
+      await this.updateLastProvenL2Block(provenLogsSynchedTo + 1n, currentL1BlockNumber);
+    }
 
-    if (retrievedBlocks.retrievedData.length > 0 || blockUntilSynced) {
+    if (retrievedBlocks.length > 0 || blockUntilSynced) {
      (blockUntilSynced ? this.log.info : this.log.verbose)(`Synced to L1 block ${currentL1BlockNumber}`);
     }
   }
 
   private async updateLastProvenL2Block(fromBlock: bigint, toBlock: bigint) {
     const logs = await retrieveL2ProofVerifiedEvents(this.publicClient, this.rollupAddress, fromBlock, toBlock);
-
     const lastLog = logs[logs.length - 1];
     if (!lastLog) {
       return;
     }
@@ -363,14 +368,71 @@
       throw new Error(`Missing argument blockNumber from L2ProofVerified event`);
     }
 
+    await this.emitProofVerifiedMetrics(logs);
+
     const currentProvenBlockNumber = await this.store.getProvenL2BlockNumber();
     if (provenBlockNumber > currentProvenBlockNumber) {
+      // Update the last proven block number
       this.log.verbose(`Updated last proven block number from ${currentProvenBlockNumber} to ${provenBlockNumber}`);
-      await this.store.setProvenL2BlockNumber(Number(provenBlockNumber));
+      await this.store.setProvenL2BlockNumber({
+        retrievedData: Number(provenBlockNumber),
+        lastProcessedL1BlockNumber: lastLog.l1BlockNumber,
+      });
       this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber));
+    } else {
+      // We set the last processed L1 block number to the last L1 block number in the range to avoid duplicate processing
+      await this.store.setProvenL2BlockNumber({
+        retrievedData: Number(currentProvenBlockNumber),
+        lastProcessedL1BlockNumber: lastLog.l1BlockNumber,
+      });
     }
   }
 
+  /**
+   * Emits as metrics the block number proven, who proved it, and how much time passed since it was submitted.
+   * @param logs - The ProofVerified logs to emit metrics for, as collected from `retrieveL2ProofVerifiedEvents`.
+   **/
+  private async emitProofVerifiedMetrics(logs: { l1BlockNumber: bigint; l2BlockNumber: bigint; proverId: Fr }[]) {
+    if (!logs.length || !this.instrumentation.isEnabled()) {
+      return;
+    }
+
+    const l1BlockTimes = new Map(
+      await Promise.all(
+        unique(logs.map(log => log.l1BlockNumber)).map(
+          async blockNumber => [blockNumber, await getL1BlockTime(this.publicClient, blockNumber)] as const,
+        ),
+      ),
+    );
+
+    // Collect L2 block times for all the blocks verified, this is the time in which the block proven was
+    // originally submitted to L1, using the L1 timestamp of the transaction.
+    const getL2BlockTime = async (blockNumber: bigint) =>
+      (await this.store.getBlocks(Number(blockNumber), 1))[0]?.l1.timestamp;
+
+    const l2BlockTimes = new Map(
+      await Promise.all(
+        unique(logs.map(log => log.l2BlockNumber)).map(
+          async blockNumber => [blockNumber, await getL2BlockTime(blockNumber)] as const,
+        ),
+      ),
+    );
+
+    // Emit the prover id and the time difference between block submission and proof.
+    this.instrumentation.processProofsVerified(
+      compactArray(
+        logs.map(log => {
+          const l1BlockTime = l1BlockTimes.get(log.l1BlockNumber)!;
+          const l2BlockTime = l2BlockTimes.get(log.l2BlockNumber);
+          if (!l2BlockTime) {
+            return undefined;
+          }
+          return { ...log, delay: l1BlockTime - l2BlockTime, proverId: log.proverId.toString() };
        }),
+      ),
+    );
+  }
+
   /**
    * Extracts and stores contract classes out of ContractClassRegistered events emitted by the class registerer contract.
    * @param allLogs - All logs emitted in a bunch of blocks.
@@ -469,7 +531,7 @@ export class Archiver implements ArchiveSource {
     const limitWithProven = proven
       ? Math.min(limit, Math.max((await this.store.getProvenL2BlockNumber()) - from + 1, 0))
       : limit;
-    return limitWithProven === 0 ? [] : this.store.getBlocks(from, limitWithProven);
+    return limitWithProven === 0 ? [] : (await this.store.getBlocks(from, limitWithProven)).map(b => b.data);
   }
 
   /**
@@ -483,7 +545,7 @@ export class Archiver implements ArchiveSource {
       number = await this.store.getSynchedL2BlockNumber();
     }
     const blocks = await this.store.getBlocks(number, 1);
-    return blocks.length === 0 ? undefined : blocks[0];
+    return blocks.length === 0 ? undefined : blocks[0].data;
   }
 
   public getTxEffect(txHash: TxHash): Promise<TxEffect | undefined> {
@@ -552,7 +614,7 @@ export class Archiver implements ArchiveSource {
   }
 
   /** Forcefully updates the last proven block number. Use for testing. */
-  public setProvenBlockNumber(block: number): Promise<void> {
+  public setProvenBlockNumber(block: SingletonDataRetrieval<number>): Promise<void> {
     return this.store.setProvenL2BlockNumber(block);
   }
 
diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts
index a9de126ae78..1d7c6c81afe 100644
--- a/yarn-project/archiver/src/archiver/archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/archiver_store.ts
@@ -24,7 +24,8 @@ import {
   type UnconstrainedFunctionWithMembershipProof,
 } from '@aztec/types/contracts';
 
-import { type DataRetrieval } from './data_retrieval.js';
+import { type DataRetrieval, type SingletonDataRetrieval } from './structs/data_retrieval.js';
+import { type L1Published } from './structs/published.js';
 
 /**
  * Represents the latest L1 block processed by the archiver for various objects in L2.
@@ -36,6 +37,8 @@ export type ArchiverL1SynchPoint = {
   blockBodiesSynchedTo: bigint;
   /** Number of the last L1 block that added L1 -> L2 messages from the Inbox. */
   messagesSynchedTo: bigint;
+  /** Number of the last L1 block that added a new proven block. */
+  provenLogsSynchedTo: bigint;
 };
 
 /**
@@ -48,7 +51,7 @@ export interface ArchiverDataStore {
   * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
   * @returns True if the operation is successful.
   */
-  addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean>;
+  addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean>;
 
   /**
   * Append new block bodies to the store's list.
@@ -71,7 +74,7 @@ export interface ArchiverDataStore {
   * @param limit - The number of blocks to return.
   * @returns The requested L2 blocks.
   */
-  getBlocks(from: number, limit: number): Promise<L2Block[]>;
+  getBlocks(from: number, limit: number): Promise<L1Published<L2Block>[]>;
 
   /**
   * Gets a tx effect.
@@ -160,7 +163,7 @@ export interface ArchiverDataStore {
   * Stores the number of the latest proven L2 block processed.
   * @param l2BlockNumber - The number of the latest proven L2 block processed.
   */
-  setProvenL2BlockNumber(l2BlockNumber: number): Promise<void>;
+  setProvenL2BlockNumber(l2BlockNumber: SingletonDataRetrieval<number>): Promise<void>;
 
   /**
   * Gets the synch point of the archiver
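To make the new store surface concrete, here is a hedged consumer-side sketch (not code from this PR; the function and parameter names are hypothetical): with `getBlocks` now returning `L1Published<L2Block>`, a caller can compute a publish-to-proof delay straight from stored data.

```ts
import { type L2Block } from '@aztec/circuit-types';

// Local mirrors of the shapes introduced in this diff.
type L1PublishedData = { blockNumber: bigint; timestamp: bigint; blockHash: string };
type L1Published<T> = { data: T; l1: L1PublishedData };

// Illustrative only: how long did a proven block sit unproven? `getBlocks`
// stands in for ArchiverDataStore.getBlocks.
async function publishToProofDelay(
  getBlocks: (from: number, limit: number) => Promise<L1Published<L2Block>[]>,
  provenL2BlockNumber: number,
  proofL1Timestamp: bigint,
): Promise<bigint | undefined> {
  const [block] = await getBlocks(provenL2BlockNumber, 1);
  // block.l1.timestamp is when the block body landed on L1.
  return block ? proofL1Timestamp - block.l1.timestamp : undefined;
}
```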
diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
index f523cd6a0a7..3c6127e5b16 100644
--- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
+++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
@@ -15,7 +15,8 @@ import {
 } from '@aztec/types/contracts';
 
 import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js';
-import { type DataRetrieval } from './data_retrieval.js';
+import { type DataRetrieval } from './structs/data_retrieval.js';
+import { type L1Published } from './structs/published.js';
 
 /**
 * @param testName - The name of the test suite.
@@ -24,24 +25,24 @@ import { type DataRetrieval } from './data_retrieval.js';
 export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
   describe(testName, () => {
     let store: ArchiverDataStore;
-    let blocks: DataRetrieval<L2Block>;
+    let blocks: L1Published<L2Block>[];
     let blockBodies: DataRetrieval<Body>;
 
-    const blockTests: [number, number, () => L2Block[]][] = [
-      [1, 1, () => blocks.retrievedData.slice(0, 1)],
-      [10, 1, () => blocks.retrievedData.slice(9, 10)],
-      [1, 10, () => blocks.retrievedData.slice(0, 10)],
-      [2, 5, () => blocks.retrievedData.slice(1, 6)],
-      [5, 2, () => blocks.retrievedData.slice(4, 6)],
+    const blockTests: [number, number, () => L1Published<L2Block>[]][] = [
+      [1, 1, () => blocks.slice(0, 1)],
+      [10, 1, () => blocks.slice(9, 10)],
+      [1, 10, () => blocks.slice(0, 10)],
+      [2, 5, () => blocks.slice(1, 6)],
+      [5, 2, () => blocks.slice(4, 6)],
     ];
 
     beforeEach(() => {
       store = getStore();
-      blocks = {
-        lastProcessedL1BlockNumber: 5n,
-        retrievedData: Array.from({ length: 10 }).map((_, i) => L2Block.random(i + 1)),
-      };
+      blocks = times(10, i => ({
+        data: L2Block.random(i + 1),
+        l1: { blockNumber: BigInt(i + 10), blockHash: `0x${i}`, timestamp: BigInt(i * 1000) },
+      }));
       blockBodies = {
-        retrievedData: blocks.retrievedData.map(block => block.body),
+        retrievedData: blocks.map(block => block.data.body),
         lastProcessedL1BlockNumber: 4n,
       };
     });
@@ -80,7 +81,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
     });
 
     it('resets `from` to the first block if it is out of range', async () => {
-      await expect(store.getBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).resolves.toEqual(blocks.retrievedData.slice(0, 1));
+      await expect(store.getBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).resolves.toEqual(blocks.slice(0, 1));
     });
   });
 
@@ -91,7 +92,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
 
     it("returns the most recently added block's number", async () => {
       await store.addBlocks(blocks);
-      await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.retrievedData.at(-1)!.number);
+      await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.at(-1)!.data.number);
     });
   });
 
@@ -101,15 +102,17 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
         blocksSynchedTo: 0n,
         messagesSynchedTo: 0n,
         blockBodiesSynchedTo: 0n,
+        provenLogsSynchedTo: 0n,
       } satisfies ArchiverL1SynchPoint);
     });
 
     it('returns the L1 block number in which the most recent L2 block was published', async () => {
       await store.addBlocks(blocks);
       await expect(store.getSynchPoint()).resolves.toEqual({
-        blocksSynchedTo: blocks.lastProcessedL1BlockNumber,
+        blocksSynchedTo: 19n,
         messagesSynchedTo: 0n,
         blockBodiesSynchedTo: 0n,
+        provenLogsSynchedTo: 0n,
       } satisfies ArchiverL1SynchPoint);
     });
 
@@ -119,6 +122,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
         blocksSynchedTo: 0n,
         messagesSynchedTo: 0n,
         blockBodiesSynchedTo: blockBodies.lastProcessedL1BlockNumber,
+        provenLogsSynchedTo: 0n,
       } satisfies ArchiverL1SynchPoint);
     });
 
@@ -131,18 +135,30 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
         blocksSynchedTo: 0n,
         messagesSynchedTo: 1n,
         blockBodiesSynchedTo: 0n,
+        provenLogsSynchedTo: 0n,
+      } satisfies ArchiverL1SynchPoint);
+    });
+
+    it('returns the L1 block number that most recently logged a proven block', async () => {
+      await store.setProvenL2BlockNumber({ lastProcessedL1BlockNumber: 3n, retrievedData: 5 });
+      await expect(store.getSynchPoint()).resolves.toEqual({
+        blocksSynchedTo: 0n,
+        messagesSynchedTo: 0n,
+        blockBodiesSynchedTo: 0n,
+        provenLogsSynchedTo: 3n,
       } satisfies ArchiverL1SynchPoint);
     });
   });
 
   describe('addLogs', () => {
     it('adds encrypted & unencrypted logs', async () => {
+      const block = blocks[0].data;
       await expect(
         store.addLogs(
-          blocks.retrievedData[0].body.noteEncryptedLogs,
-          blocks.retrievedData[0].body.encryptedLogs,
-          blocks.retrievedData[0].body.unencryptedLogs,
-          blocks.retrievedData[0].number,
+          block.body.noteEncryptedLogs,
+          block.body.encryptedLogs,
+          block.body.unencryptedLogs,
+          block.number,
         ),
       ).resolves.toEqual(true);
     });
@@ -155,12 +171,12 @@
   ])('getLogs (%s)', (_, logType) => {
     beforeEach(async () => {
       await Promise.all(
-        blocks.retrievedData.map(block =>
+        blocks.map(block =>
           store.addLogs(
-            block.body.noteEncryptedLogs,
-            block.body.encryptedLogs,
-            block.body.unencryptedLogs,
-            block.number,
+            block.data.body.noteEncryptedLogs,
+            block.data.body.encryptedLogs,
+            block.data.body.unencryptedLogs,
+            block.data.number,
           ),
         ),
       );
@@ -170,12 +186,12 @@
       const expectedLogs = getExpectedBlocks().map(block => {
         switch (logType) {
           case LogType.ENCRYPTED:
-            return block.body.encryptedLogs;
+            return block.data.body.encryptedLogs;
           case LogType.NOTEENCRYPTED:
-            return block.body.noteEncryptedLogs;
+            return block.data.body.noteEncryptedLogs;
           case LogType.UNENCRYPTED:
           default:
-            return block.body.unencryptedLogs;
+            return block.data.body.unencryptedLogs;
         }
       });
       const actualLogs = await store.getLogs(from, limit, logType);
@@ -186,12 +202,12 @@
   describe('getTxEffect', () => {
     beforeEach(async () => {
       await Promise.all(
-        blocks.retrievedData.map(block =>
+        blocks.map(block =>
           store.addLogs(
-            block.body.noteEncryptedLogs,
-            block.body.encryptedLogs,
-            block.body.unencryptedLogs,
-            block.number,
+            block.data.body.noteEncryptedLogs,
+            block.data.body.encryptedLogs,
+            block.data.body.unencryptedLogs,
+            block.data.number,
          ),
        ),
      );
@@ -200,11 +216,11 @@
     });
 
     it.each([
-      () => blocks.retrievedData[0].body.txEffects[0],
-      () => blocks.retrievedData[9].body.txEffects[3],
-      () => blocks.retrievedData[3].body.txEffects[1],
-      () => blocks.retrievedData[5].body.txEffects[2],
-      () => blocks.retrievedData[1].body.txEffects[0],
+      () => blocks[0].data.body.txEffects[0],
+      () => blocks[9].data.body.txEffects[3],
+      () => blocks[3].data.body.txEffects[1],
+      () => blocks[5].data.body.txEffects[2],
+      () => blocks[1].data.body.txEffects[0],
     ])('retrieves a previously stored transaction', async getExpectedTx => {
       const expectedTx = getExpectedTx();
       const actualTx = await store.getTxEffect(expectedTx.txHash);
@@ -339,28 +355,24 @@
     const numPublicFunctionCalls = 3;
     const numUnencryptedLogs = 2;
     const numBlocks = 10;
-    let blocks: DataRetrieval<L2Block>;
+    let blocks: L1Published<L2Block>[];
 
     beforeEach(async () => {
-      blocks = {
-        lastProcessedL1BlockNumber: 4n,
-        retrievedData: Array(numBlocks)
-          .fill(0)
-          .map((_, index: number) =>
-            L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
-          ),
-      };
+      blocks = times(numBlocks, (index: number) => ({
+        data: L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
+        l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) },
+      }));
 
       await store.addBlocks(blocks);
       await store.addBlockBodies(blockBodies);
 
       await Promise.all(
-        blocks.retrievedData.map(block =>
+        blocks.map(block =>
          store.addLogs(
-            block.body.noteEncryptedLogs,
-            block.body.encryptedLogs,
-            block.body.unencryptedLogs,
-            block.number,
+            block.data.body.noteEncryptedLogs,
+            block.data.body.encryptedLogs,
+            block.data.body.unencryptedLogs,
+            block.data.number,
          ),
        ),
      );
@@ -370,7 +382,7 @@
       // get random tx
       const targetBlockIndex = randomInt(numBlocks);
       const targetTxIndex = randomInt(txsPerBlock);
-      const targetTxHash = blocks.retrievedData[targetBlockIndex].body.txEffects[targetTxIndex].txHash;
+      const targetTxHash = blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].txHash;
 
       const response = await store.getUnencryptedLogs({ txHash: targetTxHash });
       const logs = response.logs;
@@ -414,7 +426,7 @@
       const targetFunctionLogIndex = randomInt(numPublicFunctionCalls);
       const targetLogIndex = randomInt(numUnencryptedLogs);
       const targetContractAddress =
-        blocks.retrievedData[targetBlockIndex].body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[
+        blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[
           targetFunctionLogIndex
         ].logs[targetLogIndex].contractAddress;
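The fixture construction above also explains the `blocksSynchedTo: 19n` expectation earlier in the suite. A minimal sketch of the same pattern, using `times` from `@aztec/foundation/collection` (which maps `[0..n-1]` through a callback; the numbers here echo the suite's `beforeEach`):

```ts
import { L2Block } from '@aztec/circuit-types';
import { times } from '@aztec/foundation/collection';

// Pair each random L2 block with fake L1 publish metadata.
const blocks = times(10, i => ({
  data: L2Block.random(i + 1),
  l1: { blockNumber: BigInt(i + 10), blockHash: `0x${i}`, timestamp: BigInt(i * 1000) },
}));

// With blockNumber = i + 10, the last of the 10 blocks reports L1 block 19,
// which is why addBlocks() leaves the store's blocksSynchedTo at 19n.
console.log(blocks.at(-1)!.l1.blockNumber); // 19n
```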
diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts
index dce7a224dfb..bff5c815f34 100644
--- a/yarn-project/archiver/src/archiver/data_retrieval.ts
+++ b/yarn-project/archiver/src/archiver/data_retrieval.ts
@@ -14,20 +14,8 @@ import {
   processMessageSentLogs,
   processTxsPublishedLogs,
 } from './eth_log_handlers.js';
-
-/**
- * Data retrieved from logs
- */
-export type DataRetrieval<T> = {
-  /**
-   * Block number of the last L1 block from which we obtained data.
-   */
-  lastProcessedL1BlockNumber: bigint;
-  /**
-   * The data returned.
-   */
-  retrievedData: T[];
-};
+import { type DataRetrieval } from './structs/data_retrieval.js';
+import { type L1PublishedData } from './structs/published.js';
 
 /**
 * Fetches new L2 block metadata (header, archive snapshot).
@@ -47,8 +35,8 @@ export async function retrieveBlockMetadataFromRollup(
   searchEndBlock: bigint,
   expectedNextL2BlockNum: bigint,
   logger: DebugLogger = createDebugLogger('aztec:archiver'),
-): Promise<DataRetrieval<[Header, AppendOnlyTreeSnapshot]>> {
-  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot][] = [];
+): Promise<[Header, AppendOnlyTreeSnapshot, L1PublishedData][]> {
+  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot, L1PublishedData][] = [];
   do {
     if (searchStartBlock > searchEndBlock) {
       break;
     }
@@ -77,7 +65,7 @@ export async function retrieveBlockMetadataFromRollup(
     searchStartBlock = lastLog.blockNumber! + 1n;
     expectedNextL2BlockNum += BigInt(newBlockMetadata.length);
   } while (blockUntilSynced && searchStartBlock <= searchEndBlock);
-  return { lastProcessedL1BlockNumber: searchStartBlock - 1n, retrievedData: retrievedBlockMetadata };
+  return retrievedBlockMetadata;
 }
 
 /**
diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts
index a3643cd6491..3baeecfd35b 100644
--- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts
+++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts
@@ -16,6 +16,8 @@ import {
   slice,
 } from 'viem';
 
+import { type L1PublishedData } from './structs/published.js';
+
 /**
 * Processes newly received MessageSent (L1 to L2) logs.
 * @param logs - MessageSent logs.
@@ -43,8 +45,8 @@ export async function processL2BlockProcessedLogs(
   publicClient: PublicClient,
   expectedL2BlockNumber: bigint,
   logs: Log[],
-): Promise<[Header, AppendOnlyTreeSnapshot][]> {
-  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot][] = [];
+): Promise<[Header, AppendOnlyTreeSnapshot, L1PublishedData][]> {
+  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot, L1PublishedData][] = [];
   for (const log of logs) {
     const blockNum = log.args.blockNumber;
     if (blockNum !== expectedL2BlockNumber) {
@@ -57,13 +59,24 @@ export async function processL2BlockProcessedLogs(
       log.args.blockNumber,
     );
 
-    retrievedBlockMetadata.push([header, archive]);
+    const l1: L1PublishedData = {
+      blockNumber: log.blockNumber,
+      blockHash: log.blockHash,
+      timestamp: await getL1BlockTime(publicClient, log.blockNumber),
+    };
+
+    retrievedBlockMetadata.push([header, archive, l1]);
     expectedL2BlockNumber++;
   }
 
   return retrievedBlockMetadata;
 }
 
+export async function getL1BlockTime(publicClient: PublicClient, blockNumber: bigint): Promise<bigint> {
+  const block = await publicClient.getBlock({ blockNumber, includeTransactions: false });
+  return block.timestamp;
+}
+
 export async function processTxsPublishedLogs(
   publicClient: PublicClient,
   logs: Log[],
diff --git a/yarn-project/archiver/src/archiver/index.ts b/yarn-project/archiver/src/archiver/index.ts
index 81aa8727e17..6d1c72a21ba 100644
--- a/yarn-project/archiver/src/archiver/index.ts
+++ b/yarn-project/archiver/src/archiver/index.ts
@@ -1,5 +1,6 @@
 export * from './archiver.js';
 export * from './config.js';
+export { type L1Published, type L1PublishedData } from './structs/published.js';
 export { MemoryArchiverStore } from './memory_archiver_store/memory_archiver_store.js';
 export { ArchiverDataStore } from './archiver_store.js';
 export { KVArchiverDataStore } from './kv_archiver_store/kv_archiver_store.js';
diff --git a/yarn-project/archiver/src/archiver/instrumentation.ts b/yarn-project/archiver/src/archiver/instrumentation.ts
index 7f42c594a9b..6a53027f460 100644
--- a/yarn-project/archiver/src/archiver/instrumentation.ts
+++ b/yarn-project/archiver/src/archiver/instrumentation.ts
@@ -1,20 +1,27 @@
 import { type L2Block } from '@aztec/circuit-types';
+import { createDebugLogger } from '@aztec/foundation/log';
 import {
   Attributes,
   type Gauge,
   type Histogram,
   Metrics,
   type TelemetryClient,
+  type UpDownCounter,
   ValueType,
   exponentialBuckets,
+  millisecondBuckets,
 } from '@aztec/telemetry-client';
 
 export class ArchiverInstrumentation {
   private blockHeight: Gauge;
   private blockSize: Gauge;
   private syncDuration: Histogram;
+  private proofsSubmittedDelay: Histogram;
+  private proofsSubmittedCount: UpDownCounter;
 
-  constructor(telemetry: TelemetryClient) {
+  private log = createDebugLogger('aztec:archiver:instrumentation');
+
+  constructor(private telemetry: TelemetryClient) {
     const meter = telemetry.getMeter('Archiver');
     this.blockHeight = meter.createGauge(Metrics.ARCHIVER_BLOCK_HEIGHT, {
       description: 'The height of the latest block processed by the archiver',
@@ -34,6 +41,24 @@ export class ArchiverInstrumentation {
         explicitBucketBoundaries: exponentialBuckets(1, 16),
       },
     });
+
+    this.proofsSubmittedCount = meter.createUpDownCounter(Metrics.ARCHIVER_ROLLUP_PROOF_COUNT, {
+      description: 'Number of proofs submitted',
+      valueType: ValueType.INT,
+    });
+
+    this.proofsSubmittedDelay = meter.createHistogram(Metrics.ARCHIVER_ROLLUP_PROOF_DELAY, {
+      unit: 'ms',
+      description: 'Time after a block is submitted until its proof is published',
+      valueType: ValueType.INT,
+      advice: {
+        explicitBucketBoundaries: millisecondBuckets(1, 80), // 10ms -> ~3hs
+      },
+    });
+  }
+
+  public isEnabled(): boolean {
+    return this.telemetry.isEnabled();
   }
 
   public processNewBlocks(syncTimePerBlock: number, blocks: L2Block[]) {
@@ -47,4 +72,17 @@ export class ArchiverInstrumentation {
   public updateLastProvenBlock(blockNumber: number) {
     this.blockHeight.record(blockNumber, { [Attributes.STATUS]: 'proven' });
   }
+
+  public processProofsVerified(logs: { proverId: string; l2BlockNumber: bigint; delay: bigint }[]) {
+    for (const log of logs) {
+      this.log.debug('Recording proof verified event', log);
+      this.proofsSubmittedCount.add(1, {
+        [Attributes.ROLLUP_PROVER_ID]: log.proverId,
+        [Attributes.PROOF_TIMED_OUT]: log.delay > 20n * 60n * 1000n,
+      });
+      this.proofsSubmittedDelay.record(Math.ceil(Number(log.delay)), {
+        [Attributes.ROLLUP_PROVER_ID]: log.proverId,
+      });
+    }
+  }
 }
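Note that the `PROOF_TIMED_OUT` attribute above is derived rather than reported: a proof is tagged timed-out when its delay exceeds `20n * 60n * 1000n` ms. A small sketch of that check in isolation (names here are illustrative):

```ts
// 20 minutes expressed in milliseconds as a bigint: 1_200_000n.
const TIMEOUT_MS = 20n * 60n * 1000n;

const isTimedOut = (delayMs: bigint): boolean => delayMs > TIMEOUT_MS;

console.log(isTimedOut(1_000n)); // false: proven one second after submission
console.log(isTimedOut(2_000_000n)); // true: ~33 minutes after submission
```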
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
index 006e389b267..9f1c5b3ac64 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
@@ -2,7 +2,7 @@ import { Body } from '@aztec/circuit-types';
 import { createDebugLogger } from '@aztec/foundation/log';
 import { type AztecKVStore, type AztecMap, type AztecSingleton } from '@aztec/kv-store';
 
-import { type DataRetrieval } from '../data_retrieval.js';
+import { type DataRetrieval } from '../structs/data_retrieval.js';
 
 export class BlockBodyStore {
   /** Map block body hash to block body */
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
index c054abe73fd..2fb206487b6 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
@@ -3,7 +3,7 @@ import { AppendOnlyTreeSnapshot, type AztecAddress, Header, INITIAL_L2_BLOCK_NUM
 import { createDebugLogger } from '@aztec/foundation/log';
 import { type AztecKVStore, type AztecMap, type AztecSingleton, type Range } from '@aztec/kv-store';
 
-import { type DataRetrieval } from '../data_retrieval.js';
+import { type L1Published, type L1PublishedData } from '../structs/published.js';
 import { type BlockBodyStore } from './block_body_store.js';
 
 type BlockIndexValue = [blockNumber: number, index: number];
@@ -11,6 +11,7 @@ type BlockIndexValue = [blockNumber: number, index: number];
 type BlockStorage = {
   header: Buffer;
   archive: Buffer;
+  l1: L1PublishedData;
 };
 
 /**
@@ -22,9 +23,6 @@ export class BlockStore {
   /** Stores L1 block number in which the last processed L2 block was included */
   #lastSynchedL1Block: AztecSingleton<bigint>;
 
-  /** Stores last proven L2 block number */
-  #lastProvenL2Block: AztecSingleton<number>;
-
   /** Index mapping transaction hash (as a string) to its location in a block */
   #txIndex: AztecMap<string, BlockIndexValue>;
 
@@ -42,28 +40,32 @@ export class BlockStore {
     this.#txIndex = db.openMap('archiver_tx_index');
     this.#contractIndex = db.openMap('archiver_contract_index');
     this.#lastSynchedL1Block = db.openSingleton('archiver_last_synched_l1_block');
-    this.#lastProvenL2Block = db.openSingleton('archiver_last_proven_l2_block');
   }
 
   /**
   * Append new blocks to the store's list.
-   * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
+   * @param blocks - The L2 blocks to be added to the store.
   * @returns True if the operation is successful.
   */
-  addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean> {
+  addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean> {
+    if (blocks.length === 0) {
+      return Promise.resolve(true);
+    }
+
     return this.db.transaction(() => {
-      for (const block of blocks.retrievedData) {
-        void this.#blocks.set(block.number, {
-          header: block.header.toBuffer(),
-          archive: block.archive.toBuffer(),
+      for (const block of blocks) {
+        void this.#blocks.set(block.data.number, {
+          header: block.data.header.toBuffer(),
+          archive: block.data.archive.toBuffer(),
+          l1: block.l1,
         });
-        block.body.txEffects.forEach((tx, i) => {
-          void this.#txIndex.set(tx.txHash.toString(), [block.number, i]);
+        block.data.body.txEffects.forEach((tx, i) => {
+          void this.#txIndex.set(tx.txHash.toString(), [block.data.number, i]);
        });
      }
 
-      void this.#lastSynchedL1Block.set(blocks.lastProcessedL1BlockNumber);
+      void this.#lastSynchedL1Block.set(blocks[blocks.length - 1].l1.blockNumber);
 
       return true;
     });
@@ -75,7 +77,7 @@ export class BlockStore {
   * @param limit - The number of blocks to return.
   * @returns The requested L2 blocks
   */
-  *getBlocks(start: number, limit: number): IterableIterator<L2Block> {
+  *getBlocks(start: number, limit: number): IterableIterator<L1Published<L2Block>> {
     for (const blockStorage of this.#blocks.values(this.#computeBlockRange(start, limit))) {
       yield this.getBlockFromBlockStorage(blockStorage);
     }
@@ -86,7 +88,7 @@ export class BlockStore {
   * @param blockNumber - The number of the block to return.
   * @returns The requested L2 block.
   */
-  getBlock(blockNumber: number): L2Block | undefined {
+  getBlock(blockNumber: number): L1Published<L2Block> | undefined {
     const blockStorage = this.#blocks.get(blockNumber);
     if (!blockStorage || !blockStorage.header) {
       return undefined;
@@ -104,11 +106,8 @@ export class BlockStore {
       throw new Error('Body is not able to be retrieved from BodyStore');
     }
 
-    return L2Block.fromFields({
-      header,
-      archive,
-      body,
-    });
+    const l2Block = L2Block.fromFields({ header, archive, body });
+    return { data: l2Block, l1: blockStorage.l1 };
   }
 
   /**
@@ -123,7 +122,7 @@ export class BlockStore {
     }
 
     const block = this.getBlock(blockNumber);
-    return block?.body.txEffects[txIndex];
+    return block?.data.body.txEffects[txIndex];
   }
 
   /**
@@ -138,15 +137,15 @@ export class BlockStore {
     }
 
     const block = this.getBlock(blockNumber)!;
-    const tx = block.body.txEffects[txIndex];
+    const tx = block.data.body.txEffects[txIndex];
 
     return new TxReceipt(
       txHash,
       TxReceipt.statusFromRevertCode(tx.revertCode),
       '',
       tx.transactionFee.toBigInt(),
-      block.hash().toBuffer(),
-      block.number,
+      block.data.hash().toBuffer(),
+      block.data.number,
     );
   }
 
@@ -185,14 +184,6 @@ export class BlockStore {
     return this.#lastSynchedL1Block.get() ?? 0n;
   }
 
-  getProvenL2BlockNumber(): number {
-    return this.#lastProvenL2Block.get() ?? 0;
-  }
-
-  async setProvenL2BlockNumber(blockNumber: number) {
-    await this.#lastProvenL2Block.set(blockNumber);
-  }
-
   #computeBlockRange(start: number, limit: number): Required<Pick<Range<number>, 'start' | 'end'>> {
     if (limit < 1) {
       throw new Error(`Invalid limit: ${limit}`);
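Since `BlockStore` now persists L1 publish info alongside each block, a reader can report provenance directly from storage. A hedged usage sketch (the `describeBlock` helper and its parameter are hypothetical, not part of this PR):

```ts
import { type L2Block } from '@aztec/circuit-types';

// Local mirrors of the shapes used by BlockStore above.
type L1PublishedData = { blockNumber: bigint; timestamp: bigint; blockHash: string };
type L1Published<T> = { data: T; l1: L1PublishedData };

function describeBlock(getBlock: (n: number) => L1Published<L2Block> | undefined, n: number): string {
  const block = getBlock(n);
  if (!block) {
    return `Block ${n} not yet synced`;
  }
  const { blockNumber, timestamp } = block.l1;
  return `L2 block ${n} was published in L1 block ${blockNumber} at unix time ${timestamp}`;
}
```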
   * @returns The requested L2 blocks
   */
-  getBlocks(start: number, limit: number): Promise<L2Block[]> {
+  getBlocks(start: number, limit: number): Promise<L1Published<L2Block>[]> {
     try {
       return Promise.resolve(Array.from(this.#blockStore.getBlocks(start, limit)));
     } catch (err) {
@@ -247,11 +251,11 @@ export class KVArchiverDataStore implements ArchiverDataStore {
   }
 
   getProvenL2BlockNumber(): Promise<number> {
-    return Promise.resolve(this.#blockStore.getProvenL2BlockNumber());
+    return Promise.resolve(this.#provenStore.getProvenL2BlockNumber());
   }
 
-  async setProvenL2BlockNumber(blockNumber: number) {
-    await this.#blockStore.setProvenL2BlockNumber(blockNumber);
+  async setProvenL2BlockNumber(blockNumber: SingletonDataRetrieval<number>) {
+    await this.#provenStore.setProvenL2BlockNumber(blockNumber);
   }
 
   /**
@@ -262,6 +266,7 @@ export class KVArchiverDataStore implements ArchiverDataStore {
       blocksSynchedTo: this.#blockStore.getSynchedL1BlockNumber(),
       blockBodiesSynchedTo: this.#blockBodyStore.getSynchedL1BlockNumber(),
       messagesSynchedTo: this.#messageStore.getSynchedL1BlockNumber(),
+      provenLogsSynchedTo: this.#provenStore.getSynchedL1BlockNumber(),
     });
   }
 }
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts
index 8b6023ad65c..0d412b4b70e 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts
@@ -8,7 +8,7 @@ import {
 import { createDebugLogger } from '@aztec/foundation/log';
 import { type AztecKVStore, type AztecMap, type AztecSingleton } from '@aztec/kv-store';
 
-import { type DataRetrieval } from '../data_retrieval.js';
+import { type DataRetrieval } from '../structs/data_retrieval.js';
 
 /**
 * LMDB implementation of the ArchiverDataStore interface.
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/proven_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/proven_store.ts
new file mode 100644
index 00000000000..2009ce80627
--- /dev/null
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/proven_store.ts
@@ -0,0 +1,34 @@
+import { type AztecKVStore, type AztecSingleton } from '@aztec/kv-store';
+
+import { type SingletonDataRetrieval } from '../structs/data_retrieval.js';
+
+export class ProvenStore {
+  /** Stores the last L1 block number from which proven-block logs were processed */
+  #lastSynchedL1Block: AztecSingleton<bigint>;
+
+  /** Stores last proven L2 block number */
+  #lastProvenL2Block: AztecSingleton<number>;
+
+  constructor(private db: AztecKVStore) {
+    this.#lastSynchedL1Block = db.openSingleton('archiver_last_l1_block_proven_logs');
+    this.#lastProvenL2Block = db.openSingleton('archiver_last_proven_l2_block');
+  }
+
+  /**
+   * Gets the most recent L1 block processed.
+   */
+  getSynchedL1BlockNumber(): bigint {
+    return this.#lastSynchedL1Block.get() ?? 0n;
+  }
+
+  getProvenL2BlockNumber(): number {
+    return this.#lastProvenL2Block.get() ?? 0;
+  }
+
+  async setProvenL2BlockNumber(blockNumber: SingletonDataRetrieval<number>) {
+    await this.db.transaction(() => {
+      void this.#lastProvenL2Block.set(blockNumber.retrievedData);
+      void this.#lastSynchedL1Block.set(blockNumber.lastProcessedL1BlockNumber);
+    });
+  }
+}
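The transaction in `setProvenL2BlockNumber` is the point of this store: the proven block number and the L1 watermark must move together, or a crash between the two writes could re-process or skip proof logs. An illustrative in-memory stand-in (not the LMDB implementation) showing the paired update and the round-trip the test suite asserts:

```ts
type SingletonDataRetrieval<T> = { lastProcessedL1BlockNumber: bigint; retrievedData: T };

class InMemoryProvenStore {
  private lastProvenL2Block = 0;
  private lastSynchedL1Block = 0n;

  setProvenL2BlockNumber(value: SingletonDataRetrieval<number>): void {
    // In the real store both writes happen inside db.transaction(); here they
    // are simply adjacent statements.
    this.lastProvenL2Block = value.retrievedData;
    this.lastSynchedL1Block = value.lastProcessedL1BlockNumber;
  }

  getSynchedL1BlockNumber(): bigint {
    return this.lastSynchedL1Block;
  }
}

const store = new InMemoryProvenStore();
store.setProvenL2BlockNumber({ retrievedData: 5, lastProcessedL1BlockNumber: 3n });
console.log(store.getSynchedL1BlockNumber()); // 3n, as in the sync-point test above
```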
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
index 51f93c71c86..de8237cf336 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
@@ -1,4 +1,5 @@
 import { L2Block } from '@aztec/circuit-types';
+import { times } from '@aztec/foundation/collection';
 
 import { type ArchiverDataStore } from '../archiver_store.js';
 import { describeArchiverDataStore } from '../archiver_store_test_suite.js';
@@ -17,21 +18,19 @@ describe('MemoryArchiverStore', () => {
   it('does not return more than "maxLogs" logs', async () => {
     const maxLogs = 5;
     archiverStore = new MemoryArchiverStore(maxLogs);
-    const blocks = {
-      lastProcessedL1BlockNumber: 3n,
-      retrievedData: Array(10)
-        .fill(0)
-        .map((_, index: number) => L2Block.random(index + 1, 4, 2, 3, 2, 2)),
-    };
+    const blocks = times(10, (index: number) => ({
+      data: L2Block.random(index + 1, 4, 2, 3, 2, 2),
+      l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) },
+    }));
 
     await archiverStore.addBlocks(blocks);
 
     await Promise.all(
-      blocks.retrievedData.map(block =>
+      blocks.map(block =>
         archiverStore.addLogs(
-          block.body.noteEncryptedLogs,
-          block.body.encryptedLogs,
-          block.body.unencryptedLogs,
-          block.number,
+          block.data.body.noteEncryptedLogs,
+          block.data.body.encryptedLogs,
+          block.data.body.unencryptedLogs,
+          block.data.number,
        ),
      ),
    );
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
index d9a2d5afc0b..27e69b174e3 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
@@ -27,7 +27,8 @@ import {
 } from '@aztec/types/contracts';
 
 import { type ArchiverDataStore, type ArchiverL1SynchPoint } from '../archiver_store.js';
-import { type DataRetrieval } from '../data_retrieval.js';
+import { type DataRetrieval, type SingletonDataRetrieval } from '../structs/data_retrieval.js';
+import { type L1Published } from '../structs/published.js';
 import { L1ToL2MessageStore } from './l1_to_l2_message_store.js';
 
 /**
@@ -37,7 +38,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
   /**
   * An array containing all the L2 blocks that have been fetched so far.
   */
-  private l2Blocks: L2Block[] = [];
+  private l2Blocks: L1Published<L2Block>[] = [];
 
   /**
   * A mapping of body hash to body
@@ -85,6 +86,8 @@ export class MemoryArchiverStore implements ArchiverDataStore {
   private lastL1BlockNewBlocks: bigint = 0n;
   private lastL1BlockNewBlockBodies: bigint = 0n;
   private lastL1BlockNewMessages: bigint = 0n;
+  private lastL1BlockNewProvenLogs: bigint = 0n;
+
   private lastProvenL2BlockNumber: number = 0;
 
   constructor(
@@ -152,10 +155,14 @@ export class MemoryArchiverStore implements ArchiverDataStore {
   * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
   * @returns True if the operation is successful.
   */
-  public addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean> {
-    this.lastL1BlockNewBlocks = blocks.lastProcessedL1BlockNumber;
-    this.l2Blocks.push(...blocks.retrievedData);
-    this.txEffects.push(...blocks.retrievedData.flatMap(b => b.body.txEffects));
+  public addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean> {
+    if (blocks.length === 0) {
+      return Promise.resolve(true);
+    }
+
+    this.lastL1BlockNewBlocks = blocks[blocks.length - 1].l1.blockNumber;
+    this.l2Blocks.push(...blocks);
+    this.txEffects.push(...blocks.flatMap(b => b.data.body.txEffects));
 
     return Promise.resolve(true);
   }
@@ -246,7 +253,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
   * @returns The requested L2 blocks.
   * @remarks When "from" is smaller than genesis block number, blocks from the beginning are returned.
   */
-  public getBlocks(from: number, limit: number): Promise<L2Block[]> {
+  public getBlocks(from: number, limit: number): Promise<L1Published<L2Block>[]> {
     // Return an empty array if we are outside of range
     if (limit < 1) {
       return Promise.reject(new Error(`Invalid limit: ${limit}`));
@@ -278,7 +285,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
   */
   public getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
     for (const block of this.l2Blocks) {
-      for (const txEffect of block.body.txEffects) {
+      for (const txEffect of block.data.body.txEffects) {
         if (txEffect.txHash.equals(txHash)) {
           return Promise.resolve(
             new TxReceipt(
@@ -286,8 +293,8 @@ export class MemoryArchiverStore implements ArchiverDataStore {
               TxReceipt.statusFromRevertCode(txEffect.revertCode),
               '',
               txEffect.transactionFee.toBigInt(),
-              block.hash().toBuffer(),
-              block.number,
+              block.data.hash().toBuffer(),
+              block.data.number,
            ),
          );
        }
@@ -397,10 +404,10 @@ export class MemoryArchiverStore implements ArchiverDataStore {
         for (; logIndexInTx < txLogs.length; logIndexInTx++) {
           const log = txLogs[logIndexInTx];
           if (
-            (!txHash || block.body.txEffects[txIndexInBlock].txHash.equals(txHash)) &&
+            (!txHash || block.data.body.txEffects[txIndexInBlock].txHash.equals(txHash)) &&
             (!contractAddress || log.contractAddress.equals(contractAddress))
           ) {
-            logs.push(new ExtendedUnencryptedL2Log(new LogId(block.number, txIndexInBlock, logIndexInTx), log));
+            logs.push(new ExtendedUnencryptedL2Log(new LogId(block.data.number, txIndexInBlock, logIndexInTx), log));
             if (logs.length === this.maxLogs) {
               return Promise.resolve({
                 logs,
@@ -428,15 +435,16 @@ export class MemoryArchiverStore implements ArchiverDataStore {
     if (this.l2Blocks.length === 0) {
       return Promise.resolve(INITIAL_L2_BLOCK_NUM - 1);
     }
-    return Promise.resolve(this.l2Blocks[this.l2Blocks.length - 1].number);
+    return Promise.resolve(this.l2Blocks[this.l2Blocks.length - 1].data.number);
   }
 
   public getProvenL2BlockNumber(): Promise<number> {
     return Promise.resolve(this.lastProvenL2BlockNumber);
   }
 
-  public setProvenL2BlockNumber(l2BlockNumber: number): Promise<void> {
-    this.lastProvenL2BlockNumber = l2BlockNumber;
+  public setProvenL2BlockNumber(l2BlockNumber: SingletonDataRetrieval<number>): Promise<void> {
+    this.lastProvenL2BlockNumber = l2BlockNumber.retrievedData;
+    this.lastL1BlockNewProvenLogs = l2BlockNumber.lastProcessedL1BlockNumber;
     return Promise.resolve();
   }
 
@@ -445,6 +453,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
       blocksSynchedTo: this.lastL1BlockNewBlocks,
       messagesSynchedTo: this.lastL1BlockNewMessages,
       blockBodiesSynchedTo: this.lastL1BlockNewBlockBodies,
+      provenLogsSynchedTo: this.lastL1BlockNewProvenLogs,
     });
   }
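Both store implementations now derive the `blocksSynchedTo` watermark from the last block's own publish data rather than from a separately-supplied `lastProcessedL1BlockNumber`, which is why each `addBlocks` gained an empty-batch guard. A minimal sketch of that contract (the helper name is illustrative):

```ts
type L1PublishedData = { blockNumber: bigint; timestamp: bigint; blockHash: string };
type L1Published<T> = { data: T; l1: L1PublishedData };

// The watermark comes from the final element, so an empty batch must return
// early instead of reading blocks[blocks.length - 1] (mirrors the
// `if (blocks.length === 0) return Promise.resolve(true)` guards above).
function lastL1BlockOf<T>(blocks: L1Published<T>[]): bigint | undefined {
  if (blocks.length === 0) {
    return undefined; // nothing to advance
  }
  return blocks[blocks.length - 1].l1.blockNumber;
}

console.log(lastL1BlockOf([])); // undefined
```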
diff --git a/yarn-project/archiver/src/archiver/structs/data_retrieval.ts b/yarn-project/archiver/src/archiver/structs/data_retrieval.ts
new file mode 100644
index 00000000000..215d54356f4
--- /dev/null
+++ b/yarn-project/archiver/src/archiver/structs/data_retrieval.ts
@@ -0,0 +1,27 @@
+/**
+ * Data retrieved from logs
+ */
+export type DataRetrieval<T> = {
+  /**
+   * Block number of the last L1 block from which we obtained data.
+   */
+  lastProcessedL1BlockNumber: bigint;
+  /**
+   * The data returned.
+   */
+  retrievedData: T[];
+};
+
+/**
+ * Data retrieved from logs, where the retrieved data is a single value.
+ */
+export type SingletonDataRetrieval<T> = {
+  /**
+   * Block number of the last L1 block from which we obtained data.
+   */
+  lastProcessedL1BlockNumber: bigint;
+  /**
+   * The data returned.
+   */
+  retrievedData: T;
+};
diff --git a/yarn-project/archiver/src/archiver/structs/published.ts b/yarn-project/archiver/src/archiver/structs/published.ts
new file mode 100644
index 00000000000..12a469c1715
--- /dev/null
+++ b/yarn-project/archiver/src/archiver/structs/published.ts
@@ -0,0 +1,11 @@
+/** Extends a type with L1 published info (block number, hash, and timestamp) */
+export type L1Published<T> = {
+  data: T;
+  l1: L1PublishedData;
+};
+
+export type L1PublishedData = {
+  blockNumber: bigint;
+  timestamp: bigint;
+  blockHash: string;
+};
diff --git a/yarn-project/archiver/src/index.ts b/yarn-project/archiver/src/index.ts
index 7b8fa78a625..523565aea4e 100644
--- a/yarn-project/archiver/src/index.ts
+++ b/yarn-project/archiver/src/index.ts
@@ -6,6 +6,7 @@ import { createPublicClient, http } from 'viem';
 import { localhost } from 'viem/chains';
 
 import { Archiver, getArchiverConfigFromEnv } from './archiver/index.js';
+import { ArchiverInstrumentation } from './archiver/instrumentation.js';
 import { MemoryArchiverStore } from './archiver/memory_archiver_store/memory_archiver_store.js';
 
 export * from './archiver/index.js';
@@ -40,7 +41,7 @@ async function main() {
     l1Contracts.registryAddress,
     archiverStore,
     1000,
-    new NoopTelemetryClient(),
+    new ArchiverInstrumentation(new NoopTelemetryClient()),
   );
 
   const shutdown = async () => {
diff --git a/yarn-project/foundation/src/collection/array.test.ts b/yarn-project/foundation/src/collection/array.test.ts
index ccff55b1839..97bee2fd7f1 100644
--- a/yarn-project/foundation/src/collection/array.test.ts
+++ b/yarn-project/foundation/src/collection/array.test.ts
@@ -1,4 +1,4 @@
-import { removeArrayPaddingEnd, times } from './array.js';
+import { compactArray, removeArrayPaddingEnd, times, unique } from './array.js';
 
 describe('times', () => {
   it('should return an array with the result from all executions', () => {
@@ -33,3 +33,31 @@ describe('removeArrayPaddingEnd', () => {
     expect(removeArrayPaddingEnd([0, 0, 0], i => i === 0)).toEqual([]);
   });
 });
+
+describe('compactArray', () => {
+  it('works as expected', () => {
+    expect(compactArray([3, undefined, 4, undefined])).toEqual([3, 4]);
+  });
+
+  it('handles an empty array', () => {
+    expect(compactArray([])).toEqual([]);
+  });
+
+  it('handles an array with just undefineds', () => {
+    expect(compactArray([undefined, undefined])).toEqual([]);
+  });
+
+  it('handles an array with no undefineds', () => {
+    expect(compactArray([2, 3])).toEqual([2, 3]);
+  });
+
+  it('does not remove falsey values', () => {
+    expect(compactArray([0, null, false, '', [], undefined])).toEqual([0, null, false, '', []]);
+  });
+});
+
+describe('unique', () => {
+  it('works with bigints', () => {
+    expect(unique([1n, 2n, 1n])).toEqual([1n, 2n]);
+  });
+});
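The two retrieval wrappers introduced in `structs/data_retrieval.ts` above differ only in cardinality; a quick illustration with concrete (made-up) values:

```ts
type DataRetrieval<T> = { lastProcessedL1BlockNumber: bigint; retrievedData: T[] };
type SingletonDataRetrieval<T> = { lastProcessedL1BlockNumber: bigint; retrievedData: T };

// Many items per L1 scan: e.g. block bodies.
const bodies: DataRetrieval<string> = {
  lastProcessedL1BlockNumber: 4n,
  retrievedData: ['body1', 'body2'],
};

// A single value per L1 scan: e.g. the latest proven L2 block number.
const provenBlock: SingletonDataRetrieval<number> = {
  lastProcessedL1BlockNumber: 3n,
  retrievedData: 5,
};

console.log(bodies.retrievedData.length, provenBlock.retrievedData); // 2 5
```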
diff --git a/yarn-project/foundation/src/collection/array.ts b/yarn-project/foundation/src/collection/array.ts
index 23c75cc8e4f..6f2262a1af3 100644
--- a/yarn-project/foundation/src/collection/array.ts
+++ b/yarn-project/foundation/src/collection/array.ts
@@ -82,3 +82,21 @@ export function arraySerializedSizeOfNonEmpty(
     .map(x => x!.toBuffer().length)
     .reduce((a, b) => a + b, 0);
 }
+
+/**
+ * Removes duplicates from the given array.
+ * @param arr - The array.
+ * @returns A new array.
+ */
+export function unique<T>(arr: T[]): T[] {
+  return [...new Set(arr)];
+}
+
+/**
+ * Removes all undefined elements from the array.
+ * @param arr - The array.
+ * @returns A new array.
+ */
+export function compactArray<T>(arr: (T | undefined)[]): T[] {
+  return arr.filter((x: T | undefined): x is T => x !== undefined);
+}
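A short usage note on the two helpers just added: `unique` preserves first-seen order (Set insertion order), and `compactArray` narrows `(T | undefined)[]` to `T[]` via a type guard while keeping other falsey values. Sketch:

```ts
import { compactArray, unique } from '@aztec/foundation/collection';

const delays: (bigint | undefined)[] = [1000n, undefined, 2000n, 1000n];

const present: bigint[] = compactArray(delays); // [1000n, 2000n, 1000n]
const distinct: bigint[] = unique(present); // [1000n, 2000n]

console.log(present, distinct);
```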
diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts
index 7ea62671292..a6dc55feb8b 100644
--- a/yarn-project/prover-client/src/tx-prover/tx-prover.ts
+++ b/yarn-project/prover-client/src/tx-prover/tx-prover.ts
@@ -121,7 +121,7 @@ export class TxProver implements ProverClient {
       ? new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath)
       : undefined;
 
-    return new TestCircuitProver(telemetry, simulationProvider);
+    return new TestCircuitProver(telemetry, simulationProvider, config);
   }
 
   public getProvingJobSource(): ProvingJobSource {
diff --git a/yarn-project/telemetry-client/src/attributes.ts b/yarn-project/telemetry-client/src/attributes.ts
index df65306f493..34ad82007fd 100644
--- a/yarn-project/telemetry-client/src/attributes.ts
+++ b/yarn-project/telemetry-client/src/attributes.ts
@@ -62,3 +62,7 @@ export const TX_PHASE_NAME = 'aztec.tx.phase_name';
 export const PROVING_JOB_TYPE = 'aztec.proving.job_type';
 
 export const MERKLE_TREE_NAME = 'aztec.merkle_tree.name';
+/** The prover-id in a root rollup proof. */
+export const ROLLUP_PROVER_ID = 'aztec.rollup.prover_id';
+/** Whether the proof submission timed out (delayed more than 20 min) */
+export const PROOF_TIMED_OUT = 'aztec.proof.timed_out';
diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts
index 7d69b9904e0..48d8deda753 100644
--- a/yarn-project/telemetry-client/src/metrics.ts
+++ b/yarn-project/telemetry-client/src/metrics.ts
@@ -29,6 +29,8 @@ export const MEMPOOL_TX_SIZE = 'aztec.mempool.tx_size';
 export const ARCHIVER_SYNC_DURATION = 'aztec.archiver.sync_duration';
 export const ARCHIVER_BLOCK_HEIGHT = 'aztec.archiver.block_height';
 export const ARCHIVER_BLOCK_SIZE = 'aztec.archiver.block_size';
+export const ARCHIVER_ROLLUP_PROOF_DELAY = 'aztec.archiver.rollup_proof_delay';
+export const ARCHIVER_ROLLUP_PROOF_COUNT = 'aztec.archiver.rollup_proof_count';
 
 export const NODE_RECEIVE_TX_DURATION = 'aztec.node.receive_tx.duration';
 export const NODE_RECEIVE_TX_COUNT = 'aztec.node.receive_tx.count';
diff --git a/yarn-project/telemetry-client/src/noop.ts b/yarn-project/telemetry-client/src/noop.ts
index e4ab8162ffb..11872833ba6 100644
--- a/yarn-project/telemetry-client/src/noop.ts
+++ b/yarn-project/telemetry-client/src/noop.ts
@@ -14,6 +14,10 @@ export class NoopTelemetryClient implements TelemetryClient {
   stop(): Promise<void> {
     return Promise.resolve();
   }
+
+  isEnabled() {
+    return false;
+  }
 }
 
 // @opentelemetry/api internally uses NoopTracer and NoopSpan but they're not exported
diff --git a/yarn-project/telemetry-client/src/otel.ts b/yarn-project/telemetry-client/src/otel.ts
index 1d168895a2f..7e6ae406233 100644
--- a/yarn-project/telemetry-client/src/otel.ts
+++ b/yarn-project/telemetry-client/src/otel.ts
@@ -66,6 +66,10 @@ export class OpenTelemetryClient implements TelemetryClient {
     this.hostMetrics.start();
   }
 
+  public isEnabled() {
+    return true;
+  }
+
   public async stop() {
     await Promise.all([this.meterProvider.shutdown()]);
   }
diff --git a/yarn-project/telemetry-client/src/telemetry.ts b/yarn-project/telemetry-client/src/telemetry.ts
index b52e5bae1e8..a481690f155 100644
--- a/yarn-project/telemetry-client/src/telemetry.ts
+++ b/yarn-project/telemetry-client/src/telemetry.ts
@@ -79,6 +79,10 @@ export interface Meter {
 * A telemetry client that provides meters for recording metrics.
 */
 export interface TelemetryClient {
+  /**
+   * Whether the client is enabled and reporting metrics.
+   **/
+  isEnabled(): boolean;
   /**
   * Creates a new meter
   * @param name - The name of the meter.
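Taken together, the `isEnabled` additions let callers skip expensive metric preparation (the extra RPC calls for L1 block timestamps in `emitProofVerifiedMetrics`) when telemetry is a no-op. A minimal sketch of the gating pattern, with `TelemetryClient` reduced to the one method for illustration:

```ts
interface TelemetryClient {
  isEnabled(): boolean;
}

class Noop implements TelemetryClient {
  isEnabled() {
    return false;
  }
}

async function emitMetrics(client: TelemetryClient, prepare: () => Promise<void>) {
  if (!client.isEnabled()) {
    return; // mirrors the early return in emitProofVerifiedMetrics
  }
  await prepare();
}

void emitMetrics(new Noop(), async () => {
  // would fetch L1 block times and record histograms here
});
```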