diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol
index de9e5c88ea7..c6f08a170e1 100644
--- a/l1-contracts/src/core/Rollup.sol
+++ b/l1-contracts/src/core/Rollup.sol
@@ -136,10 +136,27 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup {
     override(ITestRollup)
     onlyOwner
   {
+    fakeBlockNumberAsProven(blockNumber);
+    assumeProvenThroughBlockNumber = blockNumber;
+  }
+
+  function fakeBlockNumberAsProven(uint256 blockNumber) private {
     if (blockNumber > tips.provenBlockNumber && blockNumber <= tips.pendingBlockNumber) {
       tips.provenBlockNumber = blockNumber;
+
+      // If this results in a new epoch, create a fake claim for it
+      // Otherwise nextEpochToProve will report an old epoch
+      Epoch epoch = getEpochForBlock(blockNumber);
+      if (Epoch.unwrap(epoch) == 0 || Epoch.unwrap(epoch) > Epoch.unwrap(proofClaim.epochToProve)) {
+        proofClaim = DataStructures.EpochProofClaim({
+          epochToProve: epoch,
+          basisPointFee: 0,
+          bondAmount: 0,
+          bondProvider: address(0),
+          proposerClaimant: msg.sender
+        });
+      }
     }
-    assumeProvenThroughBlockNumber = blockNumber;
   }
 
   /**
@@ -425,7 +442,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup {
 
     // Automatically flag the block as proven if we have cheated and set assumeProvenThroughBlockNumber.
     if (blockNumber <= assumeProvenThroughBlockNumber) {
-      tips.provenBlockNumber = blockNumber;
+      fakeBlockNumberAsProven(blockNumber);
 
       if (header.globalVariables.coinbase != address(0) && header.totalFees > 0) {
         // @note This will currently fail if there are insufficient funds in the bridge
diff --git a/scripts/ci/get_e2e_jobs.sh b/scripts/ci/get_e2e_jobs.sh
index 18c54f39ea4..dbccca50fa2 100755
--- a/scripts/ci/get_e2e_jobs.sh
+++ b/scripts/ci/get_e2e_jobs.sh
@@ -24,7 +24,7 @@ allow_list=(
   "e2e-static-calls"
   "integration-l1-publisher"
   "e2e-cheat-codes"
-  "e2e-prover-with-padding"
+  "e2e-prover-fake-proofs"
   "e2e-lending-contract"
 )
 
diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile
index 82d561f27e8..9ec2babca55 100644
--- a/yarn-project/Earthfile
+++ b/yarn-project/Earthfile
@@ -214,6 +214,7 @@ end-to-end-base:
     ENV CHROME_BIN="/usr/bin/chromium"
     ENV PATH=/opt/foundry/bin:$PATH
     ENV HARDWARE_CONCURRENCY=""
+    ENV FAKE_PROOFS=""
     ENV BB_WORKING_DIRECTORY=/usr/src/bb
     ENV BB_BINARY_PATH=/usr/src/barretenberg/cpp/build/bin/bb
     ENV ACVM_WORKING_DIRECTORY=/usr/src/acvm
diff --git a/yarn-project/archiver/package.json b/yarn-project/archiver/package.json
index c1571c1429c..4afb656f15c 100644
--- a/yarn-project/archiver/package.json
+++ b/yarn-project/archiver/package.json
@@ -4,7 +4,8 @@
   "type": "module",
   "exports": {
     ".": "./dest/index.js",
-    "./data-retrieval": "./dest/archiver/data_retrieval.js"
+    "./data-retrieval": "./dest/archiver/data_retrieval.js",
+    "./test": "./dest/test/index.js"
   },
   "typedocOptions": {
     "entryPoints": [
diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts
index 23f5f668254..cccef6420c9 100644
--- a/yarn-project/archiver/src/archiver/archiver.ts
+++ b/yarn-project/archiver/src/archiver/archiver.ts
@@ -18,6 +18,7 @@ import {
   ContractClassRegisteredEvent,
   ContractInstanceDeployedEvent,
   type FunctionSelector,
+  type Header,
   PrivateFunctionBroadcastedEvent,
   UnconstrainedFunctionBroadcastedEvent,
   isValidPrivateFunctionMembershipProof,
@@ -57,6 +58,12 @@ import {
 import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js';
 import { type ArchiverConfig } from './config.js';
 import {
retrieveBlockFromRollup, retrieveL1ToL2Messages } from './data_retrieval.js'; +import { + getEpochNumberAtTimestamp, + getSlotAtTimestamp, + getSlotRangeForEpoch, + getTimestampRangeForEpoch, +} from './epoch_helpers.js'; import { ArchiverInstrumentation } from './instrumentation.js'; import { type DataRetrieval } from './structs/data_retrieval.js'; import { type L1Published } from './structs/published.js'; @@ -82,6 +89,9 @@ export class Archiver implements ArchiveSource { private store: ArchiverStoreHelper; + public l1BlockNumber: bigint | undefined; + public l1Timestamp: bigint | undefined; + /** * Creates a new instance of the Archiver. * @param publicClient - A client for interacting with the Ethereum node. @@ -98,9 +108,9 @@ export class Archiver implements ArchiveSource { readonly inboxAddress: EthAddress, private readonly registryAddress: EthAddress, readonly dataStore: ArchiverDataStore, - private readonly pollingIntervalMs = 10_000, + private readonly pollingIntervalMs: number, private readonly instrumentation: ArchiverInstrumentation, - private readonly l1StartBlock: bigint = 0n, + private readonly l1constants: L1RollupConstants = EmptyL1RollupConstants, private readonly log: DebugLogger = createDebugLogger('aztec:archiver'), ) { this.store = new ArchiverStoreHelper(dataStore); @@ -144,7 +154,10 @@ export class Archiver implements ArchiveSource { client: publicClient, }); - const l1StartBlock = await rollup.read.L1_BLOCK_AT_GENESIS(); + const [l1StartBlock, l1GenesisTime] = await Promise.all([ + rollup.read.L1_BLOCK_AT_GENESIS(), + rollup.read.GENESIS_TIME(), + ] as const); const archiver = new Archiver( publicClient, @@ -152,9 +165,9 @@ export class Archiver implements ArchiveSource { config.l1Contracts.inboxAddress, config.l1Contracts.registryAddress, archiverStore, - config.archiverPollingIntervalMS, + config.archiverPollingIntervalMS ?? 10_000, new ArchiverInstrumentation(telemetry), - BigInt(l1StartBlock), + { l1StartBlock, l1GenesisTime }, ); await archiver.start(blockUntilSynced); return archiver; @@ -206,8 +219,8 @@ export class Archiver implements ArchiveSource { * * This code does not handle reorgs. */ - const { blocksSynchedTo = this.l1StartBlock, messagesSynchedTo = this.l1StartBlock } = - await this.store.getSynchPoint(); + const { l1StartBlock } = this.l1constants; + const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint(); const currentL1BlockNumber = await this.publicClient.getBlockNumber(); // ********** Ensuring Consistency of data pulled from L1 ********** @@ -234,6 +247,12 @@ export class Archiver implements ArchiveSource { // ********** Events that are processed per L2 block ********** await this.handleL2blocks(blockUntilSynced, blocksSynchedTo, currentL1BlockNumber); + + // Store latest l1 block number and timestamp seen. Used for epoch and slots calculations. + if (!this.l1BlockNumber || this.l1BlockNumber < currentL1BlockNumber) { + this.l1Timestamp = await this.publicClient.getBlock({ blockNumber: currentL1BlockNumber }).then(b => b.timestamp); + this.l1BlockNumber = currentL1BlockNumber; + } } private async handleL1ToL2Messages( @@ -421,6 +440,68 @@ export class Archiver implements ArchiveSource { return Promise.resolve(this.registryAddress); } + public getL1BlockNumber(): bigint { + const l1BlockNumber = this.l1BlockNumber; + if (!l1BlockNumber) { + throw new Error('L1 block number not yet available. 
Complete an initial sync first.');
+    }
+    return l1BlockNumber;
+  }
+
+  public getL1Timestamp(): bigint {
+    const l1Timestamp = this.l1Timestamp;
+    if (!l1Timestamp) {
+      throw new Error('L1 timestamp not yet available. Complete an initial sync first.');
+    }
+    return l1Timestamp;
+  }
+
+  public getL2SlotNumber(): Promise<bigint> {
+    return Promise.resolve(getSlotAtTimestamp(this.getL1Timestamp(), this.l1constants));
+  }
+
+  public getL2EpochNumber(): Promise<bigint> {
+    return Promise.resolve(getEpochNumberAtTimestamp(this.getL1Timestamp(), this.l1constants));
+  }
+
+  public async getBlocksForEpoch(epochNumber: bigint): Promise<L2Block[]> {
+    const [start, end] = getSlotRangeForEpoch(epochNumber);
+    const blocks: L2Block[] = [];
+
+    // Walk the list of blocks backwards and filter by slots matching the requested epoch.
+    // We'll typically ask for blocks for a very recent epoch, so we shouldn't need an index here.
+    let block = await this.getBlock(await this.store.getSynchedL2BlockNumber());
+    const slot = (b: L2Block) => b.header.globalVariables.slotNumber.toBigInt();
+    while (block && slot(block) >= start) {
+      if (slot(block) <= end) {
+        blocks.push(block);
+      }
+      block = await this.getBlock(block.number - 1);
+    }
+
+    return blocks;
+  }
+
+  public async isEpochComplete(epochNumber: bigint): Promise<boolean> {
+    // The epoch is complete if the current L2 block is the last one in the epoch (or later)
+    const header = await this.getBlockHeader('latest');
+    const slot = header?.globalVariables.slotNumber.toBigInt();
+    const [_startSlot, endSlot] = getSlotRangeForEpoch(epochNumber);
+    if (slot && slot >= endSlot) {
+      return true;
+    }
+
+    // If not, the epoch may also be complete if the L2 slot has passed without a block
+    // We compute this based on the timestamp for the given epoch and the timestamp of the last L1 block
+    const l1Timestamp = this.getL1Timestamp();
+    const [_startTimestamp, endTimestamp] = getTimestampRangeForEpoch(epochNumber, this.l1constants);
+
+    // For this computation, we throw in a few extra seconds just for good measure,
+    // since we know the next L1 block won't be mined within this range
+    const leeway = 3n;
+    return l1Timestamp + leeway >= endTimestamp;
+  }
+
   /**
    * Gets up to `limit` amount of L2 blocks starting from `from`.
    * @param from - Number of the first block to return (inclusive).
@@ -452,6 +533,14 @@ export class Archiver implements ArchiveSource {
     return blocks.length === 0 ? undefined : blocks[0].data;
   }
+
+  public async getBlockHeader(number: number | 'latest'): Promise<Header | undefined> {
+    if (number === 'latest') {
+      number = await this.store.getSynchedL2BlockNumber();
+    }
+    const headers = await this.store.getBlockHeaders(number, 1);
+    return headers.length === 0 ? undefined : headers[0];
+  }
+
   public getTxEffect(txHash: TxHash): Promise<TxEffect | undefined> {
     return this.store.getTxEffect(txHash);
   }
@@ -735,11 +824,12 @@ class ArchiverStoreHelper
   getBlocks(from: number, limit: number): Promise<L1Published<L2Block>[]> {
     return this.store.getBlocks(from, limit);
   }
-
+  getBlockHeaders(from: number, limit: number): Promise<Header[]> {
+    return this.store.getBlockHeaders(from, limit);
+  }
   getTxEffect(txHash: TxHash): Promise<TxEffect | undefined> {
     return this.store.getTxEffect(txHash);
   }
-
   getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
     return this.store.getSettledTxReceipt(txHash);
   }
@@ -805,3 +895,13 @@ class ArchiverStoreHelper
     return this.store.getTotalL1ToL2MessageCount();
   }
 }
+
+type L1RollupConstants = {
+  l1StartBlock: bigint;
+  l1GenesisTime: bigint;
+};
+
+const EmptyL1RollupConstants: L1RollupConstants = {
+  l1StartBlock: 0n,
+  l1GenesisTime: 0n,
+};
diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts
index b181c44db39..af04befb3cc 100644
--- a/yarn-project/archiver/src/archiver/archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/archiver_store.ts
@@ -10,7 +10,7 @@ import {
   type TxHash,
   type TxReceipt,
 } from '@aztec/circuit-types';
-import { type Fr } from '@aztec/circuits.js';
+import { type Fr, type Header } from '@aztec/circuits.js';
 import { type ContractArtifact } from '@aztec/foundation/abi';
 import { type AztecAddress } from '@aztec/foundation/aztec-address';
 import {
@@ -64,6 +64,14 @@ export interface ArchiverDataStore {
    */
   getBlocks(from: number, limit: number): Promise<L1Published<L2Block>[]>;
 
+  /**
+   * Gets up to `limit` amount of L2 block headers starting from `from`.
+   * @param from - Number of the first block to return (inclusive).
+   * @param limit - The number of blocks to return.
+   * @returns The requested L2 block headers.
+   */
+  getBlockHeaders(from: number, limit: number): Promise<Header[]>;
+
   /**
    * Gets a tx effect.
    * @param txHash - The txHash of the tx corresponding to the tx effect.
diff --git a/yarn-project/archiver/src/archiver/epoch_helpers.ts b/yarn-project/archiver/src/archiver/epoch_helpers.ts
new file mode 100644
index 00000000000..2612329dd06
--- /dev/null
+++ b/yarn-project/archiver/src/archiver/epoch_helpers.ts
@@ -0,0 +1,26 @@
+import { AZTEC_EPOCH_DURATION, AZTEC_SLOT_DURATION } from '@aztec/circuits.js';
+
+/** Returns the slot number for a given timestamp. */
+export function getSlotAtTimestamp(ts: bigint, constants: { l1GenesisTime: bigint }) {
+  return ts < constants.l1GenesisTime ? 0n : (ts - constants.l1GenesisTime) / BigInt(AZTEC_SLOT_DURATION);
+}
+
+/** Returns the epoch number for a given timestamp. */
+export function getEpochNumberAtTimestamp(ts: bigint, constants: { l1GenesisTime: bigint }) {
+  return getSlotAtTimestamp(ts, constants) / BigInt(AZTEC_EPOCH_DURATION);
+}
+
+/** Returns the range of slots (inclusive) for a given epoch number. */
+export function getSlotRangeForEpoch(epochNumber: bigint) {
+  const startSlot = epochNumber * BigInt(AZTEC_EPOCH_DURATION);
+  return [startSlot, startSlot + BigInt(AZTEC_EPOCH_DURATION) - 1n];
+}
+
+/** Returns the range of L1 timestamps (inclusive) for a given epoch number. */
+export function getTimestampRangeForEpoch(epochNumber: bigint, constants: { l1GenesisTime: bigint }) {
+  const [startSlot, endSlot] = getSlotRangeForEpoch(epochNumber);
+  return [
+    constants.l1GenesisTime + startSlot * BigInt(AZTEC_SLOT_DURATION),
+    constants.l1GenesisTime + endSlot * BigInt(AZTEC_SLOT_DURATION),
+  ];
+}
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
index 7c650c0e1d2..a4a3ba3281c 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
@@ -139,6 +139,18 @@ export class BlockStore {
     return this.getBlockFromBlockStorage(blockStorage);
   }
 
+  /**
+   * Gets the headers for a sequence of L2 blocks.
+   * @param start - Number of the first block to return (inclusive).
+   * @param limit - The number of blocks to return.
+   * @returns The requested L2 block headers
+   */
+  *getBlockHeaders(start: number, limit: number): IterableIterator<Header> {
+    for (const blockStorage of this.#blocks.values(this.#computeBlockRange(start, limit))) {
+      yield Header.fromBuffer(blockStorage.header);
+    }
+  }
+
   private getBlockFromBlockStorage(blockStorage: BlockStorage) {
     const header = Header.fromBuffer(blockStorage.header);
     const archive = AppendOnlyTreeSnapshot.fromBuffer(blockStorage.archive);
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
index baebf6efc64..37a2d7d11f4 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
@@ -10,7 +10,7 @@ import {
   type TxHash,
   type TxReceipt,
 } from '@aztec/circuit-types';
-import { type Fr } from '@aztec/circuits.js';
+import { type Fr, type Header } from '@aztec/circuits.js';
 import { type ContractArtifact } from '@aztec/foundation/abi';
 import { type AztecAddress } from '@aztec/foundation/aztec-address';
 import { createDebugLogger } from '@aztec/foundation/log';
@@ -136,6 +136,22 @@ export class KVArchiverDataStore implements ArchiverDataStore {
     }
   }
 
+  /**
+   * Gets up to `limit` amount of L2 block headers starting from `from`.
+   *
+   * @param start - Number of the first block to return (inclusive).
+   * @param limit - The number of blocks to return.
+   * @returns The requested L2 block headers
+   */
+  getBlockHeaders(start: number, limit: number): Promise<Header[]> {
+    try {
+      return Promise.resolve(Array.from(this.#blockStore.getBlockHeaders(start, limit)));
+    } catch (err) {
+      // this function is sync, so if any errors are thrown we need to make sure they're passed on as rejected Promises
+      return Promise.reject(err);
+    }
+  }
+
   /**
    * Gets a tx effect.
    * @param txHash - The txHash of the tx corresponding to the tx effect.
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
index df06ee022de..6bb14927baa 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
@@ -15,7 +15,7 @@ import {
   TxReceipt,
   type UnencryptedL2BlockL2Logs,
 } from '@aztec/circuit-types';
-import { Fr, INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js';
+import { Fr, type Header, INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js';
 import { type ContractArtifact } from '@aztec/foundation/abi';
 import { type AztecAddress } from '@aztec/foundation/aztec-address';
 import {
@@ -262,7 +262,6 @@ export class MemoryArchiverStore implements ArchiverDataStore {
    * @remarks When "from" is smaller than genesis block number, blocks from the beginning are returned.
    */
   public getBlocks(from: number, limit: number): Promise<L1Published<L2Block>[]> {
-    // Return an empty array if we are outside of range
     if (limit < 1) {
       return Promise.reject(new Error(`Invalid limit: ${limit}`));
     }
@@ -280,6 +279,11 @@ export class MemoryArchiverStore implements ArchiverDataStore {
     return Promise.resolve(this.l2Blocks.slice(fromIndex, toIndex));
   }
 
+  public async getBlockHeaders(from: number, limit: number): Promise<Header[]> {
+    const blocks = await this.getBlocks(from, limit);
+    return blocks.map(block => block.data.header);
+  }
+
   /**
    * Gets a tx effect.
    * @param txHash - The txHash of the tx effect.
diff --git a/yarn-project/archiver/src/test/index.ts b/yarn-project/archiver/src/test/index.ts new file mode 100644 index 00000000000..5ff05e2acb4 --- /dev/null +++ b/yarn-project/archiver/src/test/index.ts @@ -0,0 +1 @@ +export * from './mock_l2_block_source.js'; diff --git a/yarn-project/p2p/src/client/mocks.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts similarity index 81% rename from yarn-project/p2p/src/client/mocks.ts rename to yarn-project/archiver/src/test/mock_l2_block_source.ts index 1783c83a8bd..4206f29c4fe 100644 --- a/yarn-project/p2p/src/client/mocks.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -1,5 +1,7 @@ import { L2Block, type L2BlockSource, type TxEffect, type TxHash, TxReceipt, TxStatus } from '@aztec/circuit-types'; -import { EthAddress } from '@aztec/circuits.js'; +import { EthAddress, type Header } from '@aztec/circuits.js'; + +import { getSlotRangeForEpoch } from '../archiver/epoch_helpers.js'; /** * A mocked implementation of L2BlockSource to be used in p2p tests. @@ -85,6 +87,19 @@ export class MockBlockSource implements L2BlockSource { ); } + getBlockHeader(number: number | 'latest'): Promise
{ + return Promise.resolve(this.l2Blocks.at(typeof number === 'number' ? number : -1)?.header); + } + + getBlocksForEpoch(epochNumber: bigint): Promise { + const [start, end] = getSlotRangeForEpoch(epochNumber); + const blocks = this.l2Blocks.filter(b => { + const slot = b.header.globalVariables.slotNumber.toBigInt(); + return slot >= start && slot <= end; + }); + return Promise.resolve(blocks); + } + /** * Gets a tx effect. * @param txHash - The hash of a transaction which resulted in the returned tx effect. @@ -120,6 +135,18 @@ export class MockBlockSource implements L2BlockSource { return Promise.resolve(undefined); } + getL2EpochNumber(): Promise { + throw new Error('Method not implemented.'); + } + + getL2SlotNumber(): Promise { + throw new Error('Method not implemented.'); + } + + isEpochComplete(_epochNumber: bigint): Promise { + throw new Error('Method not implemented.'); + } + /** * Starts the block source. In this mock implementation, this is a noop. * @returns A promise that signals the initialization of the l2 block source on completion. diff --git a/yarn-project/aztec.js/src/utils/cheat_codes.ts b/yarn-project/aztec.js/src/utils/cheat_codes.ts index f37e256ff7b..e955cf858c6 100644 --- a/yarn-project/aztec.js/src/utils/cheat_codes.ts +++ b/yarn-project/aztec.js/src/utils/cheat_codes.ts @@ -1,31 +1,46 @@ -import { type Note, type PXE } from '@aztec/circuit-types'; -import { type AztecAddress, type EthAddress, Fr } from '@aztec/circuits.js'; +import { type EpochProofClaim, type Note, type PXE } from '@aztec/circuit-types'; +import { AZTEC_EPOCH_DURATION, AZTEC_SLOT_DURATION, type AztecAddress, EthAddress, Fr } from '@aztec/circuits.js'; import { deriveStorageSlotInMap } from '@aztec/circuits.js/hash'; +import { type L1ContractAddresses } from '@aztec/ethereum'; import { toBigIntBE, toHex } from '@aztec/foundation/bigint-buffer'; import { keccak256 } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; +import { RollupAbi } from '@aztec/l1-artifacts'; import fs from 'fs'; +import { + type GetContractReturnType, + type Hex, + type PublicClient, + type WalletClient, + createWalletClient, + getContract, + http, + publicActions, +} from 'viem'; +import { foundry } from 'viem/chains'; /** * A class that provides utility functions for interacting with the chain. */ export class CheatCodes { constructor( - /** - * The cheat codes for ethereum (L1). - */ + /** Cheat codes for L1.*/ public eth: EthCheatCodes, - /** - * The cheat codes for aztec. - */ + /** Cheat codes for Aztec L2. */ public aztec: AztecCheatCodes, + /** Cheat codes for the Aztec Rollup contract on L1. */ + public rollup: RollupCheatCodes, ) {} - static create(rpcUrl: string, pxe: PXE): CheatCodes { + static async create(rpcUrl: string, pxe: PXE): Promise { const ethCheatCodes = new EthCheatCodes(rpcUrl); const aztecCheatCodes = new AztecCheatCodes(pxe, ethCheatCodes); - return new CheatCodes(ethCheatCodes, aztecCheatCodes); + const rollupCheatCodes = new RollupCheatCodes( + ethCheatCodes, + await pxe.getNodeInfo().then(n => n.l1ContractAddresses), + ); + return new CheatCodes(ethCheatCodes, aztecCheatCodes, rollupCheatCodes); } } @@ -227,7 +242,7 @@ export class EthCheatCodes { * Send transactions impersonating an externally owned account or contract. 
* @param who - The address to impersonate */ - public async startImpersonating(who: EthAddress): Promise { + public async startImpersonating(who: EthAddress | Hex): Promise { const res = await this.rpcCall('hardhat_impersonateAccount', [who.toString()]); if (res.error) { throw new Error(`Error impersonating ${who}: ${res.error.message}`); @@ -239,7 +254,7 @@ export class EthCheatCodes { * Stop impersonating an account that you are currently impersonating. * @param who - The address to stop impersonating */ - public async stopImpersonating(who: EthAddress): Promise { + public async stopImpersonating(who: EthAddress | Hex): Promise { const res = await this.rpcCall('hardhat_stopImpersonatingAccount', [who.toString()]); if (res.error) { throw new Error(`Error when stopping the impersonation of ${who}: ${res.error.message}`); @@ -271,6 +286,96 @@ export class EthCheatCodes { } } +/** Cheat codes for the L1 rollup contract. */ +export class RollupCheatCodes { + private client: WalletClient & PublicClient; + private rollup: GetContractReturnType; + + private logger = createDebugLogger('aztec:js:cheat_codes'); + + constructor(private ethCheatCodes: EthCheatCodes, private addresses: Pick) { + this.client = createWalletClient({ chain: foundry, transport: http(ethCheatCodes.rpcUrl) }).extend(publicActions); + this.rollup = getContract({ + abi: RollupAbi, + address: addresses.rollupAddress.toString(), + client: this.client, + }); + } + + /** Returns the current slot */ + public async getSlot() { + const ts = BigInt((await this.client.getBlock()).timestamp); + return await this.rollup.read.getSlotAt([ts]); + } + + /** Returns the current epoch */ + public async getEpoch() { + const slotNumber = await this.getSlot(); + return await this.rollup.read.getEpochAtSlot([slotNumber]); + } + + /** Warps time in L1 until the next epoch */ + public async advanceToNextEpoch() { + const slot = await this.getSlot(); + const slotsUntilNextEpoch = BigInt(AZTEC_EPOCH_DURATION) - (slot % BigInt(AZTEC_EPOCH_DURATION)) + 1n; + const timeToNextEpoch = slotsUntilNextEpoch * BigInt(AZTEC_SLOT_DURATION); + const l1Timestamp = BigInt((await this.client.getBlock()).timestamp); + await this.ethCheatCodes.warp(Number(l1Timestamp + timeToNextEpoch)); + this.logger.verbose(`Advanced to next epoch`); + } + + /** Returns the current proof claim (if any) */ + public async getProofClaim(): Promise { + // REFACTOR: This code is duplicated from l1-publisher + const [epochToProve, basisPointFee, bondAmount, bondProviderHex, proposerClaimantHex] = + await this.rollup.read.proofClaim(); + + const bondProvider = EthAddress.fromString(bondProviderHex); + const proposerClaimant = EthAddress.fromString(proposerClaimantHex); + + if (bondProvider.isZero() && proposerClaimant.isZero() && epochToProve === 0n) { + return undefined; + } + + return { + epochToProve, + basisPointFee, + bondAmount, + bondProvider, + proposerClaimant, + }; + } + + /** + * Marks the specified block (or latest if none) as proven + * @param maybeBlockNumber - The block number to mark as proven (defaults to latest pending) + */ + public async markAsProven(maybeBlockNumber?: number | bigint) { + const blockNumber = maybeBlockNumber + ? 
BigInt(maybeBlockNumber) + : await this.rollup.read.tips().then(([pending]) => pending); + + await this.asOwner(async account => { + // TODO: Figure out proper typing for viem + await this.rollup.write.setAssumeProvenThroughBlockNumber([blockNumber], { account } as any); + this.logger.verbose(`Marked ${blockNumber} as proven`); + }); + } + + /** + * Executes an action impersonated as the owner of the Rollup contract. + * @param action - The action to execute + */ + public async asOwner( + action: (owner: Hex, rollup: GetContractReturnType) => Promise, + ) { + const owner = await this.rollup.read.owner(); + await this.ethCheatCodes.startImpersonating(owner); + await action(owner, this.rollup); + await this.ethCheatCodes.stopImpersonating(owner); + } +} + /** * A class that provides utility functions for interacting with the aztec chain. */ diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts index ca054a01ec4..8f89b64fda3 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts @@ -84,7 +84,7 @@ export const startProverNode = async ( signalHandlers.push(proverNode.stop); // Automatically start proving unproven blocks - proverNode.start(); + await proverNode.start(); return services; }; diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index 733982bb432..822c2dad05d 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -28,12 +28,13 @@ import type { SequencerConfig } from './configs.js'; import type { L2BlockNumber } from './l2_block_number.js'; import type { NullifierMembershipWitness } from './nullifier_tree.js'; import type { ProverConfig } from './prover-client.js'; +import { type ProverCoordination } from './prover-coordination.js'; /** * The aztec node. * We will probably implement the additional interfaces by means other than Aztec Node as it's currently a privacy leak */ -export interface AztecNode { +export interface AztecNode extends ProverCoordination { /** * Find the index of the given leaf in the given tree. * @param blockNumber - The block number at which to get the data or 'latest' for latest data diff --git a/yarn-project/circuit-types/src/l2_block_source.ts b/yarn-project/circuit-types/src/l2_block_source.ts index 65bcf58d7de..0d9fc597839 100644 --- a/yarn-project/circuit-types/src/l2_block_source.ts +++ b/yarn-project/circuit-types/src/l2_block_source.ts @@ -1,4 +1,4 @@ -import { type EthAddress } from '@aztec/circuits.js'; +import { type EthAddress, type Header } from '@aztec/circuits.js'; import { type L2Block } from './l2_block.js'; import { type TxHash } from './tx/tx_hash.js'; @@ -46,6 +46,13 @@ export interface L2BlockSource { */ getBlock(number: number): Promise; + /** + * Gets an l2 block header. + * @param number - The block number to return or 'latest' for the most recent one. + * @returns The requested L2 block header. + */ + getBlockHeader(number: number | 'latest'): Promise
; + /** * Gets up to `limit` amount of L2 blocks starting from `from`. * @param from - Number of the first block to return (inclusive). @@ -69,6 +76,29 @@ export interface L2BlockSource { */ getSettledTxReceipt(txHash: TxHash): Promise; + /** + * Returns the current L2 slot number based on the current L1 timestamp. + */ + getL2SlotNumber(): Promise; + + /** + * Returns the current L2 epoch number based on the current L1 timestamp. + */ + getL2EpochNumber(): Promise; + + /** + * Returns all blocks for a given epoch. + * @dev Use this method only with recent epochs, since it walks the block list backwards. + * @param epochNumber - The epoch number to return blocks for. + */ + getBlocksForEpoch(epochNumber: bigint): Promise; + + /** + * Returns whether the given epoch is completed on L1, based on the current L1 and L2 block numbers. + * @param epochNumber - The epoch number to check. + */ + isEpochComplete(epochNumber: bigint): Promise; + /** * Starts the L2 block source. * @param blockUntilSynced - If true, blocks until the data source has fully synced. diff --git a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_claim.ts b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_claim.ts new file mode 100644 index 00000000000..3c41f0b00df --- /dev/null +++ b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_claim.ts @@ -0,0 +1,9 @@ +import { type EthAddress } from '@aztec/circuits.js'; + +export type EpochProofClaim = { + epochToProve: bigint; + basisPointFee: bigint; + bondAmount: bigint; + bondProvider: EthAddress; + proposerClaimant: EthAddress; +}; diff --git a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.test.ts b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.test.ts index 9ab6d7172c4..104188a2350 100644 --- a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.test.ts +++ b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.test.ts @@ -4,7 +4,7 @@ import { EpochProofQuotePayload } from './epoch_proof_quote_payload.js'; describe('epoch proof quote', () => { it('should serialize / deserialize', () => { - const payload = EpochProofQuotePayload.fromFields({ + const payload = EpochProofQuotePayload.from({ basisPointFee: 5000, bondAmount: 1000000000000000000n, epochToProve: 42n, diff --git a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts index e7b62c707f9..9555df73920 100644 --- a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts +++ b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts @@ -38,7 +38,7 @@ export class EpochProofQuotePayload { ); } - static fromFields(fields: FieldsOf): EpochProofQuotePayload { + static from(fields: FieldsOf): EpochProofQuotePayload { return new EpochProofQuotePayload( fields.epochToProve, fields.validUntilSlot, diff --git a/yarn-project/circuit-types/src/prover_coordination/index.ts b/yarn-project/circuit-types/src/prover_coordination/index.ts index 331978ec556..33b8a68050b 100644 --- a/yarn-project/circuit-types/src/prover_coordination/index.ts +++ b/yarn-project/circuit-types/src/prover_coordination/index.ts @@ -1,2 +1,3 @@ export * from './epoch_proof_quote.js'; export * from './epoch_proof_quote_payload.js'; +export * from './epoch_proof_claim.js'; diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index d6de4cbaf12..e58867f50e8 100644 --- 
a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -24,9 +24,9 @@ e2e-prover-full: LOCALLY RUN HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-32} ./scripts/e2e_test.sh ./src/e2e_prover/full -e2e-prover-with-padding: +e2e-prover-fake-proofs: LOCALLY - RUN HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-32} ./scripts/e2e_test.sh ./src/e2e_prover/with_padding + RUN FAKE_PROOFS=1 ./scripts/e2e_test.sh ./src/e2e_prover/full e2e-account-contracts: LOCALLY @@ -120,10 +120,6 @@ e2e-private-voting-contract: LOCALLY RUN ./scripts/e2e_test.sh ./src/e2e_private_voting_contract.test.ts -e2e-prover-node: - LOCALLY - RUN ./scripts/e2e_test.sh ./src/e2e_prover_node.test.ts - e2e-fees-private-payments: LOCALLY RUN ./scripts/e2e_test.sh ./src/e2e_fees/private_payments.test.ts diff --git a/yarn-project/end-to-end/scripts/e2e_test.sh b/yarn-project/end-to-end/scripts/e2e_test.sh index 5faec984d16..f826a164000 100755 --- a/yarn-project/end-to-end/scripts/e2e_test.sh +++ b/yarn-project/end-to-end/scripts/e2e_test.sh @@ -10,6 +10,7 @@ set -eu TEST="$1" # Default values for environment variables HARDWARE_CONCURRENCY="${HARDWARE_CONCURRENCY:-}" +FAKE_PROOFS="${FAKE_PROOFS:-}" AZTEC_DOCKER_TAG=$(git rev-parse HEAD) if ! docker image ls --format '{{.Repository}}:{{.Tag}}' | grep -q "aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG"; then @@ -17,4 +18,4 @@ if ! docker image ls --format '{{.Repository}}:{{.Tag}}' | grep -q "aztecprotoco exit 1 fi -docker run -e HARDWARE_CONCURRENCY="$HARDWARE_CONCURRENCY" --rm aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG "$TEST" +docker run -e HARDWARE_CONCURRENCY="$HARDWARE_CONCURRENCY" -e FAKE_PROOFS="$FAKE_PROOFS" --rm aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG "$TEST" diff --git a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts index 9431cf0da32..ba5f1c0f4bf 100644 --- a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts @@ -3,6 +3,7 @@ import { type Archiver, createArchiver } from '@aztec/archiver'; import { type AccountWalletWithSecretKey, type AztecNode, + type CheatCodes, type CompleteAddress, type DebugLogger, type DeployL1Contracts, @@ -76,6 +77,7 @@ export class FullProverTest { tokenSim!: TokenSimulator; aztecNode!: AztecNode; pxe!: PXEService; + cheatCodes!: CheatCodes; private provenComponents: ProvenSetup[] = []; private bbConfigCleanup?: () => Promise; private acvmConfigCleanup?: () => Promise; @@ -116,7 +118,7 @@ export class FullProverTest { // Create the token contract state. // Move this account thing to addAccounts above? 
this.logger.verbose(`Public deploy accounts...`); - await publicDeployAccounts(this.wallets[0], this.accounts.slice(0, 2)); + await publicDeployAccounts(this.wallets[0], this.accounts.slice(0, 2), false); this.logger.verbose(`Deploying TokenContract...`); const asset = await TokenContract.deploy( @@ -156,10 +158,10 @@ export class FullProverTest { aztecNode: this.aztecNode, proverNode: this.simulatedProverNode, deployL1ContractsValues: this.l1Contracts, + cheatCodes: this.cheatCodes, } = this.context); // Configure a full prover PXE - let acvmConfig: Awaited> | undefined; let bbConfig: Awaited> | undefined; if (this.realProofs) { @@ -190,8 +192,13 @@ export class FullProverTest { }); } - this.logger.debug(`Main setup completed, initializing full prover PXE, Node, and Prover Node...`); + this.logger.verbose(`Move to a clean epoch`); + await this.context.cheatCodes.rollup.advanceToNextEpoch(); + + this.logger.verbose(`Marking current block as proven`); + await this.context.cheatCodes.rollup.markAsProven(); + this.logger.verbose(`Main setup completed, initializing full prover PXE, Node, and Prover Node`); for (let i = 0; i < 2; i++) { const result = await setupPXEService( this.aztecNode, @@ -235,7 +242,6 @@ export class FullProverTest { }); this.provenAssets.push(asset); } - this.logger.info(`Full prover PXE started`); // Shutdown the current, simulated prover node @@ -243,7 +249,6 @@ export class FullProverTest { await this.simulatedProverNode.stop(); // Creating temp store and archiver for fully proven prover node - this.logger.verbose('Starting archiver for new prover node'); const archiver = await createArchiver( { ...this.context.aztecNodeConfig, dataDirectory: undefined }, @@ -254,7 +259,7 @@ export class FullProverTest { // The simulated prover node (now shutdown) used private key index 2 const proverNodePrivateKey = getPrivateKeyFromIndex(2); - this.logger.verbose('Starting fully proven prover node'); + this.logger.verbose('Starting prover node'); const proverConfig: ProverNodeConfig = { ...this.context.aztecNodeConfig, proverCoordinationNodeUrl: undefined, @@ -264,15 +269,17 @@ export class FullProverTest { proverAgentConcurrency: 2, publisherPrivateKey: `0x${proverNodePrivateKey!.toString('hex')}`, proverNodeMaxPendingJobs: 100, + proverNodePollingIntervalMs: 100, + quoteProviderBasisPointFee: 100, + quoteProviderBondAmount: 1000n, }; this.proverNode = await createProverNode(proverConfig, { aztecNodeTxProvider: this.aztecNode, archiver: archiver as Archiver, }); - this.proverNode.start(); - - this.logger.info('Prover node started'); + await this.proverNode.start(); + this.logger.warn(`Proofs are now enabled`); return this; } diff --git a/yarn-project/end-to-end/src/e2e_prover/full.test.ts b/yarn-project/end-to-end/src/e2e_prover/full.test.ts index a5afa757502..316b65b42d7 100644 --- a/yarn-project/end-to-end/src/e2e_prover/full.test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/full.test.ts @@ -1,3 +1,7 @@ +import { type AztecAddress, retryUntil } from '@aztec/aztec.js'; + +import '@jest/globals'; + import { FullProverTest } from './e2e_prover_test.js'; const TIMEOUT = 1_800_000; @@ -8,14 +12,19 @@ process.env.AVM_PROVING_STRICT = '1'; describe('full_prover', () => { const realProofs = !['true', '1'].includes(process.env.FAKE_PROOFS ?? 
''); const t = new FullProverTest('full_prover', 1, realProofs); - let { provenAssets, accounts, tokenSim, logger } = t; + + let { provenAssets, accounts, tokenSim, logger, cheatCodes } = t; + let sender: AztecAddress; + let recipient: AztecAddress; beforeAll(async () => { await t.applyBaseSnapshots(); await t.applyMintSnapshot(); await t.setup(); await t.deployVerifier(); - ({ provenAssets, accounts, tokenSim, logger } = t); + + ({ provenAssets, accounts, tokenSim, logger, cheatCodes } = t); + [sender, recipient] = accounts.map(a => a.address); }); afterAll(async () => { @@ -29,47 +38,65 @@ describe('full_prover', () => { it( 'makes both public and private transfers', async () => { - logger.info( - `Starting test using function: ${provenAssets[0].address}:${provenAssets[0].methods.balance_of_private.selector}`, - ); - const privateBalance = await provenAssets[0].methods.balance_of_private(accounts[0].address).simulate(); - const privateSendAmount = privateBalance / 2n; + logger.info(`Starting test for public and private transfer`); + + // Create the two transactions + const privateBalance = await provenAssets[0].methods.balance_of_private(sender).simulate(); + const privateSendAmount = privateBalance / 10n; expect(privateSendAmount).toBeGreaterThan(0n); - const privateInteraction = provenAssets[0].methods.transfer(accounts[1].address, privateSendAmount); + const privateInteraction = provenAssets[0].methods.transfer(recipient, privateSendAmount); - const publicBalance = await provenAssets[1].methods.balance_of_public(accounts[0].address).simulate(); - const publicSendAmount = publicBalance / 2n; + const publicBalance = await provenAssets[1].methods.balance_of_public(sender).simulate(); + const publicSendAmount = publicBalance / 10n; expect(publicSendAmount).toBeGreaterThan(0n); - const publicInteraction = provenAssets[1].methods.transfer_public( - accounts[0].address, - accounts[1].address, - publicSendAmount, - 0, - ); + const publicInteraction = provenAssets[1].methods.transfer_public(sender, recipient, publicSendAmount, 0); + + // Prove them + logger.info(`Proving txs`); const [publicTx, privateTx] = await Promise.all([publicInteraction.prove(), privateInteraction.prove()]); - // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! - logger.info(`Verifying kernel tail to public proof`); + // Verify them + logger.info(`Verifying txs`); await expect(t.circuitProofVerifier?.verifyProof(publicTx)).resolves.not.toThrow(); - - // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! - logger.info(`Verifying private kernel tail proof`); await expect(t.circuitProofVerifier?.verifyProof(privateTx)).resolves.not.toThrow(); - // TODO(palla/prover): The following depends on the epoch boundaries to work. It assumes that we're proving - // 2-block epochs, and a new epoch is starting now, so the 2nd tx will land on the last block of the epoch and - // get proven. That relies on how many blocks we mined before getting here. - // We can make this more robust when we add padding, set 1-block epochs, and rollback the test config to - // have a min of 2 txs per block, so these both land on the same block. 
- logger.info(`Sending first tx and awaiting it to be mined`); - await privateInteraction.send({ skipPublicSimulation: true }).wait({ timeout: 300, interval: 10 }); - logger.info(`Sending second tx and awaiting it to be proven`); - await publicInteraction - .send({ skipPublicSimulation: true }) - .wait({ timeout: 300, interval: 10, proven: true, provenTimeout: 1500 }); - - tokenSim.transferPrivate(accounts[0].address, accounts[1].address, privateSendAmount); - tokenSim.transferPublic(accounts[0].address, accounts[1].address, publicSendAmount); + // Sends the txs to node and awaits them to be mined + logger.info(`Sending txs`); + const sendOpts = { skipPublicSimulation: true }; + const txs = [privateInteraction.send(sendOpts), publicInteraction.send(sendOpts)]; + logger.info(`Awaiting txs to be mined`); + await Promise.all(txs.map(tx => tx.wait({ timeout: 300, interval: 10, proven: false }))); + + // Flag the transfers on the token simulator + tokenSim.transferPrivate(sender, recipient, privateSendAmount); + tokenSim.transferPublic(sender, recipient, publicSendAmount); + + // Warp to the next epoch + const epoch = await cheatCodes.rollup.getEpoch(); + logger.info(`Advancing from epoch ${epoch} to next epoch`); + await cheatCodes.rollup.advanceToNextEpoch(); + + // Wait until the prover node submits a quote + logger.info(`Waiting for prover node to submit quote for epoch ${epoch}`); + await retryUntil(() => t.aztecNode.getEpochProofQuotes(epoch).then(qs => qs.length > 0), 'quote', 60, 1); + + // Send another tx so the sequencer can assemble a block that includes the prover node claim + // so the prover node starts proving + logger.info(`Sending tx to trigger a new block that includes the quote from the prover node`); + await provenAssets[0].methods + .transfer(recipient, privateSendAmount) + .send(sendOpts) + .wait({ timeout: 300, interval: 10 }); + tokenSim.transferPrivate(sender, recipient, privateSendAmount); + + // Expect the block to have a claim + const claim = await cheatCodes.rollup.getProofClaim(); + expect(claim).toBeDefined(); + expect(claim?.epochToProve).toEqual(epoch); + + // And wait for the first pair of txs to be proven + logger.info(`Awaiting proof for the previous epoch`); + await Promise.all(txs.map(tx => tx.wait({ timeout: 300, interval: 10, proven: true, provenTimeout: 1500 }))); }, TIMEOUT, ); @@ -80,11 +107,11 @@ describe('full_prover', () => { return; } - const privateInteraction = t.fakeProofsAsset.methods.transfer(accounts[1].address, 1); - const publicInteraction = t.fakeProofsAsset.methods.transfer_public(accounts[0].address, accounts[1].address, 1, 0); + const privateInteraction = t.fakeProofsAsset.methods.transfer(recipient, 1n); + const publicInteraction = t.fakeProofsAsset.methods.transfer_public(sender, recipient, 1n, 0); - const sentPrivateTx = privateInteraction.send(); - const sentPublicTx = publicInteraction.send(); + const sentPrivateTx = privateInteraction.send({ skipPublicSimulation: true }); + const sentPublicTx = publicInteraction.send({ skipPublicSimulation: true }); const results = await Promise.allSettled([ sentPrivateTx.wait({ timeout: 10, interval: 0.1 }), diff --git a/yarn-project/end-to-end/src/e2e_prover/with_padding.test.ts b/yarn-project/end-to-end/src/e2e_prover/with_padding.test.ts deleted file mode 100644 index 30e507012ab..00000000000 --- a/yarn-project/end-to-end/src/e2e_prover/with_padding.test.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { FullProverTest } from './e2e_prover_test.js'; - -const TIMEOUT = 1_800_000; - 
-describe('full_prover_with_padding_tx', () => { - const t = new FullProverTest('full_prover_with_padding_tx', 1); - let { provenAssets, accounts, tokenSim, logger } = t; - - beforeAll(async () => { - await t.applyBaseSnapshots(); - await t.applyMintSnapshot(); - await t.setup(); - // await t.deployVerifier(); - ({ provenAssets, accounts, tokenSim, logger } = t); - }); - - afterAll(async () => { - await t.teardown(); - }); - - afterEach(async () => { - await t.tokenSim.check(); - }); - - it( - 'makes a private transfers and pads the block with a padding tx', - async () => { - logger.info( - `Starting test using function: ${provenAssets[0].address}:${provenAssets[0].methods.balance_of_private.selector}`, - ); - const privateBalance = await provenAssets[0].methods.balance_of_private(accounts[0].address).simulate(); - const privateSendAmount = privateBalance / 2n; - expect(privateSendAmount).toBeGreaterThan(0n); - const privateInteraction = provenAssets[0].methods.transfer(accounts[1].address, privateSendAmount); - - const privateTx = await privateInteraction.prove(); - - // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! - logger.info(`Verifying private kernel tail proof`); - await expect(t.circuitProofVerifier?.verifyProof(privateTx)).resolves.not.toThrow(); - - const sentPrivateTx = privateInteraction.send(); - await sentPrivateTx.wait({ timeout: 1200, interval: 10 }); - tokenSim.transferPrivate(accounts[0].address, accounts[1].address, privateSendAmount); - }, - TIMEOUT, - ); -}); diff --git a/yarn-project/end-to-end/src/e2e_prover_node.test.ts b/yarn-project/end-to-end/src/e2e_prover_node.test.ts deleted file mode 100644 index 5a0a5e3f705..00000000000 --- a/yarn-project/end-to-end/src/e2e_prover_node.test.ts +++ /dev/null @@ -1,165 +0,0 @@ -import { getSchnorrAccount } from '@aztec/accounts/schnorr'; -import { type Archiver, retrieveL2ProofVerifiedEvents } from '@aztec/archiver'; -import { - type AccountWalletWithSecretKey, - type AztecAddress, - type DebugLogger, - EthAddress, - Fr, - SignerlessWallet, - computeSecretHash, - createDebugLogger, - sleep, -} from '@aztec/aztec.js'; -import { StatefulTestContract, TestContract } from '@aztec/noir-contracts.js'; -import { type ProverNodeConfig, createProverNode } from '@aztec/prover-node'; - -import { sendL1ToL2Message } from './fixtures/l1_to_l2_messaging.js'; -import { - type ISnapshotManager, - type SubsystemsContext, - addAccounts, - createSnapshotManager, -} from './fixtures/snapshot_manager.js'; -import { waitForProvenChain } from './fixtures/utils.js'; - -// Tests simple block building with a sequencer that does not upload proofs to L1, -// and then follows with a prover node run (with real proofs disabled, but -// still simulating all circuits via a prover-client), in order to test -// the coordination through L1 between the sequencer and the prover node. 
-describe('e2e_prover_node', () => { - let ctx: SubsystemsContext; - let wallet: AccountWalletWithSecretKey; - let recipient: AztecAddress; - let contract: StatefulTestContract; - let msgTestContract: TestContract; - - let logger: DebugLogger; - let snapshotManager: ISnapshotManager; - - const msgContent: Fr = Fr.fromString('0xcafe'); - const msgSecret: Fr = Fr.fromString('0xfeca'); - - beforeAll(async () => { - logger = createDebugLogger('aztec:e2e_prover_node'); - snapshotManager = createSnapshotManager(`e2e_prover_node`, process.env.E2E_DATA_PATH, undefined, { - assumeProvenThrough: undefined, - }); - - const testContractOpts = { contractAddressSalt: Fr.ONE, universalDeploy: true }; - await snapshotManager.snapshot( - 'send-l1-to-l2-msg', - async ctx => { - const testContract = TestContract.deploy(new SignerlessWallet(ctx.pxe)).getInstance(testContractOpts); - const msgHash = await sendL1ToL2Message( - { recipient: testContract.address, content: msgContent, secretHash: computeSecretHash(msgSecret) }, - ctx.deployL1ContractsValues, - ); - return { msgHash }; - }, - async (_data, ctx) => { - msgTestContract = await TestContract.deploy(new SignerlessWallet(ctx.pxe)).register(testContractOpts); - }, - ); - - await snapshotManager.snapshot('setup', addAccounts(2, logger, false), async ({ accountKeys }, ctx) => { - const accountManagers = accountKeys.map(ak => getSchnorrAccount(ctx.pxe, ak[0], ak[1], 1)); - await Promise.all(accountManagers.map(a => a.register())); - const wallets = await Promise.all(accountManagers.map(a => a.getWallet())); - wallets.forEach((w, i) => logger.verbose(`Wallet ${i} address: ${w.getAddress()}`)); - wallet = wallets[0]; - recipient = wallets[1].getAddress(); - }); - - await snapshotManager.snapshot( - 'deploy-test-contract', - async () => { - logger.info(`Deploying test contract`); - const owner = wallet.getAddress(); - const contract = await StatefulTestContract.deploy(wallet, owner, owner, 42).send().deployed(); - return { contractAddress: contract.address }; - }, - async ({ contractAddress }) => { - contract = await StatefulTestContract.at(contractAddress, wallet); - }, - ); - - ctx = await snapshotManager.setup(); - }); - - it('submits five blocks, then prover proves the first two epochs', async () => { - // wait for the proven chain to catch up with the pending chain before we shut off the prover node - logger.info(`Waiting for proven chain to catch up with pending chain`); - await waitForProvenChain(ctx.aztecNode); - - // Stop the current prover node - await ctx.proverNode.stop(); - - logger.info(`Sending txs`); - const msgSender = ctx.deployL1ContractsValues.walletClient.account.address; - const txReceipt1 = await msgTestContract.methods - .consume_message_from_arbitrary_sender_private(msgContent, msgSecret, EthAddress.fromString(msgSender)) - .send() - .wait(); - logger.info(`Tx #1 ${txReceipt1.txHash} mined in ${txReceipt1.blockNumber}`); - const txReceipt2 = await contract.methods.create_note(recipient, recipient, 10).send().wait(); - logger.info(`Tx #2 ${txReceipt2.txHash} mined in ${txReceipt2.blockNumber}`); - const txReceipt3 = await contract.methods.increment_public_value(recipient, 20).send().wait(); - logger.info(`Tx #3 ${txReceipt3.txHash} mined in ${txReceipt3.blockNumber}`); - const txReceipt4 = await contract.methods.create_note(recipient, recipient, 30).send().wait(); - logger.info(`Tx #4 ${txReceipt4.txHash} mined in ${txReceipt4.blockNumber}`); - const txReceipt5 = await contract.methods.increment_public_value(recipient, 40).send().wait(); 
- logger.info(`Tx #5 ${txReceipt5.txHash} mined in ${txReceipt5.blockNumber}`); - - // Check everything went well during setup and txs were mined in different blocks - const startBlock = txReceipt1.blockNumber!; - expect(txReceipt2.blockNumber).toEqual(startBlock + 1); - expect(txReceipt3.blockNumber).toEqual(startBlock + 2); - expect(txReceipt4.blockNumber).toEqual(startBlock + 3); - expect(txReceipt5.blockNumber).toEqual(startBlock + 4); - expect(await contract.methods.get_public_value(recipient).simulate()).toEqual(60n); - expect(await contract.methods.summed_values(recipient).simulate()).toEqual(40n); - - // Kick off a prover node - await sleep(1000); - const proverId = Fr.fromString(Buffer.from('awesome-prover', 'utf-8').toString('hex')); - logger.info(`Creating prover node with prover id ${proverId.toString()}`); - // HACK: We have to use the existing archiver to fetch L2 data, since anvil's chain dump/load used by the - // snapshot manager does not include events nor txs, so a new archiver would not "see" old blocks. - const proverConfig: ProverNodeConfig = { - ...ctx.aztecNodeConfig, - proverCoordinationNodeUrl: undefined, - dataDirectory: undefined, - proverId, - proverNodeMaxPendingJobs: 100, - proverNodeEpochSize: 2, - }; - const archiver = ctx.aztecNode.getBlockSource() as Archiver; - const proverNode = await createProverNode(proverConfig, { aztecNodeTxProvider: ctx.aztecNode, archiver }); - - // Prove the first two epochs simultaneously - logger.info(`Starting proof for first epoch ${startBlock}-${startBlock + 1}`); - await proverNode.startProof(startBlock, startBlock + 1); - logger.info(`Starting proof for second epoch ${startBlock + 2}-${startBlock + 3}`); - await proverNode.startProof(startBlock + 2, startBlock + 3); - - // Confirm that we cannot go back to prove an old one - await expect(proverNode.startProof(startBlock, startBlock + 1)).rejects.toThrow(/behind the current world state/i); - - // Await until proofs get submitted - await waitForProvenChain(ctx.aztecNode, startBlock + 3); - expect(await ctx.aztecNode.getProvenBlockNumber()).toEqual(startBlock + 3); - - // Check that the prover id made it to the emitted event - const { publicClient, l1ContractAddresses } = ctx.deployL1ContractsValues; - const logs = await retrieveL2ProofVerifiedEvents(publicClient, l1ContractAddresses.rollupAddress, 1n); - - // Logs for first epoch - expect(logs[logs.length - 2].l2BlockNumber).toEqual(BigInt(startBlock + 1)); - expect(logs[logs.length - 2].proverId.toString()).toEqual(proverId.toString()); - - // Logs for 2nd epoch - expect(logs[logs.length - 1].l2BlockNumber).toEqual(BigInt(startBlock + 3)); - expect(logs[logs.length - 1].proverId.toString()).toEqual(proverId.toString()); - }); -}); diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 2155c660e7a..1972e17960e 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -6,6 +6,7 @@ import { type AztecAddress, type AztecNode, BatchCall, + CheatCodes, type CompleteAddress, type DebugLogger, type DeployL1Contracts, @@ -52,6 +53,7 @@ export type SubsystemsContext = { deployL1ContractsValues: DeployL1Contracts; proverNode: ProverNode; watcher: AnvilTestWatcher; + cheatCodes: CheatCodes; }; type SnapshotEntry = { @@ -267,13 +269,16 @@ export async function createAndSyncProverNode( realProofs: false, proverAgentConcurrency: 2, publisherPrivateKey: proverNodePrivateKey, - 
proverNodeMaxPendingJobs: 100, + proverNodeMaxPendingJobs: 10, + proverNodePollingIntervalMs: 200, + quoteProviderBasisPointFee: 100, + quoteProviderBondAmount: 1000n, }; const proverNode = await createProverNode(proverConfig, { aztecNodeTxProvider: aztecNode, archiver: archiver as Archiver, }); - proverNode.start(); + await proverNode.start(); return proverNode; } @@ -367,6 +372,8 @@ async function setupFromFresh( pxeConfig.dataDirectory = statePath; const pxe = await createPXEService(aztecNode, pxeConfig); + const cheatCodes = await CheatCodes.create(aztecNodeConfig.l1RpcUrl, pxe); + logger.verbose('Deploying auth registry...'); await deployCanonicalAuthRegistry( new SignerlessWallet(pxe, new DefaultMultiCallEntrypoint(aztecNodeConfig.l1ChainId, aztecNodeConfig.version)), @@ -390,6 +397,7 @@ async function setupFromFresh( deployL1ContractsValues, proverNode, watcher, + cheatCodes, }; } @@ -455,6 +463,8 @@ async function setupFromState(statePath: string, logger: Logger): Promise ('address' in a ? a.address : a)); const instances = await Promise.all(accountAddressesToDeploy.map(account => sender.getContractInstance(account))); diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 5534791b2ed..d13ba870ca2 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -241,7 +241,7 @@ async function setupWithRemoteEnvironment( walletClient, publicClient, }; - const cheatCodes = CheatCodes.create(config.l1RpcUrl, pxeClient!); + const cheatCodes = await CheatCodes.create(config.l1RpcUrl, pxeClient!); const teardown = () => Promise.resolve(); const { l1ChainId: chainId, protocolVersion } = await pxeClient.getNodeInfo(); @@ -462,7 +462,7 @@ export async function setup( } const wallets = numberOfAccounts > 0 ? 
await createAccounts(pxe, numberOfAccounts) : []; - const cheatCodes = CheatCodes.create(config.l1RpcUrl, pxe!); + const cheatCodes = await CheatCodes.create(config.l1RpcUrl, pxe!); const teardown = async () => { if (aztecNode instanceof AztecNodeService) { diff --git a/yarn-project/end-to-end/src/guides/dapp_testing.test.ts b/yarn-project/end-to-end/src/guides/dapp_testing.test.ts index bc5a5da0390..c6454eb24a4 100644 --- a/yarn-project/end-to-end/src/guides/dapp_testing.test.ts +++ b/yarn-project/end-to-end/src/guides/dapp_testing.test.ts @@ -149,7 +149,7 @@ describe('guides/dapp/testing', () => { await token.methods.redeem_shield(ownerAddress, 100n, secret).send().wait(); // docs:start:calc-slot - cheats = CheatCodes.create(ETHEREUM_HOST, pxe); + cheats = await CheatCodes.create(ETHEREUM_HOST, pxe); // The balances mapping is indexed by user address ownerSlot = cheats.aztec.computeSlotInMap(TokenContract.storage.balances.slot, ownerAddress); // docs:end:calc-slot diff --git a/yarn-project/end-to-end/src/prover-coordination/e2e_json_coordination.test.ts b/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts similarity index 99% rename from yarn-project/end-to-end/src/prover-coordination/e2e_json_coordination.test.ts rename to yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts index 1a3daa2a931..18390ba385a 100644 --- a/yarn-project/end-to-end/src/prover-coordination/e2e_json_coordination.test.ts +++ b/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts @@ -33,7 +33,7 @@ import { createSnapshotManager, } from '../fixtures/snapshot_manager.js'; -describe('e2e_json_coordination', () => { +describe('e2e_prover_coordination', () => { let ctx: SubsystemsContext; let wallet: AccountWalletWithSecretKey; let recipient: AztecAddress; diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 7a0bc86efe2..146a4724b00 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -91,8 +91,7 @@ export type EnvVar = | 'PROVER_ID' | 'PROVER_JOB_POLL_INTERVAL_MS' | 'PROVER_JOB_TIMEOUT_MS' - | 'PROVER_NODE_DISABLE_AUTOMATIC_PROVING' - | 'PROVER_NODE_EPOCH_SIZE' + | 'PROVER_NODE_POLLING_INTERVAL_MS' | 'PROVER_NODE_MAX_PENDING_JOBS' | 'PROVER_PUBLISH_RETRY_INTERVAL_MS' | 'PROVER_PUBLISHER_PRIVATE_KEY' @@ -103,6 +102,8 @@ export type EnvVar = | 'PXE_DATA_DIRECTORY' | 'PXE_L2_STARTING_BLOCK' | 'PXE_PROVER_ENABLED' + | 'QUOTE_PROVIDER_BASIS_POINT_FEE' + | 'QUOTE_PROVIDER_BOND_AMOUNT' | 'REGISTRY_CONTRACT_ADDRESS' | 'ROLLUP_CONTRACT_ADDRESS' | 'SEQ_ALLOWED_SETUP_FN' diff --git a/yarn-project/foundation/src/config/index.ts b/yarn-project/foundation/src/config/index.ts index 68b14921785..a075be3fccf 100644 --- a/yarn-project/foundation/src/config/index.ts +++ b/yarn-project/foundation/src/config/index.ts @@ -67,6 +67,18 @@ export function numberConfigHelper(defaultVal: number): Pick { + return { + parseEnv: (val: string) => BigInt(val), + defaultValue: defaultVal, + }; +} + /** * Generates parseEnv for an optional numerical config value. 
*/ diff --git a/yarn-project/p2p/package.json b/yarn-project/p2p/package.json index 5d754eea552..5a4260dc257 100644 --- a/yarn-project/p2p/package.json +++ b/yarn-project/p2p/package.json @@ -86,6 +86,7 @@ "tslib": "^2.4.0" }, "devDependencies": { + "@aztec/archiver": "workspace:^", "@jest/globals": "^29.5.0", "@types/jest": "^29.5.0", "@types/node": "^18.14.6", diff --git a/yarn-project/p2p/src/client/p2p_client.test.ts b/yarn-project/p2p/src/client/p2p_client.test.ts index 8e015b5b74c..3d16237dfb1 100644 --- a/yarn-project/p2p/src/client/p2p_client.test.ts +++ b/yarn-project/p2p/src/client/p2p_client.test.ts @@ -1,3 +1,4 @@ +import { MockBlockSource } from '@aztec/archiver/test'; import { mockEpochProofQuote, mockTx } from '@aztec/circuit-types'; import { retryUntil } from '@aztec/foundation/retry'; import { type AztecKVStore } from '@aztec/kv-store'; @@ -8,7 +9,6 @@ import { expect, jest } from '@jest/globals'; import { type AttestationPool } from '../attestation_pool/attestation_pool.js'; import { type EpochProofQuotePool, type P2PService } from '../index.js'; import { type TxPool } from '../tx_pool/index.js'; -import { MockBlockSource } from './mocks.js'; import { P2PClient } from './p2p_client.js'; /** diff --git a/yarn-project/p2p/src/epoch_proof_quote_pool/test_utils.ts b/yarn-project/p2p/src/epoch_proof_quote_pool/test_utils.ts index baa5f9e2425..a59de8daa3c 100644 --- a/yarn-project/p2p/src/epoch_proof_quote_pool/test_utils.ts +++ b/yarn-project/p2p/src/epoch_proof_quote_pool/test_utils.ts @@ -4,7 +4,7 @@ import { Buffer32 } from '@aztec/foundation/buffer'; import { Secp256k1Signer, randomBigInt, randomInt } from '@aztec/foundation/crypto'; export function makeRandomEpochProofQuotePayload(): EpochProofQuotePayload { - return EpochProofQuotePayload.fromFields({ + return EpochProofQuotePayload.from({ basisPointFee: randomInt(10000), bondAmount: 1000000000000000000n, epochToProve: randomBigInt(1000000n), diff --git a/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts b/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts index 0612f7c81f1..b813c799a69 100644 --- a/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts +++ b/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts @@ -1,4 +1,5 @@ // An integration test for the p2p client to test req resp protocols +import { MockBlockSource } from '@aztec/archiver/test'; import { type ClientProtocolCircuitVerifier, type WorldStateSynchronizer, mockTx } from '@aztec/circuit-types'; import { EthAddress } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -14,7 +15,6 @@ import { generatePrivateKey } from 'viem/accounts'; import { type AttestationPool } from '../../attestation_pool/attestation_pool.js'; import { createP2PClient } from '../../client/index.js'; -import { MockBlockSource } from '../../client/mocks.js'; import { type P2PClient } from '../../client/p2p_client.js'; import { type P2PConfig, getP2PDefaultConfig } from '../../config.js'; import { type EpochProofQuotePool } from '../../epoch_proof_quote_pool/epoch_proof_quote_pool.js'; diff --git a/yarn-project/p2p/tsconfig.json b/yarn-project/p2p/tsconfig.json index fcbafbb11d0..dbffd3db046 100644 --- a/yarn-project/p2p/tsconfig.json +++ b/yarn-project/p2p/tsconfig.json @@ -20,6 +20,9 @@ }, { "path": "../telemetry-client" + }, + { + "path": "../archiver" } ], "include": ["src"] diff --git a/yarn-project/prover-node/package.json b/yarn-project/prover-node/package.json index 
1ab15127b80..21c4d635e3e 100644 --- a/yarn-project/prover-node/package.json +++ b/yarn-project/prover-node/package.json @@ -52,8 +52,10 @@ "@aztec/archiver": "workspace:^", "@aztec/circuit-types": "workspace:^", "@aztec/circuits.js": "workspace:^", + "@aztec/ethereum": "workspace:^", "@aztec/foundation": "workspace:^", "@aztec/kv-store": "workspace:^", + "@aztec/l1-artifacts": "workspace:^", "@aztec/prover-client": "workspace:^", "@aztec/sequencer-client": "workspace:^", "@aztec/simulator": "workspace:^", @@ -61,7 +63,8 @@ "@aztec/types": "workspace:^", "@aztec/world-state": "workspace:^", "source-map-support": "^0.5.21", - "tslib": "^2.4.0" + "tslib": "^2.4.0", + "viem": "^2.7.15" }, "devDependencies": { "@jest/globals": "^29.5.0", diff --git a/yarn-project/prover-node/src/config.ts b/yarn-project/prover-node/src/config.ts index 1758e391362..bfca132153d 100644 --- a/yarn-project/prover-node/src/config.ts +++ b/yarn-project/prover-node/src/config.ts @@ -1,7 +1,7 @@ import { type ArchiverConfig, archiverConfigMappings, getArchiverConfigFromEnv } from '@aztec/archiver'; import { type ConfigMappingsType, - booleanConfigHelper, + bigintConfigHelper, getConfigFromMappings, numberConfigHelper, } from '@aztec/foundation/config'; @@ -27,29 +27,42 @@ export type ProverNodeConfig = ArchiverConfig & WorldStateConfig & PublisherConfig & TxSenderConfig & - ProverCoordinationConfig & { - proverNodeDisableAutomaticProving?: boolean; - proverNodeMaxPendingJobs?: number; - proverNodeEpochSize?: number; + ProverCoordinationConfig & + QuoteProviderConfig & { + proverNodeMaxPendingJobs: number; + proverNodePollingIntervalMs: number; }; +export type QuoteProviderConfig = { + quoteProviderBasisPointFee: number; + quoteProviderBondAmount: bigint; +}; + const specificProverNodeConfigMappings: ConfigMappingsType< - Pick + Pick > = { - proverNodeDisableAutomaticProving: { - env: 'PROVER_NODE_DISABLE_AUTOMATIC_PROVING', - description: 'Whether to disable automatic proving of pending blocks seen on L1', - ...booleanConfigHelper(false), - }, proverNodeMaxPendingJobs: { env: 'PROVER_NODE_MAX_PENDING_JOBS', description: 'The maximum number of pending jobs for the prover node', + ...numberConfigHelper(10), + }, + proverNodePollingIntervalMs: { + env: 'PROVER_NODE_POLLING_INTERVAL_MS', + description: 'The interval in milliseconds to poll for new jobs', + ...numberConfigHelper(1000), + }, +}; + +const quoteProviderConfigMappings: ConfigMappingsType = { + quoteProviderBasisPointFee: { + env: 'QUOTE_PROVIDER_BASIS_POINT_FEE', + description: 'The basis point fee to charge for providing quotes', ...numberConfigHelper(100), }, - proverNodeEpochSize: { - env: 'PROVER_NODE_EPOCH_SIZE', - description: 'The number of blocks to prove in a single epoch', - ...numberConfigHelper(2), + quoteProviderBondAmount: { + env: 'QUOTE_PROVIDER_BOND_AMOUNT', + description: 'The bond amount to charge for providing quotes', + ...bigintConfigHelper(1000n), }, }; @@ -60,6 +73,7 @@ export const proverNodeConfigMappings: ConfigMappingsType = { ...getPublisherConfigMappings('PROVER'), ...getTxSenderConfigMappings('PROVER'), ...proverCoordinationConfigMappings, + ...quoteProviderConfigMappings, ...specificProverNodeConfigMappings, }; @@ -71,6 +85,7 @@ export function getProverNodeConfigFromEnv(): ProverNodeConfig { ...getPublisherConfigFromEnv('PROVER'), ...getTxSenderConfigFromEnv('PROVER'), ...getTxProviderConfigFromEnv(), + ...getConfigFromMappings(quoteProviderConfigMappings), ...getConfigFromMappings(specificProverNodeConfigMappings), }; } diff 
--git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index a909184bd96..97364a15f1c 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -1,6 +1,9 @@ import { type Archiver, createArchiver } from '@aztec/archiver'; import { type AztecNode } from '@aztec/circuit-types'; +import { createEthereumChain } from '@aztec/ethereum'; +import { Buffer32 } from '@aztec/foundation/buffer'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; +import { RollupAbi } from '@aztec/l1-artifacts'; import { createProverClient } from '@aztec/prover-client'; import { L1Publisher } from '@aztec/sequencer-client'; import { createSimulationProvider } from '@aztec/simulator'; @@ -8,10 +11,15 @@ import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { createWorldStateSynchronizer } from '@aztec/world-state'; -import { type ProverNodeConfig } from './config.js'; -import { AztecNodeProverCoordination } from './prover-coordination/aztec-node-prover-coordination.js'; +import { createPublicClient, getAddress, getContract, http } from 'viem'; + +import { type ProverNodeConfig, type QuoteProviderConfig } from './config.js'; +import { ClaimsMonitor } from './monitors/claims-monitor.js'; +import { EpochMonitor } from './monitors/epoch-monitor.js'; import { createProverCoordination } from './prover-coordination/factory.js'; import { ProverNode } from './prover-node.js'; +import { SimpleQuoteProvider } from './quote-provider/simple.js'; +import { QuoteSigner } from './quote-signer.js'; /** Creates a new prover node given a config. */ export async function createProverNode( @@ -39,9 +47,17 @@ export async function createProverNode( // REFACTOR: Move publisher out of sequencer package and into an L1-related package const publisher = new L1Publisher(config, telemetry); - const txProvider = deps.aztecNodeTxProvider - ? new AztecNodeProverCoordination(deps.aztecNodeTxProvider) - : createProverCoordination(config); + const txProvider = deps.aztecNodeTxProvider ? 
deps.aztecNodeTxProvider : createProverCoordination(config); + const quoteProvider = createQuoteProvider(config); + const quoteSigner = createQuoteSigner(config); + + const proverNodeConfig = { + maxPendingJobs: config.proverNodeMaxPendingJobs, + pollingIntervalMs: config.proverNodePollingIntervalMs, + }; + + const claimsMonitor = new ClaimsMonitor(publisher, proverNodeConfig); + const epochMonitor = new EpochMonitor(archiver, proverNodeConfig); return new ProverNode( prover!, @@ -52,11 +68,26 @@ export async function createProverNode( worldStateSynchronizer, txProvider, simulationProvider, + quoteProvider, + quoteSigner, + claimsMonitor, + epochMonitor, telemetry, - { - disableAutomaticProving: config.proverNodeDisableAutomaticProving, - maxPendingJobs: config.proverNodeMaxPendingJobs, - epochSize: config.proverNodeEpochSize, - }, + proverNodeConfig, ); } + +function createQuoteProvider(config: QuoteProviderConfig) { + return new SimpleQuoteProvider(config.quoteProviderBasisPointFee, config.quoteProviderBondAmount); +} + +function createQuoteSigner(config: ProverNodeConfig) { + // REFACTOR: We need a package that just returns an instance of a rollup contract ready to use + const { l1RpcUrl: rpcUrl, l1ChainId: chainId, l1Contracts } = config; + const chain = createEthereumChain(rpcUrl, chainId); + const client = createPublicClient({ chain: chain.chainInfo, transport: http(chain.rpcUrl) }); + const address = getAddress(l1Contracts.rollupAddress.toString()); + const rollupContract = getContract({ address, abi: RollupAbi, client }); + const privateKey = config.publisherPrivateKey; + return QuoteSigner.new(Buffer32.fromString(privateKey), rollupContract); +} diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.ts b/yarn-project/prover-node/src/job/epoch-proving-job.ts index f06757dac0a..22c2f640d54 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.ts @@ -29,6 +29,8 @@ export class EpochProvingJob { private uuid: string; constructor( + private epochNumber: bigint, + private blocks: L2Block[], private prover: EpochProver, private publicProcessorFactory: PublicProcessorFactory, private publisher: L1Publisher, @@ -50,28 +52,21 @@ export class EpochProvingJob { } /** - * Proves the given block range and submits the proof to L1. - * @param fromBlock - Start block. - * @param toBlock - Last block (inclusive). + * Proves the given epoch and submits the proof to L1. 
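+ * While running, the job reports progress via getState(): it moves from 'processing' to
+ * 'awaiting-prover' once all blocks in the epoch are processed, then 'publishing-proof', and
+ * finally 'completed'.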
*/ - public async run(fromBlock: number, toBlock: number) { - if (fromBlock > toBlock) { - throw new Error(`Invalid block range: ${fromBlock} to ${toBlock}`); - } - - const epochNumber = fromBlock; // Use starting block number as epoch number - const epochSize = toBlock - fromBlock + 1; - this.log.info(`Starting epoch proving job`, { fromBlock, toBlock, epochNumber, uuid: this.uuid }); + public async run() { + const epochNumber = Number(this.epochNumber); + const epochSize = this.blocks.length; + this.log.info(`Starting epoch proving job`, { epochSize, epochNumber, uuid: this.uuid }); this.state = 'processing'; const timer = new Timer(); try { this.prover.startNewEpoch(epochNumber, epochSize); - let previousHeader = (await this.l2BlockSource.getBlock(fromBlock - 1))?.header; + let previousHeader = await this.l2BlockSource.getBlockHeader(this.blocks[0].number - 1); - for (let blockNumber = fromBlock; blockNumber <= toBlock; blockNumber++) { + for (const block of this.blocks) { // Gather all data to prove this block - const block = await this.getBlock(blockNumber); const globalVariables = block.header.globalVariables; const txHashes = block.body.txEffects.map(tx => tx.txHash); const txCount = block.body.numberOfTxsIncludingPadded; @@ -109,11 +104,12 @@ export class EpochProvingJob { this.state = 'awaiting-prover'; const { publicInputs, proof } = await this.prover.finaliseEpoch(); - this.log.info(`Finalised proof for epoch`, { epochNumber, fromBlock, toBlock, uuid: this.uuid }); + this.log.info(`Finalised proof for epoch`, { epochNumber, uuid: this.uuid }); this.state = 'publishing-proof'; - await this.publisher.submitEpochProof({ epochNumber, fromBlock, toBlock, publicInputs, proof }); - this.log.info(`Submitted proof for epoch`, { epochNumber, fromBlock, toBlock, uuid: this.uuid }); + const [fromBlock, toBlock] = [this.blocks[0].number, this.blocks.at(-1)!.number]; + await this.publisher.submitEpochProof({ fromBlock, toBlock, epochNumber, publicInputs, proof }); + this.log.info(`Submitted proof for epoch`, { epochNumber, uuid: this.uuid }); this.state = 'completed'; this.metrics.recordProvingJob(timer); @@ -129,14 +125,6 @@ export class EpochProvingJob { this.prover.cancel(); } - private async getBlock(blockNumber: number): Promise { - const block = await this.l2BlockSource.getBlock(blockNumber); - if (!block) { - throw new Error(`Block ${blockNumber} not found in L2 block source`); - } - return block; - } - private async getTxs(txHashes: TxHash[]): Promise { const txs = await Promise.all( txHashes.map(txHash => this.coordination.getTxByHash(txHash).then(tx => [txHash, tx] as const)), diff --git a/yarn-project/prover-node/src/monitors/claims-monitor.test.ts b/yarn-project/prover-node/src/monitors/claims-monitor.test.ts new file mode 100644 index 00000000000..610284e3caa --- /dev/null +++ b/yarn-project/prover-node/src/monitors/claims-monitor.test.ts @@ -0,0 +1,79 @@ +import { type EpochProofClaim } from '@aztec/circuit-types'; +import { EthAddress } from '@aztec/circuits.js'; +import { sleep } from '@aztec/foundation/sleep'; +import { type L1Publisher } from '@aztec/sequencer-client'; + +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { ClaimsMonitor, type ClaimsMonitorHandler } from './claims-monitor.js'; + +describe('ClaimsMonitor', () => { + let l1Publisher: MockProxy; + let handler: MockProxy; + let claimsMonitor: ClaimsMonitor; + + let publisherAddress: EthAddress; + + beforeEach(() => { + l1Publisher = mock(); + handler = mock(); + + publisherAddress = 
EthAddress.random(); + l1Publisher.getSenderAddress.mockReturnValue(publisherAddress); + + claimsMonitor = new ClaimsMonitor(l1Publisher, { pollingIntervalMs: 10 }); + }); + + afterEach(async () => { + await claimsMonitor.stop(); + }); + + const makeEpochProofClaim = (epoch: number, bondProvider: EthAddress): MockProxy => { + return { + basisPointFee: 100n, + bondAmount: 10n, + bondProvider, + epochToProve: BigInt(epoch), + proposerClaimant: EthAddress.random(), + }; + }; + + it('should handle a new claim if it belongs to the prover', async () => { + const proofClaim = makeEpochProofClaim(1, publisherAddress); + l1Publisher.getProofClaim.mockResolvedValue(proofClaim); + + claimsMonitor.start(handler); + await sleep(100); + + expect(handler.handleClaim).toHaveBeenCalledWith(proofClaim); + expect(handler.handleClaim).toHaveBeenCalledTimes(1); + }); + + it('should not handle a new claim if it does not belong to the prover', async () => { + const proofClaim = makeEpochProofClaim(1, EthAddress.random()); + l1Publisher.getProofClaim.mockResolvedValue(proofClaim); + + claimsMonitor.start(handler); + await sleep(100); + + expect(handler.handleClaim).not.toHaveBeenCalled(); + }); + + it('should only trigger when a new claim is seen', async () => { + const proofClaim = makeEpochProofClaim(1, publisherAddress); + l1Publisher.getProofClaim.mockResolvedValue(proofClaim); + + claimsMonitor.start(handler); + await sleep(100); + + expect(handler.handleClaim).toHaveBeenCalledWith(proofClaim); + expect(handler.handleClaim).toHaveBeenCalledTimes(1); + + const proofClaim2 = makeEpochProofClaim(2, publisherAddress); + l1Publisher.getProofClaim.mockResolvedValue(proofClaim2); + await sleep(100); + + expect(handler.handleClaim).toHaveBeenCalledWith(proofClaim2); + expect(handler.handleClaim).toHaveBeenCalledTimes(2); + }); +}); diff --git a/yarn-project/prover-node/src/monitors/claims-monitor.ts b/yarn-project/prover-node/src/monitors/claims-monitor.ts new file mode 100644 index 00000000000..abce274866b --- /dev/null +++ b/yarn-project/prover-node/src/monitors/claims-monitor.ts @@ -0,0 +1,52 @@ +import { type EpochProofClaim } from '@aztec/circuit-types'; +import { type EthAddress } from '@aztec/circuits.js'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { RunningPromise } from '@aztec/foundation/running-promise'; +import { type L1Publisher } from '@aztec/sequencer-client'; + +export interface ClaimsMonitorHandler { + handleClaim(proofClaim: EpochProofClaim): Promise; +} + +export class ClaimsMonitor { + private runningPromise: RunningPromise; + private log = createDebugLogger('aztec:prover-node:claims-monitor'); + + private handler: ClaimsMonitorHandler | undefined; + private lastClaimEpochNumber: bigint | undefined; + + constructor(private readonly l1Publisher: L1Publisher, private options: { pollingIntervalMs: number }) { + this.runningPromise = new RunningPromise(this.work.bind(this), this.options.pollingIntervalMs); + } + + public start(handler: ClaimsMonitorHandler) { + this.handler = handler; + this.runningPromise.start(); + this.log.info(`Started ClaimsMonitor with prover address ${this.getProverAddress().toString()}`, this.options); + } + + public async stop() { + this.log.verbose('Stopping ClaimsMonitor'); + await this.runningPromise.stop(); + this.log.info('Stopped ClaimsMonitor'); + } + + public async work() { + const proofClaim = await this.l1Publisher.getProofClaim(); + if (!proofClaim) { + return; + } + + if (this.lastClaimEpochNumber === undefined || proofClaim.epochToProve 
> this.lastClaimEpochNumber) { + this.log.verbose(`Found new claim for epoch ${proofClaim.epochToProve} by ${proofClaim.bondProvider.toString()}`); + if (proofClaim.bondProvider.equals(this.getProverAddress())) { + await this.handler?.handleClaim(proofClaim); + } + this.lastClaimEpochNumber = proofClaim.epochToProve; + } + } + + protected getProverAddress(): EthAddress { + return this.l1Publisher.getSenderAddress(); + } +} diff --git a/yarn-project/prover-node/src/monitors/epoch-monitor.test.ts b/yarn-project/prover-node/src/monitors/epoch-monitor.test.ts new file mode 100644 index 00000000000..f7a0d7dc406 --- /dev/null +++ b/yarn-project/prover-node/src/monitors/epoch-monitor.test.ts @@ -0,0 +1,76 @@ +import { type L2BlockSource } from '@aztec/circuit-types'; +import { sleep } from '@aztec/foundation/sleep'; + +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { EpochMonitor, type EpochMonitorHandler } from './epoch-monitor.js'; + +describe('EpochMonitor', () => { + let l2BlockSource: MockProxy; + let handler: MockProxy; + let epochMonitor: EpochMonitor; + + let lastEpochComplete: bigint = 0n; + + beforeEach(() => { + handler = mock(); + l2BlockSource = mock({ + isEpochComplete(epochNumber) { + return Promise.resolve(epochNumber <= lastEpochComplete); + }, + }); + + epochMonitor = new EpochMonitor(l2BlockSource, { pollingIntervalMs: 10 }); + }); + + afterEach(async () => { + await epochMonitor.stop(); + }); + + it('triggers initial epoch sync', async () => { + l2BlockSource.getL2EpochNumber.mockResolvedValue(10n); + epochMonitor.start(handler); + await sleep(100); + + expect(handler.handleInitialEpochSync).toHaveBeenCalledWith(9n); + }); + + it('does not trigger initial epoch sync on epoch zero', async () => { + l2BlockSource.getL2EpochNumber.mockResolvedValue(0n); + epochMonitor.start(handler); + await sleep(100); + + expect(handler.handleInitialEpochSync).not.toHaveBeenCalled(); + }); + + it('triggers epoch completion', async () => { + lastEpochComplete = 9n; + l2BlockSource.getL2EpochNumber.mockResolvedValue(10n); + epochMonitor.start(handler); + + await sleep(100); + expect(handler.handleEpochCompleted).not.toHaveBeenCalled(); + + lastEpochComplete = 10n; + await sleep(100); + expect(handler.handleEpochCompleted).toHaveBeenCalledWith(10n); + expect(handler.handleEpochCompleted).toHaveBeenCalledTimes(1); + + lastEpochComplete = 11n; + await sleep(100); + expect(handler.handleEpochCompleted).toHaveBeenCalledWith(11n); + expect(handler.handleEpochCompleted).toHaveBeenCalledTimes(2); + }); + + it('triggers epoch completion if initial epoch was already complete', async () => { + // this happens if we start the monitor on the very last slot of an epoch + lastEpochComplete = 10n; + l2BlockSource.getL2EpochNumber.mockResolvedValue(10n); + epochMonitor.start(handler); + + await sleep(100); + expect(handler.handleInitialEpochSync).toHaveBeenCalledWith(9n); + expect(handler.handleEpochCompleted).toHaveBeenCalledWith(10n); + expect(handler.handleEpochCompleted).toHaveBeenCalledTimes(1); + }); +}); diff --git a/yarn-project/prover-node/src/monitors/epoch-monitor.ts b/yarn-project/prover-node/src/monitors/epoch-monitor.ts new file mode 100644 index 00000000000..0f106e38522 --- /dev/null +++ b/yarn-project/prover-node/src/monitors/epoch-monitor.ts @@ -0,0 +1,48 @@ +import { type L2BlockSource } from '@aztec/circuit-types'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { RunningPromise } from '@aztec/foundation/running-promise'; + +export interface 
EpochMonitorHandler { + handleInitialEpochSync(epochNumber: bigint): Promise; + handleEpochCompleted(epochNumber: bigint): Promise; +} + +export class EpochMonitor { + private runningPromise: RunningPromise; + private log = createDebugLogger('aztec:prover-node:epoch-monitor'); + + private handler: EpochMonitorHandler | undefined; + + private latestEpochNumber: bigint | undefined; + + constructor(private readonly l2BlockSource: L2BlockSource, private options: { pollingIntervalMs: number }) { + this.runningPromise = new RunningPromise(this.work.bind(this), this.options.pollingIntervalMs); + } + + public start(handler: EpochMonitorHandler) { + this.handler = handler; + this.runningPromise.start(); + this.log.info('Started EpochMonitor', this.options); + } + + public async stop() { + await this.runningPromise.stop(); + this.log.info('Stopped EpochMonitor'); + } + + public async work() { + if (!this.latestEpochNumber) { + const epochNumber = await this.l2BlockSource.getL2EpochNumber(); + if (epochNumber > 0n) { + await this.handler?.handleInitialEpochSync(epochNumber - 1n); + } + this.latestEpochNumber = epochNumber; + return; + } + + if (await this.l2BlockSource.isEpochComplete(this.latestEpochNumber)) { + await this.handler?.handleEpochCompleted(this.latestEpochNumber); + this.latestEpochNumber += 1n; + } + } +} diff --git a/yarn-project/prover-node/src/monitors/index.ts b/yarn-project/prover-node/src/monitors/index.ts new file mode 100644 index 00000000000..f3b5be62922 --- /dev/null +++ b/yarn-project/prover-node/src/monitors/index.ts @@ -0,0 +1,2 @@ +export * from './claims-monitor.js'; +export * from './epoch-monitor.js'; diff --git a/yarn-project/prover-node/src/prover-coordination/aztec-node-prover-coordination.ts b/yarn-project/prover-node/src/prover-coordination/aztec-node-prover-coordination.ts deleted file mode 100644 index 3152cd2ebf8..00000000000 --- a/yarn-project/prover-node/src/prover-coordination/aztec-node-prover-coordination.ts +++ /dev/null @@ -1,14 +0,0 @@ -import type { AztecNode, EpochProofQuote, ProverCoordination, Tx, TxHash } from '@aztec/circuit-types'; - -/** Implements ProverCoordinator by wrapping an Aztec node */ -export class AztecNodeProverCoordination implements ProverCoordination { - constructor(private node: AztecNode) {} - - getTxByHash(txHash: TxHash): Promise { - return this.node.getTxByHash(txHash); - } - - addEpochProofQuote(quote: EpochProofQuote): Promise { - return this.node.addEpochProofQuote(quote); - } -} diff --git a/yarn-project/prover-node/src/prover-coordination/factory.ts b/yarn-project/prover-node/src/prover-coordination/factory.ts index 71e5ba8a95e..e48720a329c 100644 --- a/yarn-project/prover-node/src/prover-coordination/factory.ts +++ b/yarn-project/prover-node/src/prover-coordination/factory.ts @@ -1,12 +1,10 @@ import { type ProverCoordination, createAztecNodeClient } from '@aztec/circuit-types'; -import { AztecNodeProverCoordination } from './aztec-node-prover-coordination.js'; import { type ProverCoordinationConfig } from './config.js'; export function createProverCoordination(config: ProverCoordinationConfig): ProverCoordination { if (config.proverCoordinationNodeUrl) { - const node = createAztecNodeClient(config.proverCoordinationNodeUrl); - return new AztecNodeProverCoordination(node); + return createAztecNodeClient(config.proverCoordinationNodeUrl); } else { throw new Error(`Aztec Node URL for Tx Provider is not set.`); } diff --git a/yarn-project/prover-node/src/prover-coordination/index.ts 
b/yarn-project/prover-node/src/prover-coordination/index.ts index 7394c367754..0ded7f3c0c1 100644 --- a/yarn-project/prover-node/src/prover-coordination/index.ts +++ b/yarn-project/prover-node/src/prover-coordination/index.ts @@ -1,3 +1,2 @@ -export * from './aztec-node-prover-coordination.js'; export * from './config.js'; export * from './factory.js'; diff --git a/yarn-project/prover-node/src/prover-node.test.ts b/yarn-project/prover-node/src/prover-node.test.ts index 669c56527a1..13d115d72cc 100644 --- a/yarn-project/prover-node/src/prover-node.test.ts +++ b/yarn-project/prover-node/src/prover-node.test.ts @@ -1,12 +1,20 @@ import { + type EpochProofClaim, + EpochProofQuote, + EpochProofQuotePayload, type EpochProverManager, type L1ToL2MessageSource, + type L2Block, type L2BlockSource, type MerkleTreeAdminOperations, type ProverCoordination, WorldStateRunningState, type WorldStateSynchronizer, } from '@aztec/circuit-types'; +import { EthAddress } from '@aztec/circuits.js'; +import { times } from '@aztec/foundation/collection'; +import { Signature } from '@aztec/foundation/eth-signature'; +import { sleep } from '@aztec/foundation/sleep'; import { type L1Publisher } from '@aztec/sequencer-client'; import { type PublicProcessorFactory, type SimulationProvider } from '@aztec/simulator'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -15,27 +23,60 @@ import { type ContractDataSource } from '@aztec/types/contracts'; import { type MockProxy, mock } from 'jest-mock-extended'; import { type EpochProvingJob } from './job/epoch-proving-job.js'; -import { ProverNode } from './prover-node.js'; +import { ClaimsMonitor } from './monitors/claims-monitor.js'; +import { EpochMonitor } from './monitors/epoch-monitor.js'; +import { ProverNode, type ProverNodeOptions } from './prover-node.js'; +import { type QuoteProvider } from './quote-provider/index.js'; +import { type QuoteSigner } from './quote-signer.js'; describe('prover-node', () => { + // Prover node dependencies let prover: MockProxy; let publisher: MockProxy; let l2BlockSource: MockProxy; let l1ToL2MessageSource: MockProxy; let contractDataSource: MockProxy; let worldState: MockProxy; - let txProvider: MockProxy; + let coordination: MockProxy; let simulator: MockProxy; + let quoteProvider: MockProxy; + let quoteSigner: MockProxy; + let telemetryClient: NoopTelemetryClient; + let config: ProverNodeOptions; + // Subject under test let proverNode: TestProverNode; + // Quote returned by the provider by default and its completed quote + let partialQuote: Pick; + + // Sample claim + let claim: MockProxy; + + // Blocks returned by the archiver + let blocks: MockProxy[]; + + // Address of the publisher + let address: EthAddress; + // List of all jobs ever created by the test prover node and their dependencies let jobs: { job: MockProxy; cleanUp: (job: EpochProvingJob) => Promise; db: MerkleTreeAdminOperations; + epochNumber: bigint; }[]; + const toQuotePayload = ( + epoch: bigint, + partialQuote: Pick, + ) => EpochProofQuotePayload.from({ ...partialQuote, prover: address, epochToProve: epoch }); + + const toExpectedQuote = ( + epoch: bigint, + quote: Pick = partialQuote, + ) => expect.objectContaining({ payload: toQuotePayload(epoch, quote) }); + beforeEach(() => { prover = mock(); publisher = mock(); @@ -43,132 +84,229 @@ describe('prover-node', () => { l1ToL2MessageSource = mock(); contractDataSource = mock(); worldState = mock(); - txProvider = mock(); + coordination = mock(); simulator = mock(); - const telemetryClient = new 
NoopTelemetryClient(); + quoteProvider = mock(); + quoteSigner = mock(); + + telemetryClient = new NoopTelemetryClient(); + config = { maxPendingJobs: 3, pollingIntervalMs: 10 }; // World state returns a new mock db every time it is asked to fork worldState.syncImmediateAndFork.mockImplementation(() => Promise.resolve(mock())); + worldState.status.mockResolvedValue({ syncedToL2Block: 1, state: WorldStateRunningState.RUNNING }); + + // Publisher returns its sender address + address = EthAddress.random(); + publisher.getSenderAddress.mockReturnValue(address); + + // Quote provider returns a mock + partialQuote = { basisPointFee: 100, bondAmount: 0n, validUntilSlot: 30n }; + quoteProvider.getQuote.mockResolvedValue(partialQuote); + + // Signer returns an empty signature + quoteSigner.sign.mockImplementation(payload => Promise.resolve(new EpochProofQuote(payload, Signature.empty()))); + + // Archiver returns a bunch of fake blocks + blocks = times(3, i => mock({ number: i + 20 })); + l2BlockSource.getBlocksForEpoch.mockResolvedValue(blocks); + + // A sample claim + claim = { epochToProve: 10n, bondProvider: address } as EpochProofClaim; jobs = []; - proverNode = new TestProverNode( - prover, - publisher, - l2BlockSource, - l1ToL2MessageSource, - contractDataSource, - worldState, - txProvider, - simulator, - telemetryClient, - { maxPendingJobs: 3, pollingIntervalMs: 10, epochSize: 2 }, - ); }); afterEach(async () => { await proverNode.stop(); }); - const setBlockNumbers = (blockNumber: number, provenBlockNumber: number) => { - l2BlockSource.getBlockNumber.mockResolvedValue(blockNumber); - l2BlockSource.getProvenBlockNumber.mockResolvedValue(provenBlockNumber); - worldState.status.mockResolvedValue({ syncedToL2Block: provenBlockNumber, state: WorldStateRunningState.RUNNING }); - }; + describe('with mocked monitors', () => { + let claimsMonitor: MockProxy; + let epochMonitor: MockProxy; - it('proves pending blocks', async () => { - setBlockNumbers(5, 3); + beforeEach(() => { + claimsMonitor = mock(); + epochMonitor = mock(); - await proverNode.work(); - await proverNode.work(); - await proverNode.work(); + proverNode = new TestProverNode( + prover, + publisher, + l2BlockSource, + l1ToL2MessageSource, + contractDataSource, + worldState, + coordination, + simulator, + quoteProvider, + quoteSigner, + claimsMonitor, + epochMonitor, + telemetryClient, + config, + ); + }); - expect(jobs.length).toEqual(1); - expect(jobs[0].job.run).toHaveBeenCalledWith(4, 5); - }); + it('sends a quote on a finished epoch', async () => { + await proverNode.handleEpochCompleted(10n); - it('stops proving when maximum jobs are reached', async () => { - setBlockNumbers(20, 3); + expect(quoteProvider.getQuote).toHaveBeenCalledWith(blocks); + expect(quoteSigner.sign).toHaveBeenCalledWith(expect.objectContaining(partialQuote)); + expect(coordination.addEpochProofQuote).toHaveBeenCalledTimes(1); - await proverNode.work(); - await proverNode.work(); - await proverNode.work(); - await proverNode.work(); + expect(coordination.addEpochProofQuote).toHaveBeenCalledWith(toExpectedQuote(10n)); + }); - expect(jobs.length).toEqual(3); - expect(jobs[0].job.run).toHaveBeenCalledWith(4, 5); - expect(jobs[1].job.run).toHaveBeenCalledWith(6, 7); - expect(jobs[2].job.run).toHaveBeenCalledWith(8, 9); - }); + it('does not send a quote on a finished epoch if the provider does not return one', async () => { + quoteProvider.getQuote.mockResolvedValue(undefined); + await proverNode.handleEpochCompleted(10n); - it('reports on pending jobs', async () 
=> { - setBlockNumbers(8, 3); + expect(quoteSigner.sign).not.toHaveBeenCalled(); + expect(coordination.addEpochProofQuote).not.toHaveBeenCalled(); + }); - await proverNode.work(); - await proverNode.work(); + it('starts proving on a new claim', async () => { + await proverNode.handleClaim(claim); - expect(jobs.length).toEqual(2); - expect(proverNode.getJobs().length).toEqual(2); - expect(proverNode.getJobs()).toEqual([ - { uuid: '0', status: 'processing' }, - { uuid: '1', status: 'processing' }, - ]); - }); + expect(jobs[0].epochNumber).toEqual(10n); + }); + + it('fails to start proving if world state is synced past the first block in the epoch', async () => { + // This test will probably be no longer necessary once we have the proper world state + worldState.status.mockResolvedValue({ syncedToL2Block: 21, state: WorldStateRunningState.RUNNING }); + await proverNode.handleClaim(claim); + + expect(jobs.length).toEqual(0); + }); - it('cleans up jobs when completed', async () => { - setBlockNumbers(20, 3); + it('does not prove the same epoch twice', async () => { + await proverNode.handleClaim(claim); + await proverNode.handleClaim(claim); - await proverNode.work(); - await proverNode.work(); - await proverNode.work(); - await proverNode.work(); + expect(jobs.length).toEqual(1); + }); - expect(jobs.length).toEqual(3); - expect(proverNode.getJobs().length).toEqual(3); + it('sends a quote on the initial sync if there is no claim', async () => { + await proverNode.handleInitialEpochSync(10n); - // Clean up the first job - await jobs[0].cleanUp(jobs[0].job); - expect(proverNode.getJobs().length).toEqual(2); - expect(jobs[0].db.delete).toHaveBeenCalled(); + expect(coordination.addEpochProofQuote).toHaveBeenCalledTimes(1); + }); - // Request another job to run and ensure it gets pushed - await proverNode.work(); - expect(jobs.length).toEqual(4); - expect(jobs[3].job.run).toHaveBeenCalledWith(10, 11); - expect(proverNode.getJobs().length).toEqual(3); - expect(proverNode.getJobs().map(({ uuid }) => uuid)).toEqual(['1', '2', '3']); + it('sends a quote on the initial sync if there is a claim for an older epoch', async () => { + const claim = { epochToProve: 9n, bondProvider: EthAddress.random() } as EpochProofClaim; + publisher.getProofClaim.mockResolvedValue(claim); + await proverNode.handleInitialEpochSync(10n); + + expect(coordination.addEpochProofQuote).toHaveBeenCalledTimes(1); + }); + + it('does not send a quote on the initial sync if there is already a claim', async () => { + const claim = { epochToProve: 10n, bondProvider: EthAddress.random() } as EpochProofClaim; + publisher.getProofClaim.mockResolvedValue(claim); + await proverNode.handleInitialEpochSync(10n); + + expect(coordination.addEpochProofQuote).not.toHaveBeenCalled(); + }); + + it('starts proving if there is a claim sent by us', async () => { + publisher.getProofClaim.mockResolvedValue(claim); + l2BlockSource.getProvenL2EpochNumber.mockResolvedValue(9); + await proverNode.handleInitialEpochSync(10n); + + expect(jobs[0].epochNumber).toEqual(10n); + }); + + it('does not start proving if there is a claim sent by us but proof has already landed', async () => { + publisher.getProofClaim.mockResolvedValue(claim); + l2BlockSource.getProvenL2EpochNumber.mockResolvedValue(10); + await proverNode.handleInitialEpochSync(10n); + + expect(jobs.length).toEqual(0); + }); }); - it('moves forward when proving fails', async () => { - setBlockNumbers(10, 3); + describe('with actual monitors', () => { + let claimsMonitor: ClaimsMonitor; + let 
epochMonitor: EpochMonitor; + + // Answers l2BlockSource.isEpochComplete, queried from the epoch monitor + let lastEpochComplete: bigint = 0n; + + beforeEach(() => { + claimsMonitor = new ClaimsMonitor(publisher, config); + epochMonitor = new EpochMonitor(l2BlockSource, config); + + l2BlockSource.isEpochComplete.mockImplementation(epochNumber => + Promise.resolve(epochNumber <= lastEpochComplete), + ); + + proverNode = new TestProverNode( + prover, + publisher, + l2BlockSource, + l1ToL2MessageSource, + contractDataSource, + worldState, + coordination, + simulator, + quoteProvider, + quoteSigner, + claimsMonitor, + epochMonitor, + telemetryClient, + config, + ); + }); + + it('sends a quote on initial sync', async () => { + l2BlockSource.getL2EpochNumber.mockResolvedValue(10n); + + await proverNode.start(); + await sleep(100); + expect(coordination.addEpochProofQuote).toHaveBeenCalledTimes(1); + }); + + it('sends another quote when a new epoch is completed', async () => { + lastEpochComplete = 10n; + l2BlockSource.getL2EpochNumber.mockResolvedValue(11n); + + await proverNode.start(); + await sleep(100); + + lastEpochComplete = 11n; + await sleep(100); + + expect(coordination.addEpochProofQuote).toHaveBeenCalledTimes(2); + expect(coordination.addEpochProofQuote).toHaveBeenCalledWith(toExpectedQuote(10n)); + expect(coordination.addEpochProofQuote).toHaveBeenCalledWith(toExpectedQuote(11n)); + }); - // We trigger an error by setting world state past the block that the prover node will try proving - worldState.status.mockResolvedValue({ syncedToL2Block: 7, state: WorldStateRunningState.RUNNING }); + it('starts proving when a claim is seen', async () => { + publisher.getProofClaim.mockResolvedValue(claim); - // These two calls should return in failures - await proverNode.work(); - await proverNode.work(); - expect(jobs.length).toEqual(0); + await proverNode.start(); + await sleep(100); - // But now the prover node should move forward - await proverNode.work(); - expect(jobs.length).toEqual(1); - expect(jobs[0].job.run).toHaveBeenCalledWith(8, 9); + expect(jobs[0].epochNumber).toEqual(10n); + }); }); class TestProverNode extends ProverNode { protected override doCreateEpochProvingJob( + epochNumber: bigint, + _blocks: L2Block[], db: MerkleTreeAdminOperations, _publicProcessorFactory: PublicProcessorFactory, cleanUp: (job: EpochProvingJob) => Promise, ): EpochProvingJob { - const job = mock({ getState: () => 'processing' }); + const job = mock({ getState: () => 'processing', run: () => Promise.resolve() }); job.getId.mockReturnValue(jobs.length.toString()); - jobs.push({ job, cleanUp, db }); + jobs.push({ epochNumber, job, cleanUp, db }); return job; } - public override work() { - return super.work(); + public override triggerMonitors() { + return super.triggerMonitors(); } } }); diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index 24617147814..31fb8a3829b 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -1,7 +1,10 @@ import { + type EpochProofClaim, type EpochProofQuote, + EpochProofQuotePayload, type EpochProverManager, type L1ToL2MessageSource, + type L2Block, type L2BlockSource, type MerkleTreeOperations, type ProverCoordination, @@ -9,7 +12,6 @@ import { } from '@aztec/circuit-types'; import { compact } from '@aztec/foundation/collection'; import { createDebugLogger } from '@aztec/foundation/log'; -import { RunningPromise } from '@aztec/foundation/running-promise'; import { type 
L1Publisher } from '@aztec/sequencer-client'; import { PublicProcessorFactory, type SimulationProvider } from '@aztec/simulator'; import { type TelemetryClient } from '@aztec/telemetry-client'; @@ -17,57 +19,130 @@ import { type ContractDataSource } from '@aztec/types/contracts'; import { EpochProvingJob, type EpochProvingJobState } from './job/epoch-proving-job.js'; import { ProverNodeMetrics } from './metrics.js'; +import { type ClaimsMonitor, type ClaimsMonitorHandler } from './monitors/claims-monitor.js'; +import { type EpochMonitor, type EpochMonitorHandler } from './monitors/epoch-monitor.js'; +import { type QuoteProvider } from './quote-provider/index.js'; +import { type QuoteSigner } from './quote-signer.js'; -type ProverNodeOptions = { +export type ProverNodeOptions = { pollingIntervalMs: number; - disableAutomaticProving: boolean; maxPendingJobs: number; - epochSize: number; }; /** * An Aztec Prover Node is a standalone process that monitors the unfinalised chain on L1 for unproven blocks, - * fetches their txs from a tx source in the p2p network or an external node, re-executes their public functions, - * creates a rollup proof, and submits it to L1. + * submits bids for proving them, and monitors if they are accepted. If so, the prover node fetches the txs + * from a tx source in the p2p network or an external node, re-executes their public functions, creates a rollup + * proof for the epoch, and submits it to L1. */ -export class ProverNode { +export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler { private log = createDebugLogger('aztec:prover-node'); - private runningPromise: RunningPromise | undefined; - private latestBlockWeAreProving: number | undefined; + + private latestEpochWeAreProving: bigint | undefined; private jobs: Map = new Map(); private options: ProverNodeOptions; private metrics: ProverNodeMetrics; constructor( - private prover: EpochProverManager, - private publisher: L1Publisher, - private l2BlockSource: L2BlockSource, - private l1ToL2MessageSource: L1ToL2MessageSource, - private contractDataSource: ContractDataSource, - private worldState: WorldStateSynchronizer, - private coordination: ProverCoordination, - private simulator: SimulationProvider, - private telemetryClient: TelemetryClient, + private readonly prover: EpochProverManager, + private readonly publisher: L1Publisher, + private readonly l2BlockSource: L2BlockSource, + private readonly l1ToL2MessageSource: L1ToL2MessageSource, + private readonly contractDataSource: ContractDataSource, + private readonly worldState: WorldStateSynchronizer, + private readonly coordination: ProverCoordination, + private readonly simulator: SimulationProvider, + private readonly quoteProvider: QuoteProvider, + private readonly quoteSigner: QuoteSigner, + private readonly claimsMonitor: ClaimsMonitor, + private readonly epochsMonitor: EpochMonitor, + private readonly telemetryClient: TelemetryClient, options: Partial = {}, ) { this.options = { pollingIntervalMs: 1_000, - disableAutomaticProving: false, maxPendingJobs: 100, - epochSize: 2, ...compact(options), }; this.metrics = new ProverNodeMetrics(telemetryClient, 'ProverNode'); } + async ensureBond() { + // Ensure the prover has enough bond to submit proofs + // Can we just run this at the beginning and forget about it? + // Or do we need to check periodically? Or only when we get slashed? How do we know we got slashed? 
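+ // One possible shape (an assumption, not implemented in this change): read the prover's bond
+ // currently deposited on L1, compare it against the configured quoteProviderBondAmount, and
+ // top it up with an L1 transaction before the claims/epoch monitors start.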
+ } + + async handleClaim(proofClaim: EpochProofClaim): Promise { + if (proofClaim.epochToProve === this.latestEpochWeAreProving) { + this.log.verbose(`Already proving claim for epoch ${proofClaim.epochToProve}`); + return; + } + + try { + await this.startProof(proofClaim.epochToProve); + this.latestEpochWeAreProving = proofClaim.epochToProve; + } catch (err) { + this.log.error(`Error handling claim for epoch ${proofClaim.epochToProve}`, err); + } + } + + /** + * Handles the epoch number to prove when the prover node starts by checking if there + * is an existing claim for it. If not, it creates and sends a quote for it. + * @param epochNumber - The epoch immediately before the current one when the prover node starts. + */ + async handleInitialEpochSync(epochNumber: bigint): Promise { + try { + const claim = await this.publisher.getProofClaim(); + if (!claim || claim.epochToProve < epochNumber) { + await this.handleEpochCompleted(epochNumber); + } else if (claim && claim.bondProvider.equals(this.publisher.getSenderAddress())) { + const lastEpochProven = await this.l2BlockSource.getProvenL2EpochNumber(); + if (!lastEpochProven || lastEpochProven < claim.epochToProve) { + await this.handleClaim(claim); + } + } + } catch (err) { + this.log.error(`Error handling initial epoch sync`, err); + } + } + + /** + * Handles an epoch being completed by sending a quote for proving it. + * @param epochNumber - The epoch number that was just completed. + */ + async handleEpochCompleted(epochNumber: bigint): Promise { + try { + const blocks = await this.l2BlockSource.getBlocksForEpoch(epochNumber); + const partialQuote = await this.quoteProvider.getQuote(blocks); + if (!partialQuote) { + this.log.verbose(`No quote produced for epoch ${epochNumber}`); + return; + } + + const quote = EpochProofQuotePayload.from({ + ...partialQuote, + epochToProve: BigInt(epochNumber), + prover: this.publisher.getSenderAddress(), + validUntilSlot: partialQuote.validUntilSlot ?? BigInt(Number.MAX_SAFE_INTEGER), // Should we constrain this? + }); + const signed = await this.quoteSigner.sign(quote); + await this.sendEpochProofQuote(signed); + } catch (err) { + this.log.error(`Error handling epoch completed`, err); + } + } + /** * Starts the prover node so it periodically checks for unproven blocks in the unfinalised chain from L1 and proves them. * This may change once we implement a prover coordination mechanism. */ - start() { - this.runningPromise = new RunningPromise(this.work.bind(this), this.options.pollingIntervalMs); - this.runningPromise.start(); + async start() { + await this.ensureBond(); + this.epochsMonitor.start(this); + this.claimsMonitor.start(this); this.log.info('Started ProverNode', this.options); } @@ -76,7 +151,8 @@ export class ProverNode { */ async stop() { this.log.info('Stopping ProverNode'); - await this.runningPromise?.stop(); + await this.epochsMonitor.stop(); + await this.claimsMonitor.stop(); await this.prover.stop(); await this.l2BlockSource.stop(); this.publisher.interrupt(); @@ -86,73 +162,27 @@ export class ProverNode { } /** - * Single iteration of recurring work. This method is called periodically by the running promise. - * Checks whether there are new blocks to prove, proves them, and submits them. + * Sends an epoch proof quote to the coordinator. */ - protected async work() { - try { - if (this.options.disableAutomaticProving) { - return; - } - - if (!this.checkMaximumPendingJobs()) { - this.log.debug(`Maximum pending proving jobs reached. 
Skipping work.`, { - maxPendingJobs: this.options.maxPendingJobs, - pendingJobs: this.jobs.size, - }); - return; - } - - const [latestBlockNumber, latestProvenBlockNumber] = await Promise.all([ - this.l2BlockSource.getBlockNumber(), - this.l2BlockSource.getProvenBlockNumber(), - ]); - - // Consider both the latest block we are proving and the last block proven on the chain - const latestBlockBeingProven = this.latestBlockWeAreProving ?? 0; - const latestProven = Math.max(latestBlockBeingProven, latestProvenBlockNumber); - if (latestBlockNumber - latestProven < this.options.epochSize) { - this.log.debug(`No epoch to prove`, { - latestBlockNumber, - latestProvenBlockNumber, - latestBlockBeingProven, - }); - return; - } - - const fromBlock = latestProven + 1; - const toBlock = fromBlock + this.options.epochSize - 1; - - try { - await this.startProof(fromBlock, toBlock); - } finally { - // If we fail to create a proving job for the given block, skip it instead of getting stuck on it. - this.log.verbose(`Setting ${toBlock} as latest block we are proving`); - this.latestBlockWeAreProving = toBlock; - } - } catch (err) { - this.log.error(`Error in prover node work`, err); - } - } - public sendEpochProofQuote(quote: EpochProofQuote): Promise { + this.log.info(`Sending quote for epoch`, quote.toViemArgs().quote); return this.coordination.addEpochProofQuote(quote); } /** * Creates a proof for a block range. Returns once the proof has been submitted to L1. */ - public async prove(fromBlock: number, toBlock: number) { - const job = await this.createProvingJob(fromBlock); - return job.run(fromBlock, toBlock); + public async prove(epochNumber: number | bigint) { + const job = await this.createProvingJob(BigInt(epochNumber)); + return job.run(); } /** * Starts a proving process and returns immediately. */ - public async startProof(fromBlock: number, toBlock: number) { - const job = await this.createProvingJob(fromBlock); - void job.run(fromBlock, toBlock); + public async startProof(epochNumber: number | bigint) { + const job = await this.createProvingJob(BigInt(epochNumber)); + void job.run().catch(err => this.log.error(`Error proving epoch ${epochNumber}`, err)); } /** @@ -174,17 +204,25 @@ export class ProverNode { return maxPendingJobs === 0 || this.jobs.size < maxPendingJobs; } - private async createProvingJob(fromBlock: number) { + private async createProvingJob(epochNumber: bigint) { if (!this.checkMaximumPendingJobs()) { throw new Error(`Maximum pending proving jobs ${this.options.maxPendingJobs} reached. 
Cannot create new job.`); } + // Gather blocks for this epoch + const blocks = await this.l2BlockSource.getBlocksForEpoch(epochNumber); + if (blocks.length === 0) { + throw new Error(`No blocks found for epoch ${epochNumber}`); + } + const fromBlock = blocks[0].number; + const toBlock = blocks.at(-1)!.number; + if ((await this.worldState.status()).syncedToL2Block >= fromBlock) { throw new Error(`Cannot create proving job for block ${fromBlock} as it is behind the current world state`); } // Fast forward world state to right before the target block and get a fork - this.log.verbose(`Creating proving job for block ${fromBlock}`); + this.log.verbose(`Creating proving job for epoch ${epochNumber} for block range ${fromBlock} to ${toBlock}`); const db = await this.worldState.syncImmediateAndFork(fromBlock - 1, true); // Create a processor using the forked world state @@ -200,18 +238,22 @@ export class ProverNode { this.jobs.delete(job.getId()); }; - const job = this.doCreateEpochProvingJob(db, publicProcessorFactory, cleanUp); + const job = this.doCreateEpochProvingJob(epochNumber, blocks, db, publicProcessorFactory, cleanUp); this.jobs.set(job.getId(), job); return job; } /** Extracted for testing purposes. */ protected doCreateEpochProvingJob( + epochNumber: bigint, + blocks: L2Block[], db: MerkleTreeOperations, publicProcessorFactory: PublicProcessorFactory, cleanUp: () => Promise, ) { return new EpochProvingJob( + epochNumber, + blocks, this.prover.createEpochProver(db), publicProcessorFactory, this.publisher, @@ -222,4 +264,10 @@ export class ProverNode { cleanUp, ); } + + /** Extracted for testing purposes. */ + protected async triggerMonitors() { + await this.epochsMonitor.work(); + await this.claimsMonitor.work(); + } } diff --git a/yarn-project/prover-node/src/quote-provider/http.ts b/yarn-project/prover-node/src/quote-provider/http.ts new file mode 100644 index 00000000000..b5ada13cbb9 --- /dev/null +++ b/yarn-project/prover-node/src/quote-provider/http.ts @@ -0,0 +1,2 @@ +// TODO: Implement me! This should send a request to a configurable URL to get the quote. 
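+// A minimal sketch of one way this could look. Everything below is an assumption rather than part
+// of this change: the endpoint URL, the JSON request body (totalFees/txCount/blockCount), and the
+// response fields (basisPointFee/bondAmount) are hypothetical; getTotalFees/getTxCount come from
+// ./utils.js added in this same change, and fetch is the global available in Node 18+.
+//
+// import { type L2Block } from '@aztec/circuit-types';
+// import { type QuoteProvider } from './index.js';
+// import { getTotalFees, getTxCount } from './utils.js';
+//
+// export class HttpQuoteProvider implements QuoteProvider {
+//   constructor(private readonly url: string) {}
+//
+//   async getQuote(epoch: L2Block[]) {
+//     // Summarise the epoch for the external pricing service.
+//     const body = JSON.stringify({
+//       totalFees: getTotalFees(epoch).toString(),
+//       txCount: getTxCount(epoch),
+//       blockCount: epoch.length,
+//     });
+//     const response = await fetch(this.url, { method: 'POST', headers: { 'content-type': 'application/json' }, body });
+//     if (!response.ok) {
+//       return undefined; // No quote if the pricing service is unavailable.
+//     }
+//     const data = await response.json();
+//     return { basisPointFee: Number(data.basisPointFee), bondAmount: BigInt(data.bondAmount) };
+//   }
+// }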
+export class HttpQuoteProvider {} diff --git a/yarn-project/prover-node/src/quote-provider/index.ts b/yarn-project/prover-node/src/quote-provider/index.ts new file mode 100644 index 00000000000..770833cb280 --- /dev/null +++ b/yarn-project/prover-node/src/quote-provider/index.ts @@ -0,0 +1,8 @@ +import { type EpochProofQuotePayload, type L2Block } from '@aztec/circuit-types'; + +type QuoteProviderResult = Pick & + Partial>; + +export interface QuoteProvider { + getQuote(epoch: L2Block[]): Promise; +} diff --git a/yarn-project/prover-node/src/quote-provider/simple.ts b/yarn-project/prover-node/src/quote-provider/simple.ts new file mode 100644 index 00000000000..c2eff3fc5c0 --- /dev/null +++ b/yarn-project/prover-node/src/quote-provider/simple.ts @@ -0,0 +1,12 @@ +import { type EpochProofQuotePayload, type L2Block } from '@aztec/circuit-types'; + +import { type QuoteProvider } from './index.js'; + +export class SimpleQuoteProvider implements QuoteProvider { + constructor(public readonly basisPointFee: number, public readonly bondAmount: bigint) {} + + getQuote(_epoch: L2Block[]): Promise> { + const { basisPointFee, bondAmount } = this; + return Promise.resolve({ basisPointFee, bondAmount }); + } +} diff --git a/yarn-project/prover-node/src/quote-provider/utils.ts b/yarn-project/prover-node/src/quote-provider/utils.ts new file mode 100644 index 00000000000..253480476c1 --- /dev/null +++ b/yarn-project/prover-node/src/quote-provider/utils.ts @@ -0,0 +1,10 @@ +import { type L2Block } from '@aztec/circuit-types'; +import { Fr } from '@aztec/circuits.js'; + +export function getTotalFees(epoch: L2Block[]) { + return epoch.reduce((total, block) => total.add(block.header.totalFees), Fr.ZERO).toBigInt(); +} + +export function getTxCount(epoch: L2Block[]) { + return epoch.reduce((total, block) => total + block.body.txEffects.length, 0); +} diff --git a/yarn-project/prover-node/src/quote-signer.ts b/yarn-project/prover-node/src/quote-signer.ts new file mode 100644 index 00000000000..0c2f4bd9c4e --- /dev/null +++ b/yarn-project/prover-node/src/quote-signer.ts @@ -0,0 +1,24 @@ +import { EpochProofQuote, type EpochProofQuotePayload } from '@aztec/circuit-types'; +import { Buffer32 } from '@aztec/foundation/buffer'; +import { Secp256k1Signer } from '@aztec/foundation/crypto'; +import { type RollupAbi } from '@aztec/l1-artifacts'; + +import { type GetContractReturnType, type PublicClient } from 'viem'; + +export class QuoteSigner { + constructor( + private readonly signer: Secp256k1Signer, + private readonly quoteToDigest: (payload: EpochProofQuotePayload) => Promise, + ) {} + + static new(privateKey: Buffer32, rollupContract: GetContractReturnType): QuoteSigner { + const quoteToDigest = (payload: EpochProofQuotePayload) => + rollupContract.read.quoteToDigest([payload.toViemArgs()]).then(Buffer32.fromString); + return new QuoteSigner(new Secp256k1Signer(privateKey), quoteToDigest); + } + + public async sign(payload: EpochProofQuotePayload) { + const digest = await this.quoteToDigest(payload); + return EpochProofQuote.new(digest, payload, this.signer); + } +} diff --git a/yarn-project/prover-node/tsconfig.json b/yarn-project/prover-node/tsconfig.json index ed7c2e5772e..b5e774e1b8a 100644 --- a/yarn-project/prover-node/tsconfig.json +++ b/yarn-project/prover-node/tsconfig.json @@ -15,12 +15,18 @@ { "path": "../circuits.js" }, + { + "path": "../ethereum" + }, { "path": "../foundation" }, { "path": "../kv-store" }, + { + "path": "../l1-artifacts" + }, { "path": "../prover-client" }, diff --git 
a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 99df20363d1..79ea85ffae0 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -1,5 +1,6 @@ import { ConsensusPayload, + type EpochProofClaim, type EpochProofQuote, type L2Block, type TxHash, @@ -30,6 +31,7 @@ import { type TelemetryClient } from '@aztec/telemetry-client'; import pick from 'lodash.pick'; import { inspect } from 'util'; import { + type BaseError, ContractFunctionRevertedError, type GetContractReturnType, type Hex, @@ -40,6 +42,7 @@ import { createPublicClient, createWalletClient, encodeFunctionData, + getAbiItem, getAddress, getContract, hexToBytes, @@ -161,8 +164,8 @@ export class L1Publisher { }); } - public getSenderAddress(): Promise { - return Promise.resolve(EthAddress.fromString(this.account.address)); + public getSenderAddress(): EthAddress { + return EthAddress.fromString(this.account.address); } /** @@ -188,11 +191,47 @@ export class L1Publisher { return await this.rollupContract.read.getEpochAtSlot([slotNumber]); } + public async getEpochToProve(): Promise { + try { + return await this.rollupContract.read.getEpochToProve(); + } catch (err: any) { + // If this is a revert with Rollup__NoEpochToProve, it means there is no epoch to prove, so we return undefined + // See https://viem.sh/docs/contract/simulateContract#handling-custom-errors + const errorName = tryGetCustomErrorName(err); + if (errorName === getAbiItem({ abi: RollupAbi, name: 'Rollup__NoEpochToProve' }).name) { + return undefined; + } + throw err; + } + } + + public async getProofClaim(): Promise { + const [epochToProve, basisPointFee, bondAmount, bondProviderHex, proposerClaimantHex] = + await this.rollupContract.read.proofClaim(); + + const bondProvider = EthAddress.fromString(bondProviderHex); + const proposerClaimant = EthAddress.fromString(proposerClaimantHex); + + if (bondProvider.isZero() && proposerClaimant.isZero() && epochToProve === 0n) { + return undefined; + } + + return { + epochToProve, + basisPointFee, + bondAmount, + bondProvider, + proposerClaimant, + }; + } + public async validateProofQuote(quote: EpochProofQuote): Promise { const args = [quote.toViemArgs()] as const; try { await this.rollupContract.read.validateEpochProofRightClaim(args, { account: this.account }); } catch (err) { + const errorName = tryGetCustomErrorName(err); + this.log.verbose(`Proof quote validation failed: ${errorName}`); return undefined; } return quote; @@ -306,7 +345,7 @@ export class L1Publisher { signatures: attestations ?? [], }); - this.log.verbose(`Submitting propose transaction with `); + this.log.verbose(`Submitting propose transaction`); const txHash = proofQuote ? 
await this.sendProposeAndClaimTx(proposeTxArgs, proofQuote) @@ -632,3 +671,18 @@ export class L1Publisher { function getCalldataGasUsage(data: Uint8Array) { return data.filter(byte => byte === 0).length * 4 + data.filter(byte => byte !== 0).length * 16; } + +function tryGetCustomErrorName(err: any) { + try { + // See https://viem.sh/docs/contract/simulateContract#handling-custom-errors + if (err.name === 'ViemError') { + const baseError = err as BaseError; + const revertError = baseError.walk(err => (err as Error).name === 'ContractFunctionRevertedError'); + if (revertError) { + return (revertError as ContractFunctionRevertedError).data?.errorName; + } + } + } catch (_e) { + return undefined; + } +} diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 3e83d8a40c0..29de9493108 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -501,7 +501,7 @@ export class Sequencer { this.log.info( `Submitted rollup block ${block.number} with ${ processedTxs.length - } transactions duration=${workDuration}ms (Submitter: ${await this.publisher.getSenderAddress()})`, + } transactions duration=${workDuration}ms (Submitter: ${this.publisher.getSenderAddress()})`, ); } catch (err) { this.metrics.recordFailedBlock(); diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 5275032f798..0c725d90abe 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -844,6 +844,7 @@ __metadata: version: 0.0.0-use.local resolution: "@aztec/p2p@workspace:p2p" dependencies: + "@aztec/archiver": "workspace:^" "@aztec/circuit-types": "workspace:^" "@aztec/circuits.js": "workspace:^" "@aztec/foundation": "workspace:^" @@ -967,8 +968,10 @@ __metadata: "@aztec/archiver": "workspace:^" "@aztec/circuit-types": "workspace:^" "@aztec/circuits.js": "workspace:^" + "@aztec/ethereum": "workspace:^" "@aztec/foundation": "workspace:^" "@aztec/kv-store": "workspace:^" + "@aztec/l1-artifacts": "workspace:^" "@aztec/prover-client": "workspace:^" "@aztec/sequencer-client": "workspace:^" "@aztec/simulator": "workspace:^" @@ -986,6 +989,7 @@ __metadata: ts-node: ^10.9.1 tslib: ^2.4.0 typescript: ^5.0.4 + viem: ^2.7.15 languageName: unknown linkType: soft