diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 51f4ad4f167..36dbb075eb0 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -140,7 +140,7 @@ export class Archiver implements ArchiveSource { } if (blockUntilSynced) { - this.log.info(`Performing initial chain sync...`); + this.log.info(`Performing initial chain sync to rollup contract ${this.rollupAddress.toString()}`); await this.sync(blockUntilSynced); } @@ -165,15 +165,13 @@ export class Archiver implements ArchiveSource { * * This code does not handle reorgs. */ - const l1SynchPoint = await this.store.getSynchPoint(); + const { blocksSynchedTo, messagesSynchedTo } = await this.store.getSynchPoint(); const currentL1BlockNumber = await this.publicClient.getBlockNumber(); - if ( - currentL1BlockNumber <= l1SynchPoint.blocksSynchedTo && - currentL1BlockNumber <= l1SynchPoint.messagesSynchedTo - ) { + if (currentL1BlockNumber <= blocksSynchedTo && currentL1BlockNumber <= messagesSynchedTo) { // chain hasn't moved forward // or it's been rolled back + this.log.debug(`Nothing to sync`, { currentL1BlockNumber, blocksSynchedTo, messagesSynchedTo }); return; } @@ -204,14 +202,14 @@ export class Archiver implements ArchiveSource { this.publicClient, this.inboxAddress, blockUntilSynced, - l1SynchPoint.messagesSynchedTo + 1n, + messagesSynchedTo + 1n, currentL1BlockNumber, ); if (retrievedL1ToL2Messages.retrievedData.length !== 0) { this.log.verbose( `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 -> L2 messages between L1 blocks ${ - l1SynchPoint.messagesSynchedTo + 1n + messagesSynchedTo + 1n } and ${currentL1BlockNumber}.`, ); } @@ -225,7 +223,7 @@ export class Archiver implements ArchiveSource { this.publicClient, this.availabilityOracleAddress, blockUntilSynced, - l1SynchPoint.blocksSynchedTo + 1n, + blocksSynchedTo + 1n, currentL1BlockNumber, ); @@ -240,7 +238,7 @@ export 
class Archiver implements ArchiveSource { this.publicClient, this.rollupAddress, blockUntilSynced, - l1SynchPoint.blocksSynchedTo + 1n, + blocksSynchedTo + 1n, currentL1BlockNumber, nextExpectedL2BlockNum, ); @@ -259,15 +257,11 @@ export class Archiver implements ArchiveSource { (blockMetadata, i) => new L2Block(blockMetadata[1], blockMetadata[0], blockBodiesFromStore[i]), ); - if (blocks.length === 0) { - return; - } else { - this.log.verbose( - `Retrieved ${blocks.length} new L2 blocks between L1 blocks ${ - l1SynchPoint.blocksSynchedTo + 1n - } and ${currentL1BlockNumber}.`, - ); - } + (blocks.length ? this.log.verbose : this.log.debug)( + `Retrieved ${blocks.length || 'no'} new L2 blocks between L1 blocks ${ + blocksSynchedTo + 1n + } and ${currentL1BlockNumber}.`, + ); retrievedBlocks = { lastProcessedL1BlockNumber: retrievedBlockMetadata.lastProcessedL1BlockNumber, @@ -296,19 +290,27 @@ export class Archiver implements ArchiveSource { }), ); - await this.store.addBlocks(retrievedBlocks); - this.instrumentation.processNewBlocks(retrievedBlocks.retrievedData); + if (retrievedBlocks.retrievedData.length > 0) { + await this.store.addBlocks(retrievedBlocks); + this.instrumentation.processNewBlocks(retrievedBlocks.retrievedData); + const lastL2BlockNumber = retrievedBlocks.retrievedData[retrievedBlocks.retrievedData.length - 1].number; + this.log.verbose(`Processed ${retrievedBlocks.retrievedData.length} new L2 blocks up to ${lastL2BlockNumber}`); + } // Fetch the logs for proven blocks in the block range and update the last proven block number. // Note it's ok to read repeated data here, since we're just using the largest number we see on the logs. - await this.updateLastProvenL2Block(l1SynchPoint.blocksSynchedTo, currentL1BlockNumber); + await this.updateLastProvenL2Block(blocksSynchedTo, currentL1BlockNumber); + + if (retrievedBlocks.retrievedData.length > 0 || blockUntilSynced) { + (blockUntilSynced ? 
this.log.info : this.log.verbose)(`Synced to L1 block ${currentL1BlockNumber}`); + } } private async updateLastProvenL2Block(fromBlock: bigint, toBlock: bigint) { const logs = await this.publicClient.getLogs({ address: this.rollupAddress.toString(), fromBlock, - toBlock, + toBlock: toBlock + 1n, // toBlock is exclusive strict: true, event: getAbiItem({ abi: RollupAbi, name: 'L2ProofVerified' }), }); @@ -319,7 +321,15 @@ export class Archiver implements ArchiveSource { } const provenBlockNumber = lastLog.args.blockNumber; - await this.store.setProvenL2BlockNumber(Number(provenBlockNumber)); + if (!provenBlockNumber) { + throw new Error(`Missing argument blockNumber from L2ProofVerified event`); + } + + const currentProvenBlockNumber = await this.store.getProvenL2BlockNumber(); + if (provenBlockNumber > currentProvenBlockNumber) { + this.log.verbose(`Updated last proven block number from ${currentProvenBlockNumber} to ${provenBlockNumber}`); + await this.store.setProvenL2BlockNumber(Number(provenBlockNumber)); + } } /** @@ -502,6 +512,11 @@ export class Archiver implements ArchiveSource { return this.store.getProvenL2BlockNumber(); } + /** Forcefully updates the last proven block number. Use for testing. */ + public setProvenBlockNumber(block: number): Promise { + return this.store.setProvenL2BlockNumber(block); + } + public getContractClass(id: Fr): Promise { return this.store.getContractClass(id); } diff --git a/yarn-project/archiver/src/archiver/config.ts b/yarn-project/archiver/src/archiver/config.ts index 362d472f20f..c59e6d1789c 100644 --- a/yarn-project/archiver/src/archiver/config.ts +++ b/yarn-project/archiver/src/archiver/config.ts @@ -1,5 +1,4 @@ -import { type L1ContractAddresses } from '@aztec/ethereum'; -import { EthAddress } from '@aztec/foundation/eth-address'; +import { type L1ContractAddresses, getL1ContractAddressesFromEnv } from '@aztec/ethereum'; /** * There are 2 polling intervals used in this configuration. 
The first is the archiver polling interval, archiverPollingIntervalMS. @@ -12,6 +11,11 @@ import { EthAddress } from '@aztec/foundation/eth-address'; * The archiver configuration. */ export interface ArchiverConfig { + /** + * URL for an archiver service. If set, will return an archiver client as opposed to starting a new one. + */ + archiverUrl?: string; + /** * The url of the Ethereum RPC node. */ @@ -45,7 +49,7 @@ export interface ArchiverConfig { /** * Optional dir to store data. If omitted will store in memory. */ - dataDirectory?: string; + dataDirectory: string | undefined; /** The max number of logs that can be obtained in 1 "getUnencryptedLogs" call. */ maxLogs?: number; @@ -56,43 +60,24 @@ export interface ArchiverConfig { * Note: If an environment variable is not set, the default value is used. * @returns The archiver configuration. */ -export function getConfigEnvVars(): ArchiverConfig { +export function getArchiverConfigFromEnv(): ArchiverConfig { const { ETHEREUM_HOST, L1_CHAIN_ID, ARCHIVER_POLLING_INTERVAL_MS, ARCHIVER_VIEM_POLLING_INTERVAL_MS, - AVAILABILITY_ORACLE_CONTRACT_ADDRESS, - ROLLUP_CONTRACT_ADDRESS, API_KEY, - INBOX_CONTRACT_ADDRESS, - OUTBOX_CONTRACT_ADDRESS, - REGISTRY_CONTRACT_ADDRESS, - GAS_TOKEN_CONTRACT_ADDRESS, - GAS_PORTAL_CONTRACT_ADDRESS, DATA_DIRECTORY, + ARCHIVER_URL, } = process.env; - // Populate the relevant addresses for use by the archiver. - const addresses: L1ContractAddresses = { - availabilityOracleAddress: AVAILABILITY_ORACLE_CONTRACT_ADDRESS - ? EthAddress.fromString(AVAILABILITY_ORACLE_CONTRACT_ADDRESS) - : EthAddress.ZERO, - rollupAddress: ROLLUP_CONTRACT_ADDRESS ? EthAddress.fromString(ROLLUP_CONTRACT_ADDRESS) : EthAddress.ZERO, - registryAddress: REGISTRY_CONTRACT_ADDRESS ? EthAddress.fromString(REGISTRY_CONTRACT_ADDRESS) : EthAddress.ZERO, - inboxAddress: INBOX_CONTRACT_ADDRESS ? EthAddress.fromString(INBOX_CONTRACT_ADDRESS) : EthAddress.ZERO, - outboxAddress: OUTBOX_CONTRACT_ADDRESS ? 
EthAddress.fromString(OUTBOX_CONTRACT_ADDRESS) : EthAddress.ZERO, - gasTokenAddress: GAS_TOKEN_CONTRACT_ADDRESS ? EthAddress.fromString(GAS_TOKEN_CONTRACT_ADDRESS) : EthAddress.ZERO, - gasPortalAddress: GAS_PORTAL_CONTRACT_ADDRESS - ? EthAddress.fromString(GAS_PORTAL_CONTRACT_ADDRESS) - : EthAddress.ZERO, - }; return { rpcUrl: ETHEREUM_HOST || '', l1ChainId: L1_CHAIN_ID ? +L1_CHAIN_ID : 31337, // 31337 is the default chain id for anvil archiverPollingIntervalMS: ARCHIVER_POLLING_INTERVAL_MS ? +ARCHIVER_POLLING_INTERVAL_MS : 1_000, viemPollingIntervalMS: ARCHIVER_VIEM_POLLING_INTERVAL_MS ? +ARCHIVER_VIEM_POLLING_INTERVAL_MS : 1_000, apiKey: API_KEY, - l1Contracts: addresses, + l1Contracts: getL1ContractAddressesFromEnv(), dataDirectory: DATA_DIRECTORY, + archiverUrl: ARCHIVER_URL, }; } diff --git a/yarn-project/archiver/src/factory.ts b/yarn-project/archiver/src/factory.ts new file mode 100644 index 00000000000..ad7d6cfcdb2 --- /dev/null +++ b/yarn-project/archiver/src/factory.ts @@ -0,0 +1,23 @@ +import { type AztecKVStore } from '@aztec/kv-store'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; + +import { Archiver } from './archiver/archiver.js'; +import { type ArchiverConfig } from './archiver/config.js'; +import { KVArchiverDataStore } from './archiver/index.js'; +import { createArchiverClient } from './rpc/archiver_client.js'; + +export function createArchiver( + config: ArchiverConfig, + store: AztecKVStore, + telemetry: TelemetryClient = new NoopTelemetryClient(), + opts: { blockUntilSync: boolean } = { blockUntilSync: true }, +) { + if (!config.archiverUrl) { + // first create and sync the archiver + const archiverStore = new KVArchiverDataStore(store, config.maxLogs); + return Archiver.createAndSync(config, archiverStore, telemetry, opts.blockUntilSync); + } else { + return createArchiverClient(config.archiverUrl); + } +} diff --git a/yarn-project/archiver/src/index.ts 
b/yarn-project/archiver/src/index.ts index cf4549e81a0..b961866522f 100644 --- a/yarn-project/archiver/src/index.ts +++ b/yarn-project/archiver/src/index.ts @@ -5,11 +5,12 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { createPublicClient, http } from 'viem'; import { localhost } from 'viem/chains'; -import { Archiver, getConfigEnvVars } from './archiver/index.js'; +import { Archiver, getArchiverConfigFromEnv } from './archiver/index.js'; import { MemoryArchiverStore } from './archiver/memory_archiver_store/memory_archiver_store.js'; export * from './archiver/index.js'; export * from './rpc/index.js'; +export * from './factory.js'; const log = createDebugLogger('aztec:archiver'); @@ -18,7 +19,7 @@ const log = createDebugLogger('aztec:archiver'); */ // eslint-disable-next-line require-await async function main() { - const config = getConfigEnvVars(); + const config = getArchiverConfigFromEnv(); const { rpcUrl, l1Contracts } = config; const publicClient = createPublicClient({ diff --git a/yarn-project/aztec-node/src/aztec-node/config.ts b/yarn-project/aztec-node/src/aztec-node/config.ts index 54a73db1eb6..8d2f883f1bf 100644 --- a/yarn-project/aztec-node/src/aztec-node/config.ts +++ b/yarn-project/aztec-node/src/aztec-node/config.ts @@ -1,8 +1,8 @@ -import { type ArchiverConfig, getConfigEnvVars as getArchiverVars } from '@aztec/archiver'; +import { type ArchiverConfig, getArchiverConfigFromEnv as getArchiverVars } from '@aztec/archiver'; import { type P2PConfig, getP2PConfigEnvVars } from '@aztec/p2p'; import { type ProverClientConfig, getProverEnvVars } from '@aztec/prover-client'; import { type SequencerClientConfig, getConfigEnvVars as getSequencerVars } from '@aztec/sequencer-client'; -import { getConfigEnvVars as getWorldStateVars } from '@aztec/world-state'; +import { type WorldStateConfig, getWorldStateConfigFromEnv as getWorldStateVars } from '@aztec/world-state'; import { readFileSync } from 'fs'; import { dirname, resolve } 
from 'path'; @@ -14,15 +14,13 @@ import { fileURLToPath } from 'url'; export type AztecNodeConfig = ArchiverConfig & SequencerClientConfig & ProverClientConfig & + WorldStateConfig & P2PConfig & { /** Whether the sequencer is disabled for this node. */ disableSequencer: boolean; /** Whether the prover is disabled for this node. */ disableProver: boolean; - - /** A URL for an archiver service that the node will use. */ - archiverUrl?: string; }; /** @@ -30,7 +28,7 @@ export type AztecNodeConfig = ArchiverConfig & * @returns A valid aztec node config. */ export function getConfigEnvVars(): AztecNodeConfig { - const { SEQ_DISABLED, PROVER_DISABLED = '', ARCHIVER_URL } = process.env; + const { SEQ_DISABLED, PROVER_DISABLED = '' } = process.env; const allEnvVars: AztecNodeConfig = { ...getSequencerVars(), @@ -39,7 +37,6 @@ export function getConfigEnvVars(): AztecNodeConfig { ...getWorldStateVars(), ...getProverEnvVars(), disableSequencer: !!SEQ_DISABLED, - archiverUrl: ARCHIVER_URL, disableProver: ['1', 'true'].includes(PROVER_DISABLED), }; diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index f069de3afbd..0fb6a75f9d4 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -1,4 +1,4 @@ -import { type ArchiveSource, Archiver, KVArchiverDataStore, createArchiverClient } from '@aztec/archiver'; +import { createArchiver } from '@aztec/archiver'; import { BBCircuitVerifier, TestCircuitVerifier } from '@aztec/bb-prover'; import { AggregateTxValidator, @@ -51,8 +51,7 @@ import { padArrayEnd } from '@aztec/foundation/collection'; import { sha256Trunc } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { type AztecKVStore } from '@aztec/kv-store'; -import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; -import { initStoreForRollup, openTmpStore } from '@aztec/kv-store/utils'; +import { createStore, 
openTmpStore } from '@aztec/kv-store/utils'; import { SHA256Trunc, StandardTree, UnbalancedTree } from '@aztec/merkle-tree'; import { AztecKVTxPool, type P2P, createP2PClient } from '@aztec/p2p'; import { getCanonicalClassRegisterer } from '@aztec/protocol-contracts/class-registerer'; @@ -60,9 +59,9 @@ import { getCanonicalGasToken } from '@aztec/protocol-contracts/gas-token'; import { getCanonicalInstanceDeployer } from '@aztec/protocol-contracts/instance-deployer'; import { getCanonicalKeyRegistryAddress } from '@aztec/protocol-contracts/key-registry'; import { getCanonicalMultiCallEntrypointAddress } from '@aztec/protocol-contracts/multi-call-entrypoint'; -import { TxProver } from '@aztec/prover-client'; +import { createProverClient } from '@aztec/prover-client'; import { type GlobalVariableBuilder, SequencerClient, getGlobalVariableBuilder } from '@aztec/sequencer-client'; -import { PublicProcessorFactory, WASMSimulator } from '@aztec/simulator'; +import { PublicProcessorFactory, WASMSimulator, createSimulationProvider } from '@aztec/simulator'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { @@ -71,16 +70,9 @@ import { type ContractInstanceWithAddress, type ProtocolContractAddresses, } from '@aztec/types/contracts'; -import { - MerkleTrees, - ServerWorldStateSynchronizer, - type WorldStateConfig, - type WorldStateSynchronizer, - getConfigEnvVars as getWorldStateConfig, -} from '@aztec/world-state'; +import { MerkleTrees, type WorldStateSynchronizer, createWorldStateSynchronizer } from '@aztec/world-state'; import { type AztecNodeConfig, getPackageInfo } from './config.js'; -import { getSimulationProvider } from './simulator-factory.js'; import { MetadataTxValidator } from './tx_validator/tx_metadata_validator.js'; import { TxProofValidator } from './tx_validator/tx_proof_validator.js'; @@ -140,32 +132,19 @@ export class AztecNodeService implements AztecNode { ); } - 
const store = await initStoreForRollup( - AztecLmdbStore.open(config.dataDirectory, false, storeLog), - config.l1Contracts.rollupAddress, - storeLog, - ); + const store = await createStore(config, config.l1Contracts.rollupAddress, storeLog); - let archiver: ArchiveSource; - if (!config.archiverUrl) { - // first create and sync the archiver - const archiverStore = new KVArchiverDataStore(store, config.maxLogs); - archiver = await Archiver.createAndSync(config, archiverStore, telemetry, true); - } else { - archiver = createArchiverClient(config.archiverUrl); - } + const archiver = await createArchiver(config, store, telemetry, { blockUntilSync: true }); // we identify the P2P transaction protocol by using the rollup contract address. // this may well change in future config.transactionProtocol = `/aztec/tx/${config.l1Contracts.rollupAddress.toString()}`; // create the tx pool and the p2p client, which will need the l2 block source - const p2pClient = await createP2PClient(store, config, new AztecKVTxPool(store, telemetry), archiver); + const p2pClient = await createP2PClient(config, store, new AztecKVTxPool(store, telemetry), archiver); // now create the merkle trees and the world state synchronizer - const merkleTrees = await MerkleTrees.new(store); - const worldStateConfig: WorldStateConfig = getWorldStateConfig(); - const worldStateSynchronizer = new ServerWorldStateSynchronizer(store, merkleTrees, archiver, worldStateConfig); + const worldStateSynchronizer = await createWorldStateSynchronizer(config, store, archiver); // start both and wait for them to sync from the block source await Promise.all([p2pClient.start(), worldStateSynchronizer.start()]); @@ -176,18 +155,9 @@ export class AztecNodeService implements AztecNode { new TxProofValidator(proofVerifier), ); - // start the prover if we have been told to - const simulationProvider = await getSimulationProvider(config, log); - const prover = config.disableProver - ? 
undefined - : await TxProver.new( - config, - worldStateSynchronizer, - telemetry, - await archiver - .getBlock(-1) - .then(b => b?.header ?? worldStateSynchronizer.getCommitted().buildInitialHeader()), - ); + const simulationProvider = await createSimulationProvider(config, log); + + const prover = await createProverClient(config, worldStateSynchronizer, archiver, telemetry); if (!prover && !config.disableSequencer) { throw new Error("Can't start a sequencer without a prover"); @@ -241,6 +211,10 @@ export class AztecNodeService implements AztecNode { return this.prover; } + public getBlockSource(): L2BlockSource { + return this.blockSource; + } + /** * Method to return the currently deployed L1 contract addresses. * @returns - The currently deployed L1 contract addresses. @@ -284,6 +258,10 @@ export class AztecNodeService implements AztecNode { return await this.blockSource.getBlockNumber(); } + public async getProvenBlockNumber(): Promise { + return await this.blockSource.getProvenBlockNumber(); + } + /** * Method to fetch the version of the package. * @returns The node package version @@ -363,8 +341,7 @@ export class AztecNodeService implements AztecNode { // We first check if the tx is in pending (instead of first checking if it is mined) because if we first check // for mined and then for pending there could be a race condition where the tx is mined between the two checks // and we would incorrectly return a TxReceipt with status DROPPED - const pendingTx = await this.getPendingTxByHash(txHash); - if (pendingTx) { + if (this.p2pClient.getTxStatus(txHash) === 'pending') { txReceipt = new TxReceipt(txHash, TxStatus.PENDING, ''); } @@ -397,17 +374,17 @@ export class AztecNodeService implements AztecNode { * Method to retrieve pending txs. * @returns - The pending txs. 
*/ - public async getPendingTxs() { - return await this.p2pClient!.getTxs(); + public getPendingTxs() { + return Promise.resolve(this.p2pClient!.getTxs('pending')); } /** - * Method to retrieve a single pending tx. + * Method to retrieve a single tx from the mempool or unfinalised chain. * @param txHash - The transaction hash to return. - * @returns - The pending tx if it exists. + * @returns - The tx if it exists. */ - public async getPendingTxByHash(txHash: TxHash) { - return await this.p2pClient!.getTxByHash(txHash); + public getTxByHash(txHash: TxHash) { + return Promise.resolve(this.p2pClient!.getTxByHash(txHash)); } /** diff --git a/yarn-project/aztec-node/src/aztec-node/simulator-factory.ts b/yarn-project/aztec-node/src/aztec-node/simulator-factory.ts deleted file mode 100644 index 9d947ae8665..00000000000 --- a/yarn-project/aztec-node/src/aztec-node/simulator-factory.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { type DebugLogger } from '@aztec/foundation/log'; -import { NativeACVMSimulator, type SimulationProvider, WASMSimulator } from '@aztec/simulator'; - -import * as fs from 'fs/promises'; - -import { type AztecNodeConfig } from './config.js'; - -export async function getSimulationProvider( - config: AztecNodeConfig, - logger?: DebugLogger, -): Promise { - if (config.acvmBinaryPath && config.acvmWorkingDirectory) { - try { - await fs.access(config.acvmBinaryPath, fs.constants.R_OK); - await fs.mkdir(config.acvmWorkingDirectory, { recursive: true }); - logger?.info( - `Using native ACVM at ${config.acvmBinaryPath} and working directory ${config.acvmWorkingDirectory}`, - ); - return new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath); - } catch { - logger?.warn(`Failed to access ACVM at ${config.acvmBinaryPath}, falling back to WASM`); - } - } - logger?.info('Using WASM ACVM simulation'); - return new WASMSimulator(); -} diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts 
index 0d5dffdc497..11c5b7fc3bb 100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -152,6 +152,9 @@ export abstract class BaseWallet implements Wallet { getBlockNumber(): Promise { return this.pxe.getBlockNumber(); } + getProvenBlockNumber(): Promise { + return this.pxe.getProvenBlockNumber(); + } getNodeInfo(): Promise { return this.pxe.getNodeInfo(); } diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index 85791746941..360a4cf7594 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -48,6 +48,7 @@ "@aztec/p2p-bootstrap": "workspace:^", "@aztec/protocol-contracts": "workspace:^", "@aztec/prover-client": "workspace:^", + "@aztec/prover-node": "workspace:^", "@aztec/pxe": "workspace:^", "@aztec/telemetry-client": "workspace:^", "@aztec/txe": "workspace:^", diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts index 35bb29f19f2..0d7101bae32 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -32,7 +32,8 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge .option('-px, --pxe [options]', cliTexts.pxe) .option('-a, --archiver [options]', cliTexts.archiver) .option('-s, --sequencer [options]', cliTexts.sequencer) - .option('-r, --prover [options]', cliTexts.prover) + .option('-r, --prover [options]', cliTexts.proverAgent) + .option('-o, --prover-node [options]', cliTexts.proverNode) .option('-p2p, --p2p-bootstrap [options]', cliTexts.p2pBootstrap) .option('-t, --txe [options]', cliTexts.txe) .action(async options => { @@ -69,6 +70,9 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge if (options.node) { const { startNode } = await import('./cmds/start_node.js'); services = await startNode(options, signalHandlers, userLog); + } else if (options.proverNode) { + const { startProverNode } = await 
import('./cmds/start_prover_node.js'); + services = await startProverNode(options, signalHandlers, userLog); } else if (options.pxe) { const { startPXE } = await import('./cmds/start_pxe.js'); services = await startPXE(options, signalHandlers, userLog); @@ -78,12 +82,18 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge } else if (options.p2pBootstrap) { const { startP2PBootstrap } = await import('./cmds/start_p2p_bootstrap.js'); await startP2PBootstrap(options, userLog, debugLogger); - } else if (options.prover) { - const { startProver } = await import('./cmds/start_prover.js'); - services = await startProver(options, signalHandlers, userLog); + } else if (options.proverAgent) { + const { startProverAgent } = await import('./cmds/start_prover_agent.js'); + services = await startProverAgent(options, signalHandlers, userLog); } else if (options.txe) { const { startTXE } = await import('./cmds/start_txe.js'); startTXE(options, debugLogger); + } else if (options.sequencer) { + userLog(`Cannot run a standalone sequencer without a node`); + process.exit(1); + } else { + userLog(`No module specified to start`); + process.exit(1); } } installSignalHandlers(debugLogger.info, signalHandlers); diff --git a/yarn-project/aztec/src/cli/cmds/start_archiver.ts b/yarn-project/aztec/src/cli/cmds/start_archiver.ts index ad2f296d958..4d6e487e532 100644 --- a/yarn-project/aztec/src/cli/cmds/start_archiver.ts +++ b/yarn-project/aztec/src/cli/cmds/start_archiver.ts @@ -3,7 +3,7 @@ import { type ArchiverConfig, KVArchiverDataStore, createArchiverRpcServer, - getConfigEnvVars as getArchiverConfigEnvVars, + getArchiverConfigFromEnv as getArchiverConfigEnvVars, } from '@aztec/archiver'; import { createDebugLogger } from '@aztec/aztec.js'; import { type ServerList } from '@aztec/foundation/json-rpc/server'; diff --git a/yarn-project/aztec/src/cli/cmds/start_node.ts b/yarn-project/aztec/src/cli/cmds/start_node.ts index 6ef8e67fe4b..a24e537e18c 100644 --- 
a/yarn-project/aztec/src/cli/cmds/start_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_node.ts @@ -34,14 +34,20 @@ export const startNode = async ( // merge env vars and cli options let nodeConfig = mergeEnvVarsAndCliOptions(aztecNodeConfigEnvVars, nodeCliOptions); + if (options.proverNode) { + // TODO(palla/prover-node) We need to tweak the semantics of disableProver so that it doesn't inject + // a null prover into the sequencer, but instead injects a circuit simulator, which is what the + // sequencer ultimately needs. + userLog(`Running a Prover Node within a Node is not yet supported`); + process.exit(1); + } + // Deploy contracts if needed - if (nodeCliOptions.deployAztecContracts || DEPLOY_AZTEC_CONTRACTS === 'true') { - let account; - if (nodeConfig.publisherPrivateKey === NULL_KEY) { - account = mnemonicToAccount(MNEMONIC); - } else { - account = privateKeyToAccount(nodeConfig.publisherPrivateKey); - } + if (nodeCliOptions.deployAztecContracts || ['1', 'true'].includes(DEPLOY_AZTEC_CONTRACTS ?? '')) { + const account = + nodeConfig.publisherPrivateKey === NULL_KEY + ? mnemonicToAccount(MNEMONIC) + : privateKeyToAccount(nodeConfig.publisherPrivateKey); await deployContractsToL1(nodeConfig, account); } @@ -81,6 +87,8 @@ export const startNode = async ( } if (!nodeConfig.disableSequencer && nodeConfig.disableProver) { + // TODO(palla/prover-node) Sequencer should not need a prover unless we are running the prover + // within it, it should just need a circuit simulator. We need to refactor the sequencer so it can accept either. 
throw new Error('Cannot run a sequencer without a prover'); } diff --git a/yarn-project/aztec/src/cli/cmds/start_prover.ts b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts similarity index 96% rename from yarn-project/aztec/src/cli/cmds/start_prover.ts rename to yarn-project/aztec/src/cli/cmds/start_prover_agent.ts index 64fd693f9ed..470756a373a 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts @@ -9,7 +9,7 @@ import { import { type ServiceStarter, parseModuleOptions } from '../util.js'; -export const startProver: ServiceStarter = async (options, signalHandlers, logger) => { +export const startProverAgent: ServiceStarter = async (options, signalHandlers, logger) => { const proverOptions = { ...getProverEnvVars(), ...parseModuleOptions(options.prover), diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts new file mode 100644 index 00000000000..43058a54fed --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts @@ -0,0 +1,84 @@ +import { createAztecNodeClient } from '@aztec/circuit-types'; +import { NULL_KEY } from '@aztec/ethereum'; +import { type ServerList } from '@aztec/foundation/json-rpc/server'; +import { type LogFn } from '@aztec/foundation/log'; +import { createProvingJobSourceServer } from '@aztec/prover-client/prover-agent'; +import { + type ProverNodeConfig, + createProverNode, + createProverNodeRpcServer, + getProverNodeConfigFromEnv, +} from '@aztec/prover-node'; +import { + createAndStartTelemetryClient, + getConfigEnvVars as getTelemetryClientConfig, +} from '@aztec/telemetry-client/start'; + +import { mnemonicToAccount } from 'viem/accounts'; + +import { MNEMONIC } from '../../sandbox.js'; +import { mergeEnvVarsAndCliOptions, parseModuleOptions } from '../util.js'; + +export const startProverNode = async ( + options: any, + signalHandlers: (() => Promise)[], + userLog: LogFn, +): Promise => 
{ + // Services that will be started in a single multi-rpc server + const services: ServerList = []; + + const envVars = getProverNodeConfigFromEnv(); + const cliOptions = parseModuleOptions(options.proverNode); + let proverConfig = mergeEnvVarsAndCliOptions(envVars, cliOptions); + + if (options.node || options.sequencer || options.pxe || options.p2pBootstrap || options.txe) { + userLog(`Starting a prover-node with --node, --sequencer, --pxe, --p2p-bootstrap, or --txe is not supported.`); + process.exit(1); + } + + if (options.archiver) { + proverConfig = mergeEnvVarsAndCliOptions(proverConfig, parseModuleOptions(options.archiver)); + } else if (!proverConfig.archiverUrl) { + userLog('Archiver URL is required to start a Prover Node without --archiver option'); + process.exit(1); + } + + if (options.prover) { + userLog(`Running prover node with local prover agent.`); + proverConfig = mergeEnvVarsAndCliOptions(proverConfig, parseModuleOptions(options.prover)); + proverConfig.proverAgentEnabled = true; + } else { + userLog(`Running prover node without local prover agent. Connect one or more prover agents to this node.`); + proverConfig.proverAgentEnabled = false; + } + + if (proverConfig.publisherPrivateKey === NULL_KEY || !proverConfig.publisherPrivateKey) { + const hdAccount = mnemonicToAccount(MNEMONIC); + const privKey = hdAccount.getHdKey().privateKey; + proverConfig.publisherPrivateKey = `0x${Buffer.from(privKey!).toString('hex')}`; + } + + // TODO(palla/prover-node) L1 contract addresses should not silently default to zero, + // they should be undefined if not set and fail loudly. + // Load l1 contract addresses from aztec node if not set. 
+ if (proverConfig.nodeUrl && proverConfig.l1Contracts.rollupAddress.isZero()) { + proverConfig.l1Contracts = await createAztecNodeClient(proverConfig.nodeUrl).getL1ContractAddresses(); + } + + const telemetry = createAndStartTelemetryClient(getTelemetryClientConfig()); + const proverNode = await createProverNode(proverConfig, { telemetry }); + + services.push({ node: createProverNodeRpcServer(proverNode) }); + + if (options.prover) { + const provingJobSource = createProvingJobSourceServer(proverNode.getProver().getProvingJobSource()); + services.push({ provingJobSource }); + } + + signalHandlers.push(proverNode.stop); + + // Automatically start proving unproven blocks + proverNode.start(); + + return services; +}; diff --git a/yarn-project/aztec/src/cli/texts.ts b/yarn-project/aztec/src/cli/texts.ts index 77f775b373d..af90eb09731 100644 --- a/yarn-project/aztec/src/cli/texts.ts +++ b/yarn-project/aztec/src/cli/texts.ts @@ -71,11 +71,19 @@ export const cliTexts = { 'allowedFeePaymentContractClasses:SEQ_FPC_CLASSES - string[] - Which fee payment contract classes the sequencer allows' + 'allowedFeePaymentContractInstances:SEQ_FPC_INSTANCES - string[] - Which fee payment contracts the sequencer allows.' + contractAddresses, - prover: - 'Starts a Prover with options. If started additionally to --node, the Prover will attach to that node.\n' + + proverAgent: + 'Starts a Prover Agent with options. If started additionally to --node or --prover-node, the Prover will attach to that node.\n' + 'Available options are listed below as cliProperty:ENV_VARIABLE_NAME.\n' + 'acvmBinaryPath:ACVM_BINARY_PATH - string - The full path to an instance of the acvm cli application. If not provided will fallback to WASM circuit simulation\n' + 'acvmWorkingDirectory:ACVM_WORKING_DIRECTORY - string - A directory to use for temporary files used by the acvm application. If not provided WASM circuit simulation will be used\n', + proverNode: + 'Starts a Prover Node with options. 
The node will automatically check for unproven blocks on L1 and attempt to prove them, and then upload the proof back to L1.\n' + + 'Available options are listed below as cliProperty:ENV_VARIABLE_NAME.\n' + + 'rpcUrl:ETHEREUM_HOST - string - The host of the Ethereum node to connect to. Default: http://localhost:8545\n' + + 'archiverUrl:ARCHIVER_URL - string - A URL for an archiver service that the node will use. Run with --archiver to spin up an archiver within this node.\n' + + 'nodeUrl:AZTEC_NODE_URL - string - The URL of the Aztec Node to connect to for fetching tx client proofs.\n' + + 'dataDirectory:DATA_DIRECTORY - string - Where to store node data. If not set, will store temporarily.\n' + + contractAddresses, p2pBootstrap: 'Starts a P2P bootstrap node with options.\n' + 'Available options are listed below as cliProperty:ENV_VARIABLE_NAME.\n' + diff --git a/yarn-project/aztec/tsconfig.json b/yarn-project/aztec/tsconfig.json index e738c2ced35..19fb94f3aca 100644 --- a/yarn-project/aztec/tsconfig.json +++ b/yarn-project/aztec/tsconfig.json @@ -66,6 +66,9 @@ { "path": "../prover-client" }, + { + "path": "../prover-node" + }, { "path": "../pxe" }, diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index a79a28ece15..4973715724f 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -176,6 +176,13 @@ export interface AztecNode { * @returns The block number. */ getBlockNumber(): Promise; + + /** + * Fetches the latest proven block number. + * @returns The block number. + */ + getProvenBlockNumber(): Promise; + /** * Method to determine if the node is ready to accept transactions. * @returns - Flag indicating the readiness for tx submission. @@ -281,7 +288,7 @@ export interface AztecNode { * @param txHash - The transaction hash to return. * @returns The pending tx if it exists. 
*/ - getPendingTxByHash(txHash: TxHash): Promise; + getTxByHash(txHash: TxHash): Promise; /** * Gets the storage value at the given contract storage slot. diff --git a/yarn-project/circuit-types/src/interfaces/configs.ts b/yarn-project/circuit-types/src/interfaces/configs.ts index 389c76f15f5..cd75cef56da 100644 --- a/yarn-project/circuit-types/src/interfaces/configs.ts +++ b/yarn-project/circuit-types/src/interfaces/configs.ts @@ -33,4 +33,6 @@ export interface SequencerConfig { maxBlockSizeInBytes?: number; /** Whether to require every tx to have a fee payer */ enforceFees?: boolean; + /** Temporary flag to skip submitting proofs, so a prover-node takes care of it. */ + sequencerSkipSubmitProofs?: boolean; } diff --git a/yarn-project/circuit-types/src/interfaces/index.ts b/yarn-project/circuit-types/src/interfaces/index.ts index 34c08cca316..c898f123b13 100644 --- a/yarn-project/circuit-types/src/interfaces/index.ts +++ b/yarn-project/circuit-types/src/interfaces/index.ts @@ -9,3 +9,4 @@ export * from './proving-job.js'; export * from './block-prover.js'; export * from './server_circuit_prover.js'; export * from './private_kernel_prover.js'; +export * from './tx-provider.js'; diff --git a/yarn-project/circuit-types/src/interfaces/pxe.ts b/yarn-project/circuit-types/src/interfaces/pxe.ts index b4e6e6eda2e..78f9285bf68 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.ts @@ -297,6 +297,12 @@ export interface PXE { */ getBlockNumber(): Promise; + /** + * Fetches the current proven block number. + * @returns The block number. + */ + getProvenBlockNumber(): Promise; + /** * Returns the information about the server's node. Includes current Node version, compatible Noir version, * L1 chain identifier, protocol version, and L1 address of the rollup contract. 
diff --git a/yarn-project/circuit-types/src/interfaces/tx-provider.ts b/yarn-project/circuit-types/src/interfaces/tx-provider.ts new file mode 100644 index 00000000000..872f9ce0282 --- /dev/null +++ b/yarn-project/circuit-types/src/interfaces/tx-provider.ts @@ -0,0 +1,12 @@ +import { type Tx } from '../tx/tx.js'; +import { type TxHash } from '../tx/tx_hash.js'; + +/** Provider for transaction objects given their hash. */ +export interface TxProvider { + /** + * Returns a transaction given its hash if available. + * @param txHash - The hash of the transaction, used as an ID. + * @returns The transaction, if found, 'undefined' otherwise. + */ + getTxByHash(txHash: TxHash): Promise; +} diff --git a/yarn-project/circuit-types/src/tx/index.ts b/yarn-project/circuit-types/src/tx/index.ts index fe213cb0ec7..0198209ebeb 100644 --- a/yarn-project/circuit-types/src/tx/index.ts +++ b/yarn-project/circuit-types/src/tx/index.ts @@ -6,3 +6,4 @@ export * from './tx_hash.js'; export * from './tx_receipt.js'; export * from './validator/aggregate_tx_validator.js'; export * from './validator/tx_validator.js'; +export * from './validator/empty_validator.js'; diff --git a/yarn-project/circuit-types/src/tx/validator/empty_validator.ts b/yarn-project/circuit-types/src/tx/validator/empty_validator.ts new file mode 100644 index 00000000000..b96bd9b2381 --- /dev/null +++ b/yarn-project/circuit-types/src/tx/validator/empty_validator.ts @@ -0,0 +1,7 @@ +import { type AnyTx, type TxValidator } from './tx_validator.js'; + +export class EmptyTxValidator implements TxValidator { + public validateTxs(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[]]> { + return Promise.resolve([txs, []]); + } +} diff --git a/yarn-project/cli/src/cmds/pxe/block_number.ts b/yarn-project/cli/src/cmds/pxe/block_number.ts index a4c017a2a75..7135f1d3d41 100644 --- a/yarn-project/cli/src/cmds/pxe/block_number.ts +++ b/yarn-project/cli/src/cmds/pxe/block_number.ts @@ -4,6 +4,7 @@ import { createCompatibleClient } from 
'../../client.js'; export async function blockNumber(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { const client = await createCompatibleClient(rpcUrl, debugLogger); - const num = await client.getBlockNumber(); - log(`${num}\n`); + const [latestNum, provenNum] = await Promise.all([client.getBlockNumber(), client.getProvenBlockNumber()]); + log(`Latest block: ${latestNum}`); + log(`Proven block: ${provenNum}\n`); } diff --git a/yarn-project/deploy_npm.sh b/yarn-project/deploy_npm.sh index b7154d1226c..03f33a893b5 100755 --- a/yarn-project/deploy_npm.sh +++ b/yarn-project/deploy_npm.sh @@ -104,5 +104,6 @@ deploy_package archiver deploy_package p2p deploy_package prover-client deploy_package sequencer-client +deploy_package prover-node deploy_package aztec-node deploy_package txe diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index 278edefd153..467edb6571a 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -139,6 +139,9 @@ e2e-pending-note-hashes-contract: e2e-private-voting-contract: DO +E2E_TEST --test=./src/e2e_private_voting_contract.test.ts +e2e-prover-node: + DO +E2E_TEST --test=./src/e2e_prover_node.test.ts + e2e-fees-private-payments: DO +E2E_TEST --test=./src/e2e_fees/private_payments.test.ts diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index c60db38d6f9..d942bdecdc7 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -39,6 +39,7 @@ "@aztec/p2p": "workspace:^", "@aztec/protocol-contracts": "workspace:^", "@aztec/prover-client": "workspace:^", + "@aztec/prover-node": "workspace:^", "@aztec/pxe": "workspace:^", "@aztec/sequencer-client": "workspace:^", "@aztec/simulator": "workspace:^", diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index 548265fc895..9830256a186 100644 --- 
a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -144,14 +144,14 @@ describe('L1Publisher integration', () => { }; const worldStateSynchronizer = new ServerWorldStateSynchronizer(tmpStore, builderDb, blockSource, worldStateConfig); await worldStateSynchronizer.start(); - builder = await TxProver.new(config, worldStateSynchronizer, new NoopTelemetryClient()); + builder = await TxProver.new(config, worldStateSynchronizer, blockSource, new NoopTelemetryClient()); publisher = getL1Publisher({ rpcUrl: config.rpcUrl, requiredConfirmations: 1, l1Contracts: l1ContractAddresses, publisherPrivateKey: sequencerPK, - l1BlockPublishRetryIntervalMS: 100, + l1PublishRetryIntervalMS: 100, l1ChainId: 31337, }); diff --git a/yarn-project/end-to-end/src/e2e_prover_node.test.ts b/yarn-project/end-to-end/src/e2e_prover_node.test.ts new file mode 100644 index 00000000000..af0bf72045a --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_prover_node.test.ts @@ -0,0 +1,112 @@ +import { getSchnorrAccount } from '@aztec/accounts/schnorr'; +import { type Archiver } from '@aztec/archiver'; +import { + type AccountWalletWithSecretKey, + type AztecAddress, + type DebugLogger, + type FieldsOf, + type TxReceipt, + createDebugLogger, + retryUntil, + sleep, +} from '@aztec/aztec.js'; +import { StatefulTestContract } from '@aztec/noir-contracts.js'; +import { createProverNode } from '@aztec/prover-node'; +import { type SequencerClientConfig } from '@aztec/sequencer-client'; + +import { + type ISnapshotManager, + type SubsystemsContext, + addAccounts, + createSnapshotManager, +} from './fixtures/snapshot_manager.js'; + +// Tests simple block building with a sequencer that does not upload proofs to L1, +// and then follows with a prover node run (with real proofs disabled, but +// still simulating all circuits via a prover-client), in order to test +// the coordination between the sequencer and the 
prover node. +describe('e2e_prover_node', () => { + let ctx: SubsystemsContext; + let wallet: AccountWalletWithSecretKey; + let recipient: AztecAddress; + let contract: StatefulTestContract; + let txReceipts: FieldsOf[]; + + let logger: DebugLogger; + + let snapshotManager: ISnapshotManager; + + beforeAll(async () => { + logger = createDebugLogger('aztec:e2e_prover_node'); + const config: Partial = { sequencerSkipSubmitProofs: true }; + snapshotManager = createSnapshotManager(`e2e_prover_node`, process.env.E2E_DATA_PATH, config); + + await snapshotManager.snapshot('setup', addAccounts(2, logger), async ({ accountKeys }, ctx) => { + const accountManagers = accountKeys.map(ak => getSchnorrAccount(ctx.pxe, ak[0], ak[1], 1)); + await Promise.all(accountManagers.map(a => a.register())); + const wallets = await Promise.all(accountManagers.map(a => a.getWallet())); + wallets.forEach((w, i) => logger.verbose(`Wallet ${i} address: ${w.getAddress()}`)); + wallet = wallets[0]; + recipient = wallets[1].getAddress(); + }); + + await snapshotManager.snapshot( + 'deploy-test-contract', + async () => { + const owner = wallet.getAddress(); + const contract = await StatefulTestContract.deploy(wallet, owner, owner, 42).send().deployed(); + return { contractAddress: contract.address }; + }, + async ({ contractAddress }) => { + contract = await StatefulTestContract.at(contractAddress, wallet); + }, + ); + + await snapshotManager.snapshot( + 'create-blocks', + async () => { + const txReceipt1 = await contract.methods.create_note(recipient, recipient, 10).send().wait(); + const txReceipt2 = await contract.methods.increment_public_value(recipient, 20).send().wait(); + return { txReceipt1, txReceipt2 }; + }, + ({ txReceipt1, txReceipt2 }) => { + txReceipts = [txReceipt1, txReceipt2]; + return Promise.resolve(); + }, + ); + + ctx = await snapshotManager.setup(); + }); + + it('submits two blocks, then prover proves the first one', async () => { + // Check everything went well during setup 
and txs were mined in two different blocks + const [txReceipt1, txReceipt2] = txReceipts; + const firstBlock = txReceipt1.blockNumber!; + expect(txReceipt2.blockNumber).toEqual(firstBlock + 1); + expect(await contract.methods.get_public_value(recipient).simulate()).toEqual(20n); + expect(await contract.methods.summed_values(recipient).simulate()).toEqual(10n); + expect(await ctx.aztecNode.getProvenBlockNumber()).toEqual(0); + + // Trick archiver into thinking everything has been proven up to this point. + // TODO: Add cheat code to flag current block as proven on L1, which will be needed when we assert on L1 that proofs do not have any gaps. + await (ctx.aztecNode.getBlockSource() as Archiver).setProvenBlockNumber(firstBlock - 1); + expect(await ctx.aztecNode.getProvenBlockNumber()).toEqual(firstBlock - 1); + + // Kick off a prover node + await sleep(1000); + logger.info('Creating prover node'); + // HACK: We have to use the existing archiver to fetch L2 data, since anvil's chain dump/load used by the + // snapshot manager does not include events nor txs, so a new archiver would not "see" old blocks. + const proverConfig = { ...ctx.aztecNodeConfig, txProviderNodeUrl: undefined, dataDirectory: undefined }; + const archiver = ctx.aztecNode.getBlockSource() as Archiver; + const proverNode = await createProverNode(proverConfig, { aztecNodeTxProvider: ctx.aztecNode, archiver }); + + // Prove block from first tx and block until it is proven + logger.info(`Proving block ${firstBlock}`); + await proverNode.prove(firstBlock, firstBlock); + + logger.info(`Proof submitted. 
Awaiting aztec node to sync...`); + await retryUntil(async () => (await ctx.aztecNode.getProvenBlockNumber()) === firstBlock, 'proven-block', 10, 1); + expect(await ctx.aztecNode.getProvenBlockNumber()).toEqual(firstBlock); + }); +}); diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 9b61cc6fdc2..ace5b321439 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -52,8 +52,8 @@ type SnapshotEntry = { snapshotPath: string; }; -export function createSnapshotManager(testName: string, dataPath?: string) { - return dataPath ? new SnapshotManager(testName, dataPath) : new MockSnapshotManager(testName); +export function createSnapshotManager(testName: string, dataPath?: string, config: Partial = {}) { + return dataPath ? new SnapshotManager(testName, dataPath, config) : new MockSnapshotManager(testName, config); } export interface ISnapshotManager { @@ -73,7 +73,7 @@ class MockSnapshotManager implements ISnapshotManager { private context?: SubsystemsContext; private logger: DebugLogger; - constructor(testName: string) { + constructor(testName: string, private config: Partial = {}) { this.logger = createDebugLogger(`aztec:snapshot_manager:${testName}`); this.logger.warn(`No data path given, will not persist any snapshots.`); } @@ -95,7 +95,7 @@ class MockSnapshotManager implements ISnapshotManager { public async setup() { if (!this.context) { - this.context = await setupFromFresh(undefined, this.logger); + this.context = await setupFromFresh(undefined, this.logger, this.config); } return this.context; } @@ -116,7 +116,7 @@ class SnapshotManager implements ISnapshotManager { private livePath: string; private logger: DebugLogger; - constructor(testName: string, private dataPath: string) { + constructor(testName: string, private dataPath: string, private config: Partial = {}) { this.livePath = join(this.dataPath, 
'live', testName); this.logger = createDebugLogger(`aztec:snapshot_manager:${testName}`); } @@ -193,7 +193,7 @@ class SnapshotManager implements ISnapshotManager { this.logger.verbose(`Restoration of ${e.name} complete.`); }); } else { - this.context = await setupFromFresh(this.livePath, this.logger); + this.context = await setupFromFresh(this.livePath, this.logger, this.config); } } return this.context; @@ -227,12 +227,16 @@ async function teardown(context: SubsystemsContext | undefined) { * If given a statePath, the state will be written to the path. * If there is no statePath, in-memory and temporary state locations will be used. */ -async function setupFromFresh(statePath: string | undefined, logger: Logger): Promise { +async function setupFromFresh( + statePath: string | undefined, + logger: Logger, + config: Partial = {}, +): Promise { logger.verbose(`Initializing state...`); // Fetch the AztecNode config. // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. - const aztecNodeConfig: AztecNodeConfig = getConfigEnvVars(); + const aztecNodeConfig: AztecNodeConfig = { ...getConfigEnvVars(), ...config }; aztecNodeConfig.dataDirectory = statePath; // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies. 
@@ -257,7 +261,7 @@ async function setupFromFresh(statePath: string | undefined, logger: Logger): Pr const deployL1ContractsValues = await setupL1Contracts(aztecNodeConfig.rpcUrl, hdAccount, logger); aztecNodeConfig.publisherPrivateKey = `0x${publisherPrivKey!.toString('hex')}`; aztecNodeConfig.l1Contracts = deployL1ContractsValues.l1ContractAddresses; - aztecNodeConfig.l1BlockPublishRetryIntervalMS = 100; + aztecNodeConfig.l1PublishRetryIntervalMS = 100; const acvmConfig = await getACVMConfig(logger); if (acvmConfig) { diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 243bd18ad93..d90102914d4 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -378,7 +378,7 @@ export async function setup( config.bbBinaryPath = bbConfig.bbBinaryPath; config.bbWorkingDirectory = bbConfig.bbWorkingDirectory; } - config.l1BlockPublishRetryIntervalMS = 100; + config.l1PublishRetryIntervalMS = 100; const aztecNode = await AztecNodeService.createAndSync(config, telemetry); const sequencer = aztecNode.getSequencer(); const prover = aztecNode.getProver(); diff --git a/yarn-project/end-to-end/tsconfig.json b/yarn-project/end-to-end/tsconfig.json index caad50a6304..a5d5341ad0b 100644 --- a/yarn-project/end-to-end/tsconfig.json +++ b/yarn-project/end-to-end/tsconfig.json @@ -60,6 +60,9 @@ { "path": "../prover-client" }, + { + "path": "../prover-node" + }, { "path": "../pxe" }, diff --git a/yarn-project/ethereum/src/l1_contract_addresses.ts b/yarn-project/ethereum/src/l1_contract_addresses.ts index c870de6d665..9c98ee87bb9 100644 --- a/yarn-project/ethereum/src/l1_contract_addresses.ts +++ b/yarn-project/ethereum/src/l1_contract_addresses.ts @@ -1,4 +1,4 @@ -import { type EthAddress } from '@aztec/foundation/eth-address'; +import { EthAddress } from '@aztec/foundation/eth-address'; /** * The names of the current L1 contract addresses. 
@@ -21,3 +21,29 @@ export const l1ContractsNames = [ export type L1ContractAddresses = { [K in (typeof l1ContractsNames)[number]]: EthAddress; }; + +export function getL1ContractAddressesFromEnv() { + const { + AVAILABILITY_ORACLE_CONTRACT_ADDRESS, + ROLLUP_CONTRACT_ADDRESS, + REGISTRY_CONTRACT_ADDRESS, + INBOX_CONTRACT_ADDRESS, + OUTBOX_CONTRACT_ADDRESS, + GAS_TOKEN_CONTRACT_ADDRESS, + GAS_PORTAL_CONTRACT_ADDRESS, + } = process.env; + + return { + availabilityOracleAddress: AVAILABILITY_ORACLE_CONTRACT_ADDRESS + ? EthAddress.fromString(AVAILABILITY_ORACLE_CONTRACT_ADDRESS) + : EthAddress.ZERO, + rollupAddress: ROLLUP_CONTRACT_ADDRESS ? EthAddress.fromString(ROLLUP_CONTRACT_ADDRESS) : EthAddress.ZERO, + registryAddress: REGISTRY_CONTRACT_ADDRESS ? EthAddress.fromString(REGISTRY_CONTRACT_ADDRESS) : EthAddress.ZERO, + inboxAddress: INBOX_CONTRACT_ADDRESS ? EthAddress.fromString(INBOX_CONTRACT_ADDRESS) : EthAddress.ZERO, + outboxAddress: OUTBOX_CONTRACT_ADDRESS ? EthAddress.fromString(OUTBOX_CONTRACT_ADDRESS) : EthAddress.ZERO, + gasTokenAddress: GAS_TOKEN_CONTRACT_ADDRESS ? EthAddress.fromString(GAS_TOKEN_CONTRACT_ADDRESS) : EthAddress.ZERO, + gasPortalAddress: GAS_PORTAL_CONTRACT_ADDRESS + ? EthAddress.fromString(GAS_PORTAL_CONTRACT_ADDRESS) + : EthAddress.ZERO, + }; +} diff --git a/yarn-project/foundation/src/log/log_fn.ts b/yarn-project/foundation/src/log/log_fn.ts index 5dd11204e28..f0141909886 100644 --- a/yarn-project/foundation/src/log/log_fn.ts +++ b/yarn-project/foundation/src/log/log_fn.ts @@ -1,5 +1,5 @@ /** Structured log data to include with the message. */ -export type LogData = Record; +export type LogData = Record; /** A callable logger instance. 
*/ export type LogFn = (msg: string, data?: LogData) => void; diff --git a/yarn-project/kv-store/src/interfaces/index.ts b/yarn-project/kv-store/src/interfaces/index.ts index e478b630e65..c97d327fa04 100644 --- a/yarn-project/kv-store/src/interfaces/index.ts +++ b/yarn-project/kv-store/src/interfaces/index.ts @@ -3,4 +3,5 @@ export * from './map.js'; export * from './counter.js'; export * from './singleton.js'; export * from './store.js'; +export * from './set.js'; export { Range } from './common.js'; diff --git a/yarn-project/kv-store/src/interfaces/set.ts b/yarn-project/kv-store/src/interfaces/set.ts new file mode 100644 index 00000000000..4eaf1fe1442 --- /dev/null +++ b/yarn-project/kv-store/src/interfaces/set.ts @@ -0,0 +1,31 @@ +import { type Key, type Range } from './common.js'; + +/** + * A set backed by a persistent store. + */ +export interface AztecSet { + /** + * Checks if a key exists in the set. + * @param key - The key to check + * @returns True if the key exists, false otherwise + */ + has(key: K): boolean; + + /** + * Adds the given value. + * @param key - The key to add. + */ + add(key: K): Promise; + + /** + * Deletes the given key. + * @param key - The key to delete. 
+ */ + delete(key: K): Promise; + + /** + * Iterates over the set's keys in their natural order + * @param range - The range of keys to iterate over + */ + entries(range?: Range): IterableIterator; +} diff --git a/yarn-project/kv-store/src/interfaces/store.ts b/yarn-project/kv-store/src/interfaces/store.ts index 16962cf5695..f13a3241ab5 100644 --- a/yarn-project/kv-store/src/interfaces/store.ts +++ b/yarn-project/kv-store/src/interfaces/store.ts @@ -2,6 +2,7 @@ import { type AztecArray } from './array.js'; import { type Key } from './common.js'; import { type AztecCounter } from './counter.js'; import { type AztecMap, type AztecMultiMap } from './map.js'; +import { type AztecSet } from './set.js'; import { type AztecSingleton } from './singleton.js'; /** A key-value store */ @@ -13,6 +14,13 @@ export interface AztecKVStore { */ openMap(name: string): AztecMap; + /** + * Creates a new set. + * @param name - The name of the set + * @returns The set + */ + openSet(name: string): AztecSet; + /** * Creates a new AztecMultiMap in the store. A multi-map stores multiple values for a single key automatically. * @param name - Name of the map diff --git a/yarn-project/kv-store/src/utils.ts b/yarn-project/kv-store/src/utils.ts index d527e43a8b5..65cf56ceb4c 100644 --- a/yarn-project/kv-store/src/utils.ts +++ b/yarn-project/kv-store/src/utils.ts @@ -1,9 +1,17 @@ import { type EthAddress } from '@aztec/foundation/eth-address'; -import { type Logger } from '@aztec/foundation/log'; +import { type Logger, createDebugLogger } from '@aztec/foundation/log'; import { type AztecKVStore } from './interfaces/store.js'; import { AztecLmdbStore } from './lmdb/store.js'; +export function createStore( + config: { dataDirectory: string | undefined }, + rollupAddress: EthAddress, + log: Logger = createDebugLogger('aztec:kv-store'), +) { + return initStoreForRollup(AztecLmdbStore.open(config.dataDirectory, false, log), rollupAddress, log); +} + /** * Clears the store if the rollup address does not match the one stored in the database. * This is to prevent data from being accidentally shared between different rollup instances. 
* @param name - The name of the multi-map diff --git a/yarn-project/kv-store/src/lmdb/set.test.ts b/yarn-project/kv-store/src/lmdb/set.test.ts new file mode 100644 index 00000000000..677b17e543f --- /dev/null +++ b/yarn-project/kv-store/src/lmdb/set.test.ts @@ -0,0 +1,54 @@ +import { type Database, open } from 'lmdb'; + +import { LmdbAztecSet } from './set.js'; + +describe('LmdbAztecSet', () => { + let db: Database; + let set: LmdbAztecSet; + + beforeEach(() => { + db = open({ dupSort: true } as any); + set = new LmdbAztecSet(db, 'test'); + }); + + it('should be able to set and get values', async () => { + await set.add('foo'); + await set.add('baz'); + + expect(set.has('foo')).toEqual(true); + expect(set.has('baz')).toEqual(true); + expect(set.has('bar')).toEqual(false); + }); + + it('should be able to delete values', async () => { + await set.add('foo'); + await set.add('baz'); + + await set.delete('foo'); + + expect(set.has('foo')).toEqual(false); + expect(set.has('baz')).toEqual(true); + }); + + it('should be able to iterate over entries', async () => { + await set.add('baz'); + await set.add('foo'); + + expect([...set.entries()]).toEqual(['baz', 'foo']); + }); + + it('supports range queries', async () => { + await set.add('a'); + await set.add('b'); + await set.add('c'); + await set.add('d'); + + expect([...set.entries({ start: 'b', end: 'c' })]).toEqual(['b']); + expect([...set.entries({ start: 'b' })]).toEqual(['b', 'c', 'd']); + expect([...set.entries({ end: 'c' })]).toEqual(['a', 'b']); + expect([...set.entries({ start: 'b', end: 'c', reverse: true })]).toEqual(['c']); + expect([...set.entries({ start: 'b', limit: 1 })]).toEqual(['b']); + expect([...set.entries({ start: 'b', reverse: true })]).toEqual(['d', 'c']); + expect([...set.entries({ end: 'b', reverse: true })]).toEqual(['b', 'a']); + }); +}); diff --git a/yarn-project/kv-store/src/lmdb/set.ts b/yarn-project/kv-store/src/lmdb/set.ts new file mode 100644 index 00000000000..887185b1b48 --- /dev/null 
+++ b/yarn-project/kv-store/src/lmdb/set.ts @@ -0,0 +1,35 @@ +import { type Database } from 'lmdb'; + +import { type Key, type Range } from '../interfaces/common.js'; +import { type AztecSet } from '../interfaces/set.js'; +import { LmdbAztecMap } from './map.js'; + +/** + * A set backed by LMDB. + */ +export class LmdbAztecSet implements AztecSet { + private map: LmdbAztecMap; + constructor(rootDb: Database, mapName: string) { + this.map = new LmdbAztecMap(rootDb, mapName); + } + + close(): Promise { + return this.map.close(); + } + + has(key: K): boolean { + return this.map.has(key); + } + + add(key: K): Promise { + return this.map.set(key, true); + } + + delete(key: K): Promise { + return this.map.delete(key); + } + + entries(range: Range = {}): IterableIterator { + return this.map.keys(range); + } +} diff --git a/yarn-project/kv-store/src/lmdb/store.ts b/yarn-project/kv-store/src/lmdb/store.ts index ef1c735ed32..4b7a115f2f9 100644 --- a/yarn-project/kv-store/src/lmdb/store.ts +++ b/yarn-project/kv-store/src/lmdb/store.ts @@ -5,11 +5,13 @@ import { type Database, type Key, type RootDatabase, open } from 'lmdb'; import { type AztecArray } from '../interfaces/array.js'; import { type AztecCounter } from '../interfaces/counter.js'; import { type AztecMap, type AztecMultiMap } from '../interfaces/map.js'; +import { type AztecSet } from '../interfaces/set.js'; import { type AztecSingleton } from '../interfaces/singleton.js'; import { type AztecKVStore } from '../interfaces/store.js'; import { LmdbAztecArray } from './array.js'; import { LmdbAztecCounter } from './counter.js'; import { LmdbAztecMap } from './map.js'; +import { LmdbAztecSet } from './set.js'; import { LmdbAztecSingleton } from './singleton.js'; /** @@ -71,6 +73,15 @@ export class AztecLmdbStore implements AztecKVStore { return new LmdbAztecMap(this.#data, name); } + /** + * Creates a new AztecSet in the store. 
+ * @param name - Name of the set + * @returns A new AztecSet + */ + openSet(name: string): AztecSet { + return new LmdbAztecSet(this.#data, name); + } + /** * Creates a new AztecMultiMap in the store. A multi-map stores multiple values for a single key automatically. * @param name - Name of the map diff --git a/yarn-project/kv-store/src/utils.ts b/yarn-project/kv-store/src/utils.ts index d527e43a8b5..65cf56ceb4c 100644 --- a/yarn-project/kv-store/src/utils.ts +++ b/yarn-project/kv-store/src/utils.ts @@ -1,9 +1,17 @@ import { type EthAddress } from '@aztec/foundation/eth-address'; -import { type Logger } from '@aztec/foundation/log'; +import { type Logger, createDebugLogger } from '@aztec/foundation/log'; import { type AztecKVStore } from './interfaces/store.js'; import { AztecLmdbStore } from './lmdb/store.js'; +export function createStore( + config: { dataDirectory: string | undefined }, + rollupAddress: EthAddress, + log: Logger = createDebugLogger('aztec:kv-store'), +) { + return initStoreForRollup(AztecLmdbStore.open(config.dataDirectory, false, log), rollupAddress, log); +} + /** * Clears the store if the rollup address does not match the one stored in the database. * This is to prevent data from being accidentally shared between different rollup instances. 
diff --git a/yarn-project/p2p/src/client/index.ts b/yarn-project/p2p/src/client/index.ts index 6a499693f64..2bde5c4c1e7 100644 --- a/yarn-project/p2p/src/client/index.ts +++ b/yarn-project/p2p/src/client/index.ts @@ -12,8 +12,8 @@ import { getPublicIp, splitAddressPort } from '../util.js'; export * from './p2p_client.js'; export const createP2PClient = async ( - store: AztecKVStore, config: P2PConfig, + store: AztecKVStore, txPool: TxPool, l2BlockSource: L2BlockSource, ) => { diff --git a/yarn-project/p2p/src/client/p2p_client.test.ts b/yarn-project/p2p/src/client/p2p_client.test.ts index 6726df9aeaa..0e51054cf03 100644 --- a/yarn-project/p2p/src/client/p2p_client.test.ts +++ b/yarn-project/p2p/src/client/p2p_client.test.ts @@ -30,7 +30,10 @@ describe('In-Memory P2P Client', () => { deleteTxs: jest.fn(), getAllTxs: jest.fn().mockReturnValue([]), getAllTxHashes: jest.fn().mockReturnValue([]), - hasTx: jest.fn().mockReturnValue(false), + getMinedTxHashes: jest.fn().mockReturnValue([]), + getPendingTxHashes: jest.fn().mockReturnValue([]), + getTxStatus: jest.fn().mockReturnValue(undefined), + markAsMined: jest.fn(), }; p2pService = { @@ -96,6 +99,6 @@ describe('In-Memory P2P Client', () => { await client.stop(); const client2 = new P2PClient(kvStore, blockSource, txPool, p2pService); - expect(client2.getSyncedBlockNum()).toEqual(client.getSyncedBlockNum()); + expect(client2.getSyncedLatestBlockNum()).toEqual(client.getSyncedLatestBlockNum()); }); }); diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index f6fc0737b55..d4b74da2eae 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -52,14 +52,21 @@ export interface P2P { * Returns all transactions in the transaction pool. * @returns An array of Txs. */ - getTxs(): Promise; + getTxs(filter: 'all' | 'pending' | 'mined'): Tx[]; /** * Returns a transaction in the transaction pool by its hash. 
* @param txHash - Hash of tx to return. * @returns A single tx or undefined. */ - getTxByHash(txHash: TxHash): Promise; + getTxByHash(txHash: TxHash): Tx | undefined; + + /** + * Returns whether the given tx hash is flagged as pending or mined. + * @param txHash - Hash of the tx to query. + * @returns Pending or mined depending on its status, or undefined if not found. + */ + getTxStatus(txHash: TxHash): 'pending' | 'mined' | undefined; /** * Starts the p2p client. @@ -89,26 +96,26 @@ export interface P2P { * The P2P client implementation. */ export class P2PClient implements P2P { - /** - * L2 Block download that p2p client uses to stay in sync with latest blocks. - */ - private blockDownloader: L2BlockDownloader; + /** L2 block download to stay in sync with latest blocks. */ + private latestBlockDownloader: L2BlockDownloader; - /** - * Property that indicates whether the client is running. - */ + /** L2 block download to stay in sync with proven blocks. */ + private provenBlockDownloader: L2BlockDownloader; + + /** Property that indicates whether the client is running. */ private stopping = false; - /** - * The JS promise that will be running to keep the client's data in sync. Can be interrupted if the client is stopped. - */ + /** The JS promise that will be running to keep the client's data in sync. Can be interrupted if the client is stopped. */ private runningPromise!: Promise; private currentState = P2PClientState.IDLE; private syncPromise = Promise.resolve(); - private latestBlockNumberAtStart = -1; private syncResolve?: () => void = undefined; - private synchedBlockNumber: AztecSingleton; + private latestBlockNumberAtStart = -1; + private provenBlockNumberAtStart = -1; + + private synchedLatestBlockNumber: AztecSingleton; + private synchedProvenBlockNumber: AztecSingleton; /** * In-memory P2P client constructor. 
@@ -126,11 +133,15 @@ export class P2PClient implements P2P { private log = createDebugLogger('aztec:p2p'), ) { const { p2pBlockCheckIntervalMS: checkInterval, p2pL2QueueSize } = getP2PConfigEnvVars(); - this.blockDownloader = new L2BlockDownloader(l2BlockSource, { - maxQueueSize: p2pL2QueueSize, - pollIntervalMS: checkInterval, - }); - this.synchedBlockNumber = store.openSingleton('p2p_pool_last_l2_block'); + const l2DownloaderOpts = { maxQueueSize: p2pL2QueueSize, pollIntervalMS: checkInterval }; + // TODO(palla/prover-node): This effectively downloads blocks twice from the archiver, which is an issue + // if the archiver is remote. We should refactor this so the downloader keeps a single queue and handles + // latest/proven metadata, as well as block reorgs. + this.latestBlockDownloader = new L2BlockDownloader(l2BlockSource, l2DownloaderOpts); + this.provenBlockDownloader = new L2BlockDownloader(l2BlockSource, { ...l2DownloaderOpts, proven: true }); + + this.synchedLatestBlockNumber = store.openSingleton('p2p_pool_last_l2_block'); + this.synchedProvenBlockNumber = store.openSingleton('p2p_pool_last_proven_l2_block'); } /** @@ -145,41 +156,47 @@ export class P2PClient implements P2P { return this.syncPromise; } - // get the current latest block number + // get the current latest block numbers this.latestBlockNumberAtStart = await this.l2BlockSource.getBlockNumber(); + this.provenBlockNumberAtStart = await this.l2BlockSource.getProvenBlockNumber(); - const blockToDownloadFrom = this.getSyncedBlockNum() + 1; + const syncedLatestBlock = this.getSyncedLatestBlockNum() + 1; + const syncedProvenBlock = this.getSyncedProvenBlockNum() + 1; // if there are blocks to be retrieved, go to a synching state - if (blockToDownloadFrom <= this.latestBlockNumberAtStart) { + if (syncedLatestBlock <= this.latestBlockNumberAtStart || syncedProvenBlock <= this.provenBlockNumberAtStart) { this.setCurrentState(P2PClientState.SYNCHING); this.syncPromise = new Promise(resolve => { 
this.syncResolve = resolve; }); - this.log.verbose(`Starting sync from ${blockToDownloadFrom}, latest block ${this.latestBlockNumberAtStart}`); + this.log.verbose(`Starting sync from ${syncedLatestBlock} (last proven ${syncedProvenBlock})`); } else { // if no blocks to be retrieved, go straight to running this.setCurrentState(P2PClientState.RUNNING); this.syncPromise = Promise.resolve(); await this.p2pService.start(); - this.log.verbose( - `Next block ${blockToDownloadFrom} already beyond latest block at ${this.latestBlockNumberAtStart}`, - ); + this.log.verbose(`Block ${syncedLatestBlock} (proven ${syncedProvenBlock}) already beyond current block`); } // publish any txs in TxPool after its doing initial sync this.syncPromise = this.syncPromise.then(() => this.publishStoredTxs()); // start looking for further blocks - const blockProcess = async () => { + const processLatest = async () => { + while (!this.stopping) { + await this.latestBlockDownloader.getBlocks(1).then(this.handleLatestL2Blocks.bind(this)); + } + }; + const processProven = async () => { while (!this.stopping) { - const blocks = await this.blockDownloader.getBlocks(); - await this.handleL2Blocks(blocks); + await this.provenBlockDownloader.getBlocks(1).then(this.handleProvenL2Blocks.bind(this)); } }; - this.runningPromise = blockProcess(); - this.blockDownloader.start(blockToDownloadFrom); - this.log.verbose(`Started block downloader from block ${blockToDownloadFrom}`); + + this.runningPromise = Promise.all([processLatest(), processProven()]).then(() => {}); + this.latestBlockDownloader.start(syncedLatestBlock); + this.provenBlockDownloader.start(syncedLatestBlock); + this.log.verbose(`Started block downloader from block ${syncedLatestBlock}`); return this.syncPromise; } @@ -193,7 +210,8 @@ export class P2PClient implements P2P { this.stopping = true; await this.p2pService.stop(); this.log.debug('Stopped p2p service'); - await this.blockDownloader.stop(); + await this.latestBlockDownloader.stop(); + 
await this.provenBlockDownloader.stop(); this.log.debug('Stopped block downloader'); await this.runningPromise; this.setCurrentState(P2PClientState.STOPPED); @@ -204,8 +222,23 @@ export class P2PClient implements P2P { * Returns all transactions in the transaction pool. * @returns An array of Txs. */ - public getTxs(): Promise { - return Promise.resolve(this.txPool.getAllTxs()); + public getTxs(filter: 'all' | 'pending' | 'mined'): Tx[] { + if (filter === 'all') { + return this.txPool.getAllTxs(); + } else if (filter === 'mined') { + return this.txPool + .getMinedTxHashes() + .map(txHash => this.txPool.getTxByHash(txHash)) + .filter((tx): tx is Tx => !!tx); + } else if (filter === 'pending') { + return this.txPool + .getPendingTxHashes() + .map(txHash => this.txPool.getTxByHash(txHash)) + .filter((tx): tx is Tx => !!tx); + } else { + const _: never = filter; + throw new Error(`Unknown filter ${filter}`); + } } /** @@ -213,8 +246,8 @@ export class P2PClient implements P2P { * @param txHash - Hash of the transaction to look for in the pool. * @returns A single tx or undefined. */ - getTxByHash(txHash: TxHash): Promise { - return Promise.resolve(this.txPool.getTxByHash(txHash)); + getTxByHash(txHash: TxHash): Tx | undefined { + return this.txPool.getTxByHash(txHash); } /** @@ -231,6 +264,15 @@ export class P2PClient implements P2P { this.p2pService.propagateTx(tx); } + /** + * Returns whether the given tx hash is flagged as pending or mined. + * @param txHash - Hash of the tx to query. + * @returns Pending or mined depending on its status, or undefined if not found. + */ + public getTxStatus(txHash: TxHash): 'pending' | 'mined' | undefined { + return this.txPool.getTxStatus(txHash); + } + /** * Deletes the 'txs' from the pool. * NOT used if we use sendTx as reconcileTxPool will handle this. @@ -257,8 +299,16 @@ export class P2PClient implements P2P { * Public function to check the latest block number that the P2P client is synced to. 
* @returns Block number of latest L2 Block we've synced with. */ - public getSyncedBlockNum() { - return this.synchedBlockNumber.get() ?? INITIAL_L2_BLOCK_NUM - 1; + public getSyncedLatestBlockNum() { + return this.synchedLatestBlockNumber.get() ?? INITIAL_L2_BLOCK_NUM - 1; + } + + /** + * Public function to check the latest proven block number that the P2P client is synced to. + * @returns Block number of latest proven L2 Block we've synced with. + */ + public getSyncedProvenBlockNum() { + return this.synchedProvenBlockNumber.get() ?? INITIAL_L2_BLOCK_NUM - 1; } /** @@ -268,16 +318,28 @@ export class P2PClient implements P2P { public getStatus(): Promise { return Promise.resolve({ state: this.currentState, - syncedToL2Block: this.getSyncedBlockNum(), + syncedToL2Block: this.getSyncedLatestBlockNum(), } as P2PSyncState); } /** - * Internal method that uses the provided blocks to check against the client's tx pool. + * Mark all txs from these blocks as mined. + * @param blocks - A list of existing blocks with txs that the P2P client needs to ensure the tx pool is reconciled with. + * @returns Empty promise. + */ + private async markTxsAsMinedFromBlocks(blocks: L2Block[]): Promise { + for (const block of blocks) { + const txHashes = block.body.txEffects.map(txEffect => txEffect.txHash); + await this.txPool.markAsMined(txHashes); + } + } + + /** + * Deletes txs from these blocks. * @param blocks - A list of existing blocks with txs that the P2P client needs to ensure the tx pool is reconciled with. * @returns Empty promise. */ - private async reconcileTxPool(blocks: L2Block[]): Promise { + private async deleteTxsFromBlocks(blocks: L2Block[]): Promise { for (const block of blocks) { const txHashes = block.body.txEffects.map(txEffect => txEffect.txHash); await this.txPool.deleteTxs(txHashes); @@ -285,20 +347,44 @@ export class P2PClient implements P2P { } /** - * Method for processing new blocks. + * Handles new mined blocks by marking the txs in them as mined. 
* @param blocks - A list of existing blocks with txs that the P2P client needs to ensure the tx pool is reconciled with. * @returns Empty promise. */ - private async handleL2Blocks(blocks: L2Block[]): Promise { + private async handleLatestL2Blocks(blocks: L2Block[]): Promise { if (!blocks.length) { return Promise.resolve(); } - await this.reconcileTxPool(blocks); + await this.markTxsAsMinedFromBlocks(blocks); const lastBlockNum = blocks[blocks.length - 1].number; - await this.synchedBlockNumber.set(lastBlockNum); - this.log.debug(`Synched to block ${lastBlockNum}`); + await this.synchedLatestBlockNumber.set(lastBlockNum); + this.log.debug(`Synched to latest block ${lastBlockNum}`); + await this.startServiceIfSynched(); + } + + /** + * Handles new proven blocks by deleting the txs in them. + * @param blocks - A list of existing blocks with txs that the P2P client needs to ensure the tx pool is reconciled with. + * @returns Empty promise. + */ + private async handleProvenL2Blocks(blocks: L2Block[]): Promise { + if (!blocks.length) { + return Promise.resolve(); + } + await this.deleteTxsFromBlocks(blocks); + const lastBlockNum = blocks[blocks.length - 1].number; + await this.synchedProvenBlockNumber.set(lastBlockNum); + this.log.debug(`Synched to proven block ${lastBlockNum}`); + await this.startServiceIfSynched(); + } - if (this.currentState === P2PClientState.SYNCHING && lastBlockNum >= this.latestBlockNumberAtStart) { + private async startServiceIfSynched() { + if ( + this.currentState === P2PClientState.SYNCHING && + this.getSyncedLatestBlockNum() >= this.latestBlockNumberAtStart && + this.getSyncedProvenBlockNum() >= this.provenBlockNumberAtStart + ) { + this.log.debug(`Synched to blocks at start`); this.setCurrentState(P2PClientState.RUNNING); if (this.syncResolve !== undefined) { this.syncResolve(); diff --git a/yarn-project/p2p/src/tx_pool/aztec_kv_tx_pool.ts b/yarn-project/p2p/src/tx_pool/aztec_kv_tx_pool.ts index f3756f83713..16fa4be870f 100644 --- 
a/yarn-project/p2p/src/tx_pool/aztec_kv_tx_pool.ts +++ b/yarn-project/p2p/src/tx_pool/aztec_kv_tx_pool.ts @@ -1,7 +1,7 @@ import { Tx, TxHash } from '@aztec/circuit-types'; import { type TxAddedToPoolStats } from '@aztec/circuit-types/stats'; import { type Logger, createDebugLogger } from '@aztec/foundation/log'; -import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; +import { type AztecKVStore, type AztecMap, type AztecSet } from '@aztec/kv-store'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { TxPoolInstrumentation } from './instrumentation.js'; @@ -13,11 +13,14 @@ import { type TxPool } from './tx_pool.js'; export class AztecKVTxPool implements TxPool { #store: AztecKVStore; - /** - * Our tx pool, stored as a Map in-memory, with K: tx hash and V: the transaction. - */ + /** Our tx pool, stored as a Map, with K: tx hash and V: the transaction. */ #txs: AztecMap; + /** Index for pending txs. */ + #pendingTxs: AztecSet; + /** Index for mined txs. */ + #minedTxs: AztecSet; + #log: Logger; #metrics: TxPoolInstrumentation; @@ -29,11 +32,43 @@ export class AztecKVTxPool implements TxPool { */ constructor(store: AztecKVStore, telemetry: TelemetryClient, log = createDebugLogger('aztec:tx_pool')) { this.#txs = store.openMap('txs'); + this.#minedTxs = store.openSet('minedTxs'); + this.#pendingTxs = store.openSet('pendingTxs'); + this.#store = store; this.#log = log; this.#metrics = new TxPoolInstrumentation(telemetry, 'AztecKVTxPool'); } + public markAsMined(txHashes: TxHash[]): Promise { + return this.#store.transaction(() => { + for (const hash of txHashes) { + const key = hash.toString(); + void this.#minedTxs.add(key); + void this.#pendingTxs.delete(key); + } + }); + } + + public getPendingTxHashes(): TxHash[] { + return Array.from(this.#pendingTxs.entries()).map(x => TxHash.fromString(x)); + } + + public getMinedTxHashes(): TxHash[] { + return Array.from(this.#minedTxs.entries()).map(x => TxHash.fromString(x)); + } + + public 
getTxStatus(txHash: TxHash): 'pending' | 'mined' | undefined { + const key = txHash.toString(); + if (this.#pendingTxs.has(key)) { + return 'pending'; + } else if (this.#minedTxs.has(key)) { + return 'mined'; + } else { + return undefined; + } + } + /** * Checks if a transaction exists in the pool and returns it. * @param txHash - The generated tx hash. @@ -59,7 +94,12 @@ export class AztecKVTxPool implements TxPool { ...tx.getStats(), } satisfies TxAddedToPoolStats); - void this.#txs.set(txHash.toString(), tx.toBuffer()); + const key = txHash.toString(); + void this.#txs.set(key, tx.toBuffer()); + if (!this.#minedTxs.has(key)) { + // REFACTOR: Use an lmdb conditional write to avoid race conditions with this write tx + void this.#pendingTxs.add(key); + } } this.#metrics.recordTxs(txs); @@ -74,7 +114,10 @@ export class AztecKVTxPool implements TxPool { public deleteTxs(txHashes: TxHash[]): Promise { return this.#store.transaction(() => { for (const hash of txHashes) { - void this.#txs.delete(hash.toString()); + const key = hash.toString(); + void this.#txs.delete(key); + void this.#pendingTxs.delete(key); + void this.#minedTxs.delete(key); } this.#metrics.removeTxs(txHashes.length); @@ -96,13 +139,4 @@ export class AztecKVTxPool implements TxPool { public getAllTxHashes(): TxHash[] { return Array.from(this.#txs.keys()).map(x => TxHash.fromString(x)); } - - /** - * Returns a boolean indicating if the transaction is present in the pool. - * @param txHash - The hash of the transaction to be queried. - * @returns True if the transaction present, false otherwise. 
- */ - public hasTx(txHash: TxHash): boolean { - return this.#txs.has(txHash.toString()); - } } diff --git a/yarn-project/p2p/src/tx_pool/memory_tx_pool.ts b/yarn-project/p2p/src/tx_pool/memory_tx_pool.ts index 924f907214f..30d3a23b31b 100644 --- a/yarn-project/p2p/src/tx_pool/memory_tx_pool.ts +++ b/yarn-project/p2p/src/tx_pool/memory_tx_pool.ts @@ -14,6 +14,8 @@ export class InMemoryTxPool implements TxPool { * Our tx pool, stored as a Map in-memory, with K: tx hash and V: the transaction. */ private txs: Map; + private minedTxs: Set; + private pendingTxs: Set; private metrics: TxPoolInstrumentation; @@ -23,9 +25,39 @@ export class InMemoryTxPool implements TxPool { */ constructor(telemetry: TelemetryClient, private log = createDebugLogger('aztec:tx_pool')) { this.txs = new Map(); + this.minedTxs = new Set(); + this.pendingTxs = new Set(); this.metrics = new TxPoolInstrumentation(telemetry, 'InMemoryTxPool'); } + public markAsMined(txHashes: TxHash[]): Promise { + const keys = txHashes.map(x => x.toBigInt()); + for (const key of keys) { + this.minedTxs.add(key); + this.pendingTxs.delete(key); + } + return Promise.resolve(); + } + + public getPendingTxHashes(): TxHash[] { + return Array.from(this.pendingTxs).map(x => TxHash.fromBigInt(x)); + } + + public getMinedTxHashes(): TxHash[] { + return Array.from(this.minedTxs).map(x => TxHash.fromBigInt(x)); + } + + public getTxStatus(txHash: TxHash): 'pending' | 'mined' | undefined { + const key = txHash.toBigInt(); + if (this.pendingTxs.has(key)) { + return 'pending'; + } + if (this.minedTxs.has(key)) { + return 'mined'; + } + return undefined; + } + /** * Checks if a transaction exists in the pool and returns it. * @param txHash - The generated tx hash. 
@@ -49,7 +81,12 @@ export class InMemoryTxPool implements TxPool { eventName: 'tx-added-to-pool', ...tx.getStats(), } satisfies TxAddedToPoolStats); - this.txs.set(txHash.toBigInt(), tx); + + const key = txHash.toBigInt(); + this.txs.set(key, tx); + if (!this.minedTxs.has(key)) { + this.pendingTxs.add(key); + } } return Promise.resolve(); } @@ -62,7 +99,10 @@ export class InMemoryTxPool implements TxPool { public deleteTxs(txHashes: TxHash[]): Promise { this.metrics.removeTxs(txHashes.length); for (const txHash of txHashes) { - this.txs.delete(txHash.toBigInt()); + const key = txHash.toBigInt(); + this.txs.delete(key); + this.pendingTxs.delete(key); + this.minedTxs.delete(key); } return Promise.resolve(); } @@ -82,13 +122,4 @@ export class InMemoryTxPool implements TxPool { public getAllTxHashes(): TxHash[] { return Array.from(this.txs.keys()).map(x => TxHash.fromBigInt(x)); } - - /** - * Returns a boolean indicating if the transaction is present in the pool. - * @param txHash - The hash of the transaction to be queried. - * @returns True if the transaction present, false otherwise. - */ - public hasTx(txHash: TxHash): boolean { - return this.txs.has(txHash.toBigInt()); - } } diff --git a/yarn-project/p2p/src/tx_pool/tx_pool.ts b/yarn-project/p2p/src/tx_pool/tx_pool.ts index 0ca939f7989..4cf434bb3e0 100644 --- a/yarn-project/p2p/src/tx_pool/tx_pool.ts +++ b/yarn-project/p2p/src/tx_pool/tx_pool.ts @@ -17,6 +17,12 @@ export interface TxPool { */ getTxByHash(txHash: TxHash): Tx | undefined; + /** + * Marks the set of txs as mined, as opposed to pending. + * @param txHashes - Hashes of the txs to flag as mined. + */ + markAsMined(txHashes: TxHash[]): Promise; + /** * Deletes transactions from the pool. Tx hashes that are not present are ignored. * @param txHashes - An array of tx hashes to be removed from the tx pool. @@ -36,9 +42,21 @@ export interface TxPool { getAllTxHashes(): TxHash[]; /** - * Returns a boolean indicating if the transaction is present in the pool. 
- * @param txHash - The hash of the transaction to be queried. - * @returns True if the transaction present, false otherwise. + * Gets the hashes of pending transactions currently in the tx pool. + * @returns An array of pending transaction hashes found in the tx pool. + */ + getPendingTxHashes(): TxHash[]; + + /** + * Gets the hashes of mined transactions currently in the tx pool. + * @returns An array of mined transaction hashes found in the tx pool. + */ + getMinedTxHashes(): TxHash[]; + + /** + * Returns whether the given tx hash is flagged as pending or mined. + * @param txHash - Hash of the tx to query. + * @returns Pending or mined depending on its status, or undefined if not found. */ - hasTx(txHash: TxHash): boolean; + getTxStatus(txHash: TxHash): 'pending' | 'mined' | undefined; } diff --git a/yarn-project/p2p/src/tx_pool/tx_pool_test_suite.ts b/yarn-project/p2p/src/tx_pool/tx_pool_test_suite.ts index ea08bd1b475..2c72c1afa80 100644 --- a/yarn-project/p2p/src/tx_pool/tx_pool_test_suite.ts +++ b/yarn-project/p2p/src/tx_pool/tx_pool_test_suite.ts @@ -13,12 +13,14 @@ export function describeTxPool(getTxPool: () => TxPool) { pool = getTxPool(); }); - it('Adds txs to the pool', async () => { + it('Adds txs to the pool as pending', async () => { const tx1 = mockTx(); await pool.addTxs([tx1]); const poolTx = pool.getTxByHash(tx1.getTxHash()); expect(poolTx!.getTxHash()).toEqual(tx1.getTxHash()); + expect(pool.getTxStatus(tx1.getTxHash())).toEqual('pending'); + expect(pool.getPendingTxHashes()).toEqual([tx1.getTxHash()]); }); it('Removes txs from the pool', async () => { @@ -27,8 +29,21 @@ export function describeTxPool(getTxPool: () => TxPool) { await pool.addTxs([tx1]); await pool.deleteTxs([tx1.getTxHash()]); - const poolTx = pool.getTxByHash(tx1.getTxHash()); - expect(poolTx).toBeFalsy(); + expect(pool.getTxByHash(tx1.getTxHash())).toBeFalsy(); + expect(pool.getTxStatus(tx1.getTxHash())).toBeUndefined(); + }); + + it('Marks txs as mined', async () => { + 
const tx1 = mockTx(1); + const tx2 = mockTx(2); + + await pool.addTxs([tx1, tx2]); + await pool.markAsMined([tx1.getTxHash()]); + + expect(pool.getTxByHash(tx1.getTxHash())).toEqual(tx1); + expect(pool.getTxStatus(tx1.getTxHash())).toEqual('mined'); + expect(pool.getMinedTxHashes()).toEqual([tx1.getTxHash()]); + expect(pool.getPendingTxHashes()).toEqual([tx2.getTxHash()]); }); it('Returns all transactions in the pool', async () => { diff --git a/yarn-project/package.json b/yarn-project/package.json index b395389b5cb..af28c7bce27 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -47,6 +47,7 @@ "p2p-bootstrap", "protocol-contracts", "prover-client", + "prover-node", "rollup-provider", "sequencer-client", "scripts", diff --git a/yarn-project/prover-client/src/config.ts b/yarn-project/prover-client/src/config.ts index 5f93b0b0e25..383e85fcb36 100644 --- a/yarn-project/prover-client/src/config.ts +++ b/yarn-project/prover-client/src/config.ts @@ -14,6 +14,8 @@ export type ProverClientConfig = ProverConfig & { bbWorkingDirectory: string; /** The path to the bb binary */ bbBinaryPath: string; + /** True to disable proving altogether. 
*/ + disableProver: boolean; }; /** @@ -28,6 +30,7 @@ export function getProverEnvVars(): ProverClientConfig { ACVM_BINARY_PATH = '', BB_WORKING_DIRECTORY = tmpdir(), BB_BINARY_PATH = '', + PROVER_DISABLED = '', /** @deprecated */ PROVER_AGENTS = '1', PROVER_AGENT_ENABLED = '1', @@ -44,6 +47,7 @@ export function getProverEnvVars(): ProverClientConfig { const proverAgentPollInterval = safeParseNumber(PROVER_AGENT_POLL_INTERVAL_MS, 100); const proverJobTimeoutMs = safeParseNumber(PROVER_JOB_TIMEOUT_MS, 60000); const proverJobPollIntervalMs = safeParseNumber(PROVER_JOB_POLL_INTERVAL_MS, 1000); + const disableProver = ['1', 'true'].includes(PROVER_DISABLED); return { acvmWorkingDirectory: ACVM_WORKING_DIRECTORY, @@ -51,6 +55,7 @@ export function getProverEnvVars(): ProverClientConfig { bbBinaryPath: BB_BINARY_PATH, bbWorkingDirectory: BB_WORKING_DIRECTORY, realProofs, + disableProver, proverAgentEnabled, proverAgentPollInterval, proverAgentConcurrency, diff --git a/yarn-project/prover-client/src/index.ts b/yarn-project/prover-client/src/index.ts index 1c2f83414e5..1945a792047 100644 --- a/yarn-project/prover-client/src/index.ts +++ b/yarn-project/prover-client/src/index.ts @@ -2,3 +2,4 @@ export { ProverClient } from '@aztec/circuit-types'; export * from './tx-prover/tx-prover.js'; export * from './config.js'; +export * from './tx-prover/factory.js'; diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 385f50e629e..90c3ec5fb03 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -133,7 +133,7 @@ export class ProvingOrchestrator { ): Promise { // Create initial header if not done so yet if (!this.initialHeader) { - this.initialHeader = await this.db.buildInitialHeader(); + this.initialHeader = await this.db.getInitialHeader(); } if (!Number.isInteger(numTxs) || numTxs < 2) { @@ -268,7 +268,7 @@ 
export class ProvingOrchestrator { // base rollup inputs // Then enqueue the proving of all the transactions const unprovenPaddingTx = makeEmptyProcessedTx( - this.initialHeader ?? (await this.db.buildInitialHeader()), + this.initialHeader ?? (await this.db.getInitialHeader()), this.provingState.globalVariables.chainId, this.provingState.globalVariables.version, getVKTreeRoot(), @@ -564,7 +564,7 @@ export class ProvingOrchestrator { const getBaseInputsEmptyTx = async () => { const inputs = { - header: await this.db.buildInitialHeader(), + header: await this.db.getInitialHeader(), chainId: tx.data.constants.globalVariables.chainId, version: tx.data.constants.globalVariables.version, vkTreeRoot: tx.data.constants.vkTreeRoot, diff --git a/yarn-project/prover-client/src/tx-prover/factory.ts b/yarn-project/prover-client/src/tx-prover/factory.ts new file mode 100644 index 00000000000..6fc40d7f634 --- /dev/null +++ b/yarn-project/prover-client/src/tx-prover/factory.ts @@ -0,0 +1,16 @@ +import { type L2BlockSource } from '@aztec/circuit-types'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { type WorldStateSynchronizer } from '@aztec/world-state'; + +import { type ProverClientConfig } from '../config.js'; +import { TxProver } from './tx-prover.js'; + +export function createProverClient( + config: ProverClientConfig, + worldStateSynchronizer: WorldStateSynchronizer, + blockSource: L2BlockSource, + telemetry: TelemetryClient = new NoopTelemetryClient(), +) { + return config.disableProver ? 
undefined : TxProver.new(config, worldStateSynchronizer, blockSource, telemetry); +} diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts index 41cda4da56f..6f2a99107ee 100644 --- a/yarn-project/prover-client/src/tx-prover/tx-prover.ts +++ b/yarn-project/prover-client/src/tx-prover/tx-prover.ts @@ -1,5 +1,5 @@ import { BBNativeRollupProver, TestCircuitProver } from '@aztec/bb-prover'; -import { type ProcessedTx } from '@aztec/circuit-types'; +import { type L2BlockSource, type ProcessedTx } from '@aztec/circuit-types'; import { type BlockResult, type ProverClient, @@ -96,8 +96,8 @@ export class TxProver implements ProverClient { public static async new( config: ProverClientConfig, worldStateSynchronizer: WorldStateSynchronizer, + blockSource: L2BlockSource, telemetry: TelemetryClient, - initialHeader?: Header, ) { const agent = config.proverAgentEnabled ? new ProverAgent( @@ -107,6 +107,7 @@ export class TxProver implements ProverClient { ) : undefined; + const initialHeader = await worldStateSynchronizer.getCommitted().getInitialHeader(); const prover = new TxProver(config, worldStateSynchronizer, telemetry, agent, initialHeader); await prover.start(); return prover; diff --git a/yarn-project/prover-node/.eslintrc.cjs b/yarn-project/prover-node/.eslintrc.cjs new file mode 100644 index 00000000000..e659927475c --- /dev/null +++ b/yarn-project/prover-node/.eslintrc.cjs @@ -0,0 +1 @@ +module.exports = require('@aztec/foundation/eslint'); diff --git a/yarn-project/prover-node/README.md b/yarn-project/prover-node/README.md new file mode 100644 index 00000000000..e31824c879a --- /dev/null +++ b/yarn-project/prover-node/README.md @@ -0,0 +1 @@ +# Prover Node diff --git a/yarn-project/prover-node/package.json b/yarn-project/prover-node/package.json new file mode 100644 index 00000000000..efcd145e763 --- /dev/null +++ b/yarn-project/prover-node/package.json @@ -0,0 +1,85 @@ +{ + "name": 
"@aztec/prover-node", + "version": "0.1.0", + "type": "module", + "exports": { + ".": "./dest/index.js" + }, + "inherits": [ + "../package.common.json" + ], + "scripts": { + "build": "yarn clean && tsc -b", + "build:dev": "tsc -b --watch", + "clean": "rm -rf ./dest .tsbuildinfo", + "formatting": "run -T prettier --check ./src && run -T eslint ./src", + "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", + "bb": "node --no-warnings ./dest/bb/index.js", + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" + }, + "jest": { + "moduleNameMapper": { + "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" + }, + "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", + "rootDir": "./src", + "transform": { + "^.+\\.tsx?$": [ + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + } + } + } + ] + }, + "extensionsToTreatAsEsm": [ + ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] + ] + }, + "dependencies": { + "@aztec/archiver": "workspace:^", + "@aztec/circuit-types": "workspace:^", + "@aztec/circuits.js": "workspace:^", + "@aztec/foundation": "workspace:^", + "@aztec/kv-store": "workspace:^", + "@aztec/prover-client": "workspace:^", + "@aztec/sequencer-client": "workspace:^", + "@aztec/simulator": "workspace:^", + "@aztec/telemetry-client": "workspace:^", + "@aztec/world-state": "workspace:^", + "source-map-support": "^0.5.21", + "tslib": "^2.4.0" + }, + "devDependencies": { + "@jest/globals": "^29.5.0", + "@types/jest": "^29.5.0", + "@types/memdown": "^3.0.0", + "@types/node": "^18.7.23", + "@types/source-map-support": "^0.5.10", + "jest": "^29.5.0", + "jest-mock-extended": "^3.0.3", + "ts-node": "^10.9.1", + "typescript": "^5.0.4" + }, + "files": [ + "dest", + "src", + "!*.test.*" + ], + "types": "./dest/index.d.ts", + "engines": { + "node": ">=18" + } +} diff --git a/yarn-project/prover-node/src/config.ts b/yarn-project/prover-node/src/config.ts new 
file mode 100644 index 00000000000..59f99b68f60 --- /dev/null +++ b/yarn-project/prover-node/src/config.ts @@ -0,0 +1,25 @@ +import { type ArchiverConfig, getArchiverConfigFromEnv } from '@aztec/archiver'; +import { type ProverClientConfig, getProverEnvVars } from '@aztec/prover-client'; +import { type PublisherConfig, type TxSenderConfig, getTxSenderConfigFromEnv } from '@aztec/sequencer-client'; +import { type WorldStateConfig, getWorldStateConfigFromEnv } from '@aztec/world-state'; + +import { type TxProviderConfig, getTxProviderConfigFromEnv } from './tx-provider/config.js'; + +export type ProverNodeConfig = ArchiverConfig & + ProverClientConfig & + WorldStateConfig & + PublisherConfig & + TxSenderConfig & + TxProviderConfig; + +export function getProverNodeConfigFromEnv(): ProverNodeConfig { + const { PROOF_PUBLISH_RETRY_INTERVAL_MS } = process.env; + return { + ...getArchiverConfigFromEnv(), + ...getProverEnvVars(), + ...getWorldStateConfigFromEnv(), + ...getTxSenderConfigFromEnv('PROVER'), + ...getTxProviderConfigFromEnv(), + l1PublishRetryIntervalMS: PROOF_PUBLISH_RETRY_INTERVAL_MS ? 
+PROOF_PUBLISH_RETRY_INTERVAL_MS : 1_000, + }; +} diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts new file mode 100644 index 00000000000..6b5c65d20f2 --- /dev/null +++ b/yarn-project/prover-node/src/factory.ts @@ -0,0 +1,56 @@ +import { type Archiver, createArchiver } from '@aztec/archiver'; +import { type AztecNode } from '@aztec/circuit-types'; +import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; +import { createStore } from '@aztec/kv-store/utils'; +import { createProverClient } from '@aztec/prover-client'; +import { getL1Publisher } from '@aztec/sequencer-client'; +import { PublicProcessorFactory, createSimulationProvider } from '@aztec/simulator'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { createWorldStateSynchronizer } from '@aztec/world-state'; + +import { type ProverNodeConfig } from './config.js'; +import { ProverNode } from './prover-node.js'; +import { AztecNodeTxProvider } from './tx-provider/aztec-node-tx-provider.js'; +import { createTxProvider } from './tx-provider/factory.js'; + +/** Creates a new prover node given a config. */ +export async function createProverNode( + config: ProverNodeConfig, + deps: { + telemetry?: TelemetryClient; + log?: DebugLogger; + storeLog?: DebugLogger; + aztecNodeTxProvider?: AztecNode; + archiver?: Archiver; + } = {}, +) { + const telemetry = deps.telemetry ?? new NoopTelemetryClient(); + const log = deps.log ?? createDebugLogger('aztec:prover'); + const storeLog = deps.storeLog ?? createDebugLogger('aztec:prover:lmdb'); + + const store = await createStore(config, config.l1Contracts.rollupAddress, storeLog); + + const archiver = deps.archiver ?? 
(await createArchiver(config, store, telemetry, { blockUntilSync: true })); + log.verbose(`Created archiver and synced to block ${await archiver.getBlockNumber()}`); + + const worldStateConfig = { ...config, worldStateProvenBlocksOnly: true }; + const worldStateSynchronizer = await createWorldStateSynchronizer(worldStateConfig, store, archiver); + await worldStateSynchronizer.start(); + + const simulationProvider = await createSimulationProvider(config, log); + + const prover = await createProverClient(config, worldStateSynchronizer, archiver); + + // REFACTOR: Move publisher out of sequencer package and into an L1-related package + const publisher = getL1Publisher(config); + + const latestWorldState = worldStateSynchronizer.getLatest(); + const publicProcessorFactory = new PublicProcessorFactory(latestWorldState, archiver, simulationProvider, telemetry); + + const txProvider = deps.aztecNodeTxProvider + ? new AztecNodeTxProvider(deps.aztecNodeTxProvider) + : createTxProvider(config); + + return new ProverNode(prover!, publicProcessorFactory, publisher, archiver, txProvider); +} diff --git a/yarn-project/prover-node/src/http.ts b/yarn-project/prover-node/src/http.ts new file mode 100644 index 00000000000..940053a0eab --- /dev/null +++ b/yarn-project/prover-node/src/http.ts @@ -0,0 +1,25 @@ +import { AztecAddress, EthAddress, Fr, Header } from '@aztec/circuits.js'; +import { JsonRpcServer } from '@aztec/foundation/json-rpc/server'; + +import { type ProverNode } from './prover-node.js'; + +/** + * Wrap a ProverNode instance with a JSON RPC HTTP server. 
+ * @param node - The ProverNode + * @returns A JSON-RPC HTTP server + */ +export function createProverNodeRpcServer(node: ProverNode) { + const rpc = new JsonRpcServer( + node, + { + AztecAddress, + EthAddress, + Fr, + Header, + }, + {}, + // disable methods not part of the AztecNode interface + ['start', 'stop', 'createProvingJob', 'work', 'getProver'], + ); + return rpc; +} diff --git a/yarn-project/prover-node/src/index.ts b/yarn-project/prover-node/src/index.ts new file mode 100644 index 00000000000..21851f2c615 --- /dev/null +++ b/yarn-project/prover-node/src/index.ts @@ -0,0 +1,4 @@ +export * from './factory.js'; +export * from './config.js'; +export * from './prover-node.js'; +export * from './http.js'; diff --git a/yarn-project/prover-node/src/job/block-proving-job.ts b/yarn-project/prover-node/src/job/block-proving-job.ts new file mode 100644 index 00000000000..e1c4c99dcb6 --- /dev/null +++ b/yarn-project/prover-node/src/job/block-proving-job.ts @@ -0,0 +1,147 @@ +import { + type BlockProver, + EmptyTxValidator, + type L2Block, + type L2BlockSource, + PROVING_STATUS, + type ProcessedTx, + type Tx, + type TxHash, + type TxProvider, +} from '@aztec/circuit-types'; +import { type Fr } from '@aztec/circuits.js'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { type L1Publisher } from '@aztec/sequencer-client'; +import { type PublicProcessor, type PublicProcessorFactory } from '@aztec/simulator'; + +/** + * Job that grabs a range of blocks from the unfinalised chain from L1, gets their txs given their hashes, + * re-executes their public calls, generates a rollup proof, and submits it to L1. This job will update the + * world state as part of public call execution via the public processor. 
+ */ +export class BlockProvingJob { + private state: BlockProvingJobState = 'initialized'; + private log = createDebugLogger('aztec:block-proving-job'); + + constructor( + private prover: BlockProver, + private publicProcessorFactory: PublicProcessorFactory, + private publisher: L1Publisher, + private l2BlockSource: L2BlockSource, + private txProvider: TxProvider, + ) {} + + public getState(): BlockProvingJobState { + return this.state; + } + + public async run(fromBlock: number, toBlock: number) { + if (fromBlock !== toBlock) { + throw new Error(`Block ranges are not yet supported`); + } + + this.log.info(`Starting block proving job`, { fromBlock, toBlock }); + this.state = 'started'; + + // TODO: Fast-forward world state to fromBlock and/or await fromBlock to be published to the unproven chain + + this.state = 'processing'; + + let historicalHeader = (await this.l2BlockSource.getBlock(fromBlock - 1))?.header; + for (let blockNumber = fromBlock; blockNumber <= toBlock; blockNumber++) { + const block = await this.getBlock(blockNumber); + const globalVariables = block.header.globalVariables; + const txHashes = block.body.txEffects.map(tx => tx.txHash); + const txCount = block.body.numberOfTxsIncludingPadded; + const l1ToL2Messages: Fr[] = []; // TODO: grab L1 to L2 messages for this block + + this.log.verbose(`Starting block processing`, { + number: block.number, + blockHash: block.hash().toString(), + lastArchive: block.header.lastArchive.root, + noteHashTreeRoot: block.header.state.partial.noteHashTree.root, + nullifierTreeRoot: block.header.state.partial.nullifierTree.root, + publicDataTreeRoot: block.header.state.partial.publicDataTree.root, + historicalHeader: historicalHeader?.hash(), + ...globalVariables, + }); + const provingTicket = await this.prover.startNewBlock(txCount, globalVariables, l1ToL2Messages); + const publicProcessor = await this.publicProcessorFactory.create(historicalHeader, globalVariables); + + const txs = await this.getTxs(txHashes); + 
await this.processTxs(publicProcessor, txs, txCount); + + this.log.verbose(`Processed all txs for block`, { + blockNumber: block.number, + blockHash: block.hash().toString(), + }); + + await this.prover.setBlockCompleted(); + + const result = await provingTicket.provingPromise; + if (result.status === PROVING_STATUS.FAILURE) { + throw new Error(`Block proving failed: ${result.reason}`); + } + + historicalHeader = block.header; + } + + this.state = 'awaiting-prover'; + const { block, aggregationObject, proof } = await this.prover.finaliseBlock(); + this.log.info(`Finalised proof for block range`, { fromBlock, toBlock }); + + this.state = 'publishing-proof'; + await this.publisher.submitProof(block.header, block.archive.root, aggregationObject, proof); + this.log.info(`Submitted proof for block range`, { fromBlock, toBlock }); + + this.state = 'completed'; + } + + private async getBlock(blockNumber: number): Promise { + const block = await this.l2BlockSource.getBlock(blockNumber); + if (!block) { + throw new Error(`Block ${blockNumber} not found in L2 block source`); + } + return block; + } + + private async getTxs(txHashes: TxHash[]): Promise { + const txs = await Promise.all( + txHashes.map(txHash => this.txProvider.getTxByHash(txHash).then(tx => [txHash, tx] as const)), + ); + const notFound = txs.filter(([_, tx]) => !tx); + if (notFound.length) { + throw new Error(`Txs not found: ${notFound.map(([txHash]) => txHash.toString()).join(', ')}`); + } + return txs.map(([_, tx]) => tx!); + } + + private async processTxs( + publicProcessor: PublicProcessor, + txs: Tx[], + totalNumberOfTxs: number, + ): Promise { + const [processedTxs, failedTxs] = await publicProcessor.process( + txs, + totalNumberOfTxs, + this.prover, + new EmptyTxValidator(), + ); + + if (failedTxs.length) { + throw new Error( + `Failed to process txs: ${failedTxs.map(({ tx, error }) => `${tx.getTxHash()} (${error})`).join(', ')}`, + ); + } + + return processedTxs; + } +} + +export type 
BlockProvingJobState = + | 'initialized' + | 'started' + | 'processing' + | 'awaiting-prover' + | 'publishing-proof' + | 'completed'; diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts new file mode 100644 index 00000000000..bd41429fb27 --- /dev/null +++ b/yarn-project/prover-node/src/prover-node.ts @@ -0,0 +1,108 @@ +import { type L2BlockSource, type ProverClient, type TxProvider } from '@aztec/circuit-types'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { RunningPromise } from '@aztec/foundation/running-promise'; +import { type L1Publisher } from '@aztec/sequencer-client'; +import { type PublicProcessorFactory } from '@aztec/simulator'; + +import { BlockProvingJob } from './job/block-proving-job.js'; + +/** + * An Aztec Prover Node is a standalone process that monitors the unfinalised chain on L1 for unproven blocks, + * fetches their txs from a tx source in the p2p network or an external node, re-executes their public functions, + * creates a rollup proof, and submits it to L1. + */ +export class ProverNode { + private log = createDebugLogger('aztec:prover-node'); + private runningPromise: RunningPromise | undefined; + + constructor( + private prover: ProverClient, + private publicProcessorFactory: PublicProcessorFactory, + private publisher: L1Publisher, + private l2BlockSource: L2BlockSource, + private txProvider: TxProvider, + private options: { pollingIntervalMs: number; disableAutomaticProving: boolean } = { + pollingIntervalMs: 1_000, + disableAutomaticProving: false, + }, + ) {} + + /** + * Starts the prover node so it periodically checks for unproven blocks in the unfinalised chain from L1 and proves them. + * This may change once we implement a prover coordination mechanism. 
+ */ + start() { + this.runningPromise = new RunningPromise(this.work.bind(this), this.options.pollingIntervalMs); + this.runningPromise.start(); + this.log.info('Started ProverNode'); + } + + /** + * Stops the prover node and all its dependencies. + */ + async stop() { + this.log.info('Stopping ProverNode'); + await this.runningPromise?.stop(); + await this.prover.stop(); + await this.l2BlockSource.stop(); + this.publisher.interrupt(); + this.log.info('Stopped ProverNode'); + } + + /** + * Single iteration of recurring work. This method is called periodically by the running promise. + * Checks whether there are new blocks to prove, proves them, and submits them. + * Only proves one block per job and one job at a time (for now). + */ + protected async work() { + if (this.options.disableAutomaticProving) { + return; + } + + const [latestBlockNumber, latestProvenBlockNumber] = await Promise.all([ + this.l2BlockSource.getBlockNumber(), + this.l2BlockSource.getProvenBlockNumber(), + ]); + + if (latestProvenBlockNumber >= latestBlockNumber) { + this.log.debug(`No new blocks to prove`, { latestBlockNumber, latestProvenBlockNumber }); + return; + } + + const fromBlock = latestProvenBlockNumber + 1; + const toBlock = fromBlock; // We only prove one block at a time for now + await this.prove(fromBlock, toBlock); + } + + /** + * Creates a proof for a block range. Returns once the proof has been submitted to L1. + */ + public prove(fromBlock: number, toBlock: number) { + return this.createProvingJob().run(fromBlock, toBlock); + } + + /** + * Starts a proving process and returns immediately. + */ + public startProof(fromBlock: number, toBlock: number) { + void this.createProvingJob().run(fromBlock, toBlock); + return Promise.resolve(); + } + + /** + * Returns the prover instance. 
+ */ + public getProver() { + return this.prover; + } + + private createProvingJob() { + return new BlockProvingJob( + this.prover, + this.publicProcessorFactory, + this.publisher, + this.l2BlockSource, + this.txProvider, + ); + } +} diff --git a/yarn-project/prover-node/src/tx-provider/aztec-node-tx-provider.ts b/yarn-project/prover-node/src/tx-provider/aztec-node-tx-provider.ts new file mode 100644 index 00000000000..90797602453 --- /dev/null +++ b/yarn-project/prover-node/src/tx-provider/aztec-node-tx-provider.ts @@ -0,0 +1,10 @@ +import { type AztecNode, type Tx, type TxHash, type TxProvider } from '@aztec/circuit-types'; + +/** Implements TxProvider by querying an Aztec node for the txs. */ +export class AztecNodeTxProvider implements TxProvider { + constructor(private node: AztecNode) {} + + getTxByHash(txHash: TxHash): Promise { + return this.node.getTxByHash(txHash); + } +} diff --git a/yarn-project/prover-node/src/tx-provider/config.ts b/yarn-project/prover-node/src/tx-provider/config.ts new file mode 100644 index 00000000000..a318352a335 --- /dev/null +++ b/yarn-project/prover-node/src/tx-provider/config.ts @@ -0,0 +1,9 @@ +export type TxProviderConfig = { + txProviderNodeUrl: string | undefined; +}; + +export function getTxProviderConfigFromEnv(): TxProviderConfig { + return { + txProviderNodeUrl: process.env.TX_PROVIDER_NODE_URL ?? 
process.env.AZTEC_NODE_URL, + }; +} diff --git a/yarn-project/prover-node/src/tx-provider/factory.ts b/yarn-project/prover-node/src/tx-provider/factory.ts new file mode 100644 index 00000000000..e17d13e00c0 --- /dev/null +++ b/yarn-project/prover-node/src/tx-provider/factory.ts @@ -0,0 +1,13 @@ +import { type TxProvider, createAztecNodeClient } from '@aztec/circuit-types'; + +import { AztecNodeTxProvider } from './aztec-node-tx-provider.js'; +import { type TxProviderConfig } from './config.js'; + +export function createTxProvider(config: TxProviderConfig): TxProvider { + if (config.txProviderNodeUrl) { + const node = createAztecNodeClient(config.txProviderNodeUrl); + return new AztecNodeTxProvider(node); + } else { + throw new Error(`Aztec Node URL for Tx Provider is not set.`); + } +} diff --git a/yarn-project/prover-node/src/tx-provider/index.ts b/yarn-project/prover-node/src/tx-provider/index.ts new file mode 100644 index 00000000000..bac271dd877 --- /dev/null +++ b/yarn-project/prover-node/src/tx-provider/index.ts @@ -0,0 +1,3 @@ +export * from './aztec-node-tx-provider.js'; +export * from './factory.js'; +export * from './config.js'; diff --git a/yarn-project/prover-node/tsconfig.json b/yarn-project/prover-node/tsconfig.json new file mode 100644 index 00000000000..9f241158c41 --- /dev/null +++ b/yarn-project/prover-node/tsconfig.json @@ -0,0 +1,41 @@ +{ + "extends": "..", + "compilerOptions": { + "outDir": "dest", + "rootDir": "src", + "tsBuildInfoFile": ".tsbuildinfo" + }, + "references": [ + { + "path": "../archiver" + }, + { + "path": "../circuit-types" + }, + { + "path": "../circuits.js" + }, + { + "path": "../foundation" + }, + { + "path": "../kv-store" + }, + { + "path": "../prover-client" + }, + { + "path": "../sequencer-client" + }, + { + "path": "../simulator" + }, + { + "path": "../telemetry-client" + }, + { + "path": "../world-state" + } + ], + "include": ["src"] +} diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts 
b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 7ad3a9b0c53..9c2c0fc9065 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -569,10 +569,14 @@ export class PXEService implements PXE { return this.node.getTxEffect(txHash); } - async getBlockNumber(): Promise { + public async getBlockNumber(): Promise { return await this.node.getBlockNumber(); } + public async getProvenBlockNumber(): Promise { + return await this.node.getProvenBlockNumber(); + } + /** * Gets unencrypted logs based on the provided filter. * @param filter - The filter to apply to the logs. diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index c75517c3938..5fe566e154f 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -1,16 +1,14 @@ import { type AllowedElement } from '@aztec/circuit-types'; import { AztecAddress, Fr, FunctionSelector, getContractClassFromArtifact } from '@aztec/circuits.js'; -import { type L1ContractAddresses, NULL_KEY } from '@aztec/ethereum'; +import { getL1ContractAddressesFromEnv } from '@aztec/ethereum'; import { EthAddress } from '@aztec/foundation/eth-address'; import { FPCContract } from '@aztec/noir-contracts.js/FPC'; import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { AuthRegistryAddress } from '@aztec/protocol-contracts/auth-registry'; import { GasTokenAddress } from '@aztec/protocol-contracts/gas-token'; -import { type Hex } from 'viem'; - import { type GlobalReaderConfig } from './global_variable_builder/index.js'; -import { type PublisherConfig, type TxSenderConfig } from './publisher/config.js'; +import { type PublisherConfig, type TxSenderConfig, getTxSenderConfigFromEnv } from './publisher/config.js'; import { type SequencerConfig } from './sequencer/config.js'; /** Chain configuration. 
*/ @@ -35,11 +33,7 @@ export type SequencerClientConfig = PublisherConfig & */ export function getConfigEnvVars(): SequencerClientConfig { const { - SEQ_PUBLISHER_PRIVATE_KEY, - ETHEREUM_HOST, - L1_CHAIN_ID, VERSION, - SEQ_REQUIRED_CONFIRMATIONS, SEQ_PUBLISH_RETRY_INTERVAL_MS, SEQ_TX_POLLING_INTERVAL_MS, SEQ_MAX_TX_PER_BLOCK, @@ -47,13 +41,7 @@ export function getConfigEnvVars(): SequencerClientConfig { SEQ_ALLOWED_SETUP_FN, SEQ_ALLOWED_TEARDOWN_FN, SEQ_MAX_BLOCK_SIZE_IN_BYTES, - AVAILABILITY_ORACLE_CONTRACT_ADDRESS, - ROLLUP_CONTRACT_ADDRESS, - REGISTRY_CONTRACT_ADDRESS, - INBOX_CONTRACT_ADDRESS, - OUTBOX_CONTRACT_ADDRESS, - GAS_TOKEN_CONTRACT_ADDRESS, - GAS_PORTAL_CONTRACT_ADDRESS, + SEQ_SKIP_SUBMIT_PROOFS, COINBASE, FEE_RECIPIENT, ACVM_WORKING_DIRECTORY, @@ -61,37 +49,16 @@ export function getConfigEnvVars(): SequencerClientConfig { ENFORCE_FEES = '', } = process.env; - const publisherPrivateKey: Hex = SEQ_PUBLISHER_PRIVATE_KEY - ? `0x${SEQ_PUBLISHER_PRIVATE_KEY.replace('0x', '')}` - : NULL_KEY; - // Populate the relevant addresses for use by the sequencer - const addresses: L1ContractAddresses = { - availabilityOracleAddress: AVAILABILITY_ORACLE_CONTRACT_ADDRESS - ? EthAddress.fromString(AVAILABILITY_ORACLE_CONTRACT_ADDRESS) - : EthAddress.ZERO, - rollupAddress: ROLLUP_CONTRACT_ADDRESS ? EthAddress.fromString(ROLLUP_CONTRACT_ADDRESS) : EthAddress.ZERO, - registryAddress: REGISTRY_CONTRACT_ADDRESS ? EthAddress.fromString(REGISTRY_CONTRACT_ADDRESS) : EthAddress.ZERO, - inboxAddress: INBOX_CONTRACT_ADDRESS ? EthAddress.fromString(INBOX_CONTRACT_ADDRESS) : EthAddress.ZERO, - outboxAddress: OUTBOX_CONTRACT_ADDRESS ? EthAddress.fromString(OUTBOX_CONTRACT_ADDRESS) : EthAddress.ZERO, - gasTokenAddress: GAS_TOKEN_CONTRACT_ADDRESS ? EthAddress.fromString(GAS_TOKEN_CONTRACT_ADDRESS) : EthAddress.ZERO, - gasPortalAddress: GAS_PORTAL_CONTRACT_ADDRESS - ? 
EthAddress.fromString(GAS_PORTAL_CONTRACT_ADDRESS) - : EthAddress.ZERO, - }; - return { enforceFees: ['1', 'true'].includes(ENFORCE_FEES), - rpcUrl: ETHEREUM_HOST ? ETHEREUM_HOST : '', - l1ChainId: L1_CHAIN_ID ? +L1_CHAIN_ID : 31337, // 31337 is the default chain id for anvil version: VERSION ? +VERSION : 1, // 1 is our default version - requiredConfirmations: SEQ_REQUIRED_CONFIRMATIONS ? +SEQ_REQUIRED_CONFIRMATIONS : 1, - l1BlockPublishRetryIntervalMS: SEQ_PUBLISH_RETRY_INTERVAL_MS ? +SEQ_PUBLISH_RETRY_INTERVAL_MS : 1_000, + l1PublishRetryIntervalMS: SEQ_PUBLISH_RETRY_INTERVAL_MS ? +SEQ_PUBLISH_RETRY_INTERVAL_MS : 1_000, transactionPollingIntervalMS: SEQ_TX_POLLING_INTERVAL_MS ? +SEQ_TX_POLLING_INTERVAL_MS : 1_000, maxBlockSizeInBytes: SEQ_MAX_BLOCK_SIZE_IN_BYTES ? +SEQ_MAX_BLOCK_SIZE_IN_BYTES : undefined, - l1Contracts: addresses, - publisherPrivateKey, + l1Contracts: getL1ContractAddressesFromEnv(), maxTxsPerBlock: SEQ_MAX_TX_PER_BLOCK ? +SEQ_MAX_TX_PER_BLOCK : 32, minTxsPerBlock: SEQ_MIN_TX_PER_BLOCK ? +SEQ_MIN_TX_PER_BLOCK : 1, + sequencerSkipSubmitProofs: ['1', 'true'].includes(SEQ_SKIP_SUBMIT_PROOFS ?? ''), // TODO: undefined should not be allowed for the following 2 values in PROD coinbase: COINBASE ? EthAddress.fromString(COINBASE) : undefined, feeRecipient: FEE_RECIPIENT ? AztecAddress.fromString(FEE_RECIPIENT) : undefined, @@ -103,6 +70,7 @@ export function getConfigEnvVars(): SequencerClientConfig { allowedInTeardown: SEQ_ALLOWED_TEARDOWN_FN ? 
parseSequencerAllowList(SEQ_ALLOWED_TEARDOWN_FN) : getDefaultAllowedTeardownFunctions(), + ...getTxSenderConfigFromEnv('SEQ'), }; } diff --git a/yarn-project/sequencer-client/src/publisher/config.ts b/yarn-project/sequencer-client/src/publisher/config.ts index 1292ae65d12..dc7675b2f52 100644 --- a/yarn-project/sequencer-client/src/publisher/config.ts +++ b/yarn-project/sequencer-client/src/publisher/config.ts @@ -1,4 +1,6 @@ -import { type L1ContractAddresses } from '@aztec/ethereum'; +import { type L1ContractAddresses, NULL_KEY } from '@aztec/ethereum'; + +import { type Hex } from 'viem'; /** * The configuration of the rollup transaction publisher. @@ -37,5 +39,21 @@ export interface PublisherConfig { /** * The interval to wait between publish retries. */ - l1BlockPublishRetryIntervalMS: number; + l1PublishRetryIntervalMS: number; +} + +export function getTxSenderConfigFromEnv(scope: 'PROVER' | 'SEQ'): Omit { + const { ETHEREUM_HOST, L1_CHAIN_ID } = process.env; + + const PUBLISHER_PRIVATE_KEY = process.env[`${scope}_PUBLISHER_PRIVATE_KEY`]; + const REQUIRED_CONFIRMATIONS = process.env[`${scope}_REQUIRED_CONFIRMATIONS`]; + + const publisherPrivateKey: Hex = PUBLISHER_PRIVATE_KEY ? `0x${PUBLISHER_PRIVATE_KEY.replace('0x', '')}` : NULL_KEY; + + return { + rpcUrl: ETHEREUM_HOST ? ETHEREUM_HOST : '', + requiredConfirmations: REQUIRED_CONFIRMATIONS ? +REQUIRED_CONFIRMATIONS : 1, + publisherPrivateKey, + l1ChainId: L1_CHAIN_ID ? 
+L1_CHAIN_ID : 31337, + }; } diff --git a/yarn-project/sequencer-client/src/publisher/index.ts b/yarn-project/sequencer-client/src/publisher/index.ts index 529011d5005..8e5f8dbd60e 100644 --- a/yarn-project/sequencer-client/src/publisher/index.ts +++ b/yarn-project/sequencer-client/src/publisher/index.ts @@ -3,7 +3,7 @@ import { L1Publisher } from './l1-publisher.js'; import { ViemTxSender } from './viem-tx-sender.js'; export { L1Publisher } from './l1-publisher.js'; -export { PublisherConfig } from './config.js'; +export { PublisherConfig, TxSenderConfig, getTxSenderConfigFromEnv } from './config.js'; /** * Returns a new instance of the L1Publisher. diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 6b0012fcb77..6ceaa3d6f1b 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -47,7 +47,7 @@ describe('L1Publisher', () => { txSender.getTransactionReceipt.mockResolvedValueOnce(publishTxReceipt).mockResolvedValueOnce(processTxReceipt); txSender.getCurrentArchive.mockResolvedValue(l2Block.header.lastArchive.root.toBuffer()); - publisher = new L1Publisher(txSender, { l1BlockPublishRetryIntervalMS: 1 }); + publisher = new L1Publisher(txSender, { l1PublishRetryIntervalMS: 1 }); }); it('publishes l2 block to l1', async () => { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 775751f56eb..313f5345e0f 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -134,7 +134,7 @@ export class L1Publisher implements L2BlockReceiver { private log = createDebugLogger('aztec:sequencer:publisher'); constructor(private txSender: L1PublisherTxSender, config?: PublisherConfig) { - this.sleepTimeMs = 
config?.l1BlockPublishRetryIntervalMS ?? 60_000; + this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 60_000; } public async isItMyTurnToSubmit(blockNumber: number): Promise { diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index dfe83e9de2d..68e42e82136 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -131,7 +131,7 @@ describe('sequencer', () => { provingPromise: Promise.resolve(result), }; - p2p.getTxs.mockResolvedValueOnce([tx]); + p2p.getTxs.mockReturnValueOnce([tx]); proverClient.startNewBlock.mockResolvedValueOnce(ticket); proverClient.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); @@ -163,7 +163,7 @@ describe('sequencer', () => { provingPromise: Promise.resolve(result), }; - p2p.getTxs.mockResolvedValueOnce([tx]); + p2p.getTxs.mockReturnValueOnce([tx]); proverClient.startNewBlock.mockResolvedValueOnce(ticket); proverClient.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); @@ -204,7 +204,7 @@ describe('sequencer', () => { provingPromise: Promise.resolve(result), }; - p2p.getTxs.mockResolvedValueOnce(txs); + p2p.getTxs.mockReturnValueOnce(txs); proverClient.startNewBlock.mockResolvedValueOnce(ticket); proverClient.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); @@ -248,7 +248,7 @@ describe('sequencer', () => { provingPromise: Promise.resolve(result), }; - p2p.getTxs.mockResolvedValueOnce(txs); + p2p.getTxs.mockReturnValueOnce(txs); proverClient.startNewBlock.mockResolvedValueOnce(ticket); proverClient.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); @@ -286,7 
+286,7 @@ describe('sequencer', () => { provingPromise: Promise.resolve(result), }; - p2p.getTxs.mockResolvedValueOnce(txs); + p2p.getTxs.mockReturnValueOnce(txs); proverClient.startNewBlock.mockResolvedValueOnce(ticket); proverClient.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); @@ -322,7 +322,7 @@ describe('sequencer', () => { provingPromise: Promise.resolve(result), }; - p2p.getTxs.mockResolvedValueOnce([tx]); + p2p.getTxs.mockReturnValueOnce([tx]); proverClient.startNewBlock.mockResolvedValueOnce(ticket); proverClient.finaliseBlock.mockResolvedValue({ block, aggregationObject: [], proof }); publisher.processL2Block.mockResolvedValueOnce(true); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 72e949174b2..88ef405ae46 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -50,6 +50,7 @@ export class Sequencer { private allowedInSetup: AllowedElement[] = []; private allowedInTeardown: AllowedElement[] = []; private maxBlockSizeInBytes: number = 1024 * 1024; + private skipSubmitProofs: boolean = false; public readonly tracer: Tracer; @@ -102,6 +103,10 @@ export class Sequencer { if (config.allowedInTeardown) { this.allowedInTeardown = config.allowedInTeardown; } + // TODO(palla/prover) This flag should not be needed: the sequencer should be initialized with a blockprover + // that does not return proofs at all (just simulates circuits), and use that to determine whether to submit + // proofs or not. 
+ this.skipSubmitProofs = !!config.sequencerSkipSubmitProofs; } /** @@ -175,15 +180,18 @@ export class Sequencer { // Do not go forward with new block if not my turn if (!(await this.publisher.isItMyTurnToSubmit(newBlockNumber))) { - this.log.verbose('Not my turn to submit block'); + this.log.debug('Not my turn to submit block'); return; } this.state = SequencerState.WAITING_FOR_TXS; // Get txs to build the new block - const pendingTxs = await this.p2pClient.getTxs(); + const pendingTxs = this.p2pClient.getTxs('pending'); if (pendingTxs.length < this.minTxsPerBLock) { + this.log.debug( + `Not creating block because there are not enough txs in the pool (got ${pendingTxs.length} min ${this.minTxsPerBLock})`, + ); return; } this.log.debug(`Retrieved ${pendingTxs.length} txs from P2P pool`); @@ -309,10 +317,10 @@ export class Sequencer { await this.publishL2Block(block); this.log.info(`Submitted rollup block ${block.number} with ${processedTxs.length} transactions`); - // Submit the proof if we have configured this sequencer to run with a prover. + // Submit the proof if we have configured this sequencer to run with an actual prover. // This is temporary while we submit one proof per block, but will have to change once we // move onto proving batches of multiple blocks at a time. - if (aggregationObject && proof) { + if (aggregationObject && proof && !this.skipSubmitProofs) { await this.publisher.submitProof(block.header, block.archive.root, aggregationObject, proof); this.log.info(`Submitted proof for block ${block.number}`); } @@ -378,7 +386,15 @@ export class Sequencer { this.l1ToL2MessageSource.getBlockNumber(), ]); const min = Math.min(...syncedBlocks); - return min >= this.lastPublishedBlock; + const [worldState, p2p, l2BlockSource, l1ToL2MessageSource] = syncedBlocks; + const result = min >= this.lastPublishedBlock; + this.log.debug(`Sync check to last published block ${this.lastPublishedBlock} ${result ? 
'succeeded' : 'failed'}`, { + worldState, + p2p, + l2BlockSource, + l1ToL2MessageSource, + }); + return result; } get coinbase(): EthAddress { diff --git a/yarn-project/simulator/src/providers/factory.ts b/yarn-project/simulator/src/providers/factory.ts new file mode 100644 index 00000000000..73f7c70cd55 --- /dev/null +++ b/yarn-project/simulator/src/providers/factory.ts @@ -0,0 +1,38 @@ +import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; + +import * as fs from 'fs/promises'; + +import { NativeACVMSimulator } from './acvm_native.js'; +import { WASMSimulator } from './acvm_wasm.js'; +import { type SimulationProvider } from './simulation_provider.js'; + +export type SimulationProviderConfig = { + acvmBinaryPath?: string; + acvmWorkingDirectory?: string; +}; + +export function getSimulationProviderConfigFromEnv() { + const { ACVM_BINARY_PATH, ACVM_WORKING_DIRECTORY } = process.env; + return { + acvmWorkingDirectory: ACVM_WORKING_DIRECTORY ? ACVM_WORKING_DIRECTORY : undefined, + acvmBinaryPath: ACVM_BINARY_PATH ? 
ACVM_BINARY_PATH : undefined, + }; +} + +export async function createSimulationProvider( + config: SimulationProviderConfig, + logger: DebugLogger = createDebugLogger('aztec:simulator'), +): Promise { + if (config.acvmBinaryPath && config.acvmWorkingDirectory) { + try { + await fs.access(config.acvmBinaryPath, fs.constants.R_OK); + await fs.mkdir(config.acvmWorkingDirectory, { recursive: true }); + logger.info(`Using native ACVM at ${config.acvmBinaryPath} and working directory ${config.acvmWorkingDirectory}`); + return new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath); + } catch { + logger.warn(`Failed to access ACVM at ${config.acvmBinaryPath}, falling back to WASM`); + } + } + logger.info('Using WASM ACVM simulation'); + return new WASMSimulator(); +} diff --git a/yarn-project/simulator/src/providers/index.ts b/yarn-project/simulator/src/providers/index.ts index 936e33be0fa..15902dc2b65 100644 --- a/yarn-project/simulator/src/providers/index.ts +++ b/yarn-project/simulator/src/providers/index.ts @@ -1,3 +1,4 @@ export * from './acvm_native.js'; export * from './acvm_wasm.js'; export * from './simulation_provider.js'; +export * from './factory.js'; diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts index 2277545a759..f8281f9ae6d 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -128,6 +128,12 @@ export class PublicProcessor { const [processedTx, returnValues] = !tx.hasPublicCalls() ? 
[makeProcessedTx(tx, tx.data.toKernelCircuitPublicInputs(), [])] : await this.processTxWithPublicCalls(tx); + this.log.debug(`Processed tx`, { + txHash: processedTx.hash, + historicalHeaderHash: processedTx.data.constants.historicalHeader.hash(), + blockNumber: processedTx.data.constants.globalVariables.blockNumber, + lastArchiveRoot: processedTx.data.constants.historicalHeader.lastArchive.root, + }); // Set fee payment update request into the processed tx processedTx.finalPublicDataUpdateRequests = await this.createFinalDataUpdateRequests(processedTx); diff --git a/yarn-project/tsconfig.json b/yarn-project/tsconfig.json index 68777079e4a..e5706e34b3b 100644 --- a/yarn-project/tsconfig.json +++ b/yarn-project/tsconfig.json @@ -43,6 +43,7 @@ { "path": "p2p-bootstrap/tsconfig.json" }, { "path": "protocol-contracts/tsconfig.json" }, { "path": "prover-client/tsconfig.json" }, + { "path": "prover-node/tsconfig.json" }, { "path": "sequencer-client/tsconfig.json" }, { "path": "types/tsconfig.json" }, { "path": "world-state/tsconfig.json" }, diff --git a/yarn-project/world-state/src/synchronizer/config.ts b/yarn-project/world-state/src/synchronizer/config.ts index 7474b7e55e5..79ef4b94df9 100644 --- a/yarn-project/world-state/src/synchronizer/config.ts +++ b/yarn-project/world-state/src/synchronizer/config.ts @@ -14,7 +14,7 @@ export interface WorldStateConfig { * Returns the configuration values for the world state synchronizer. * @returns The configuration values for the world state synchronizer. */ -export function getConfigEnvVars(): WorldStateConfig { +export function getWorldStateConfigFromEnv(): WorldStateConfig { const { WS_BLOCK_CHECK_INTERVAL_MS, WS_L2_BLOCK_QUEUE_SIZE, WS_PROVEN_BLOCKS_ONLY } = process.env; const envVars: WorldStateConfig = { worldStateBlockCheckIntervalMS: WS_BLOCK_CHECK_INTERVAL_MS ? 
+WS_BLOCK_CHECK_INTERVAL_MS : 100, diff --git a/yarn-project/world-state/src/synchronizer/factory.ts b/yarn-project/world-state/src/synchronizer/factory.ts new file mode 100644 index 00000000000..ed49fe94f36 --- /dev/null +++ b/yarn-project/world-state/src/synchronizer/factory.ts @@ -0,0 +1,15 @@ +import { type L1ToL2MessageSource, type L2BlockSource } from '@aztec/circuit-types'; +import { type AztecKVStore } from '@aztec/kv-store'; + +import { MerkleTrees } from '../world-state-db/merkle_trees.js'; +import { type WorldStateConfig } from './config.js'; +import { ServerWorldStateSynchronizer } from './server_world_state_synchronizer.js'; + +export async function createWorldStateSynchronizer( + config: WorldStateConfig, + store: AztecKVStore, + l2BlockSource: L2BlockSource & L1ToL2MessageSource, +) { + const merkleTrees = await MerkleTrees.new(store); + return new ServerWorldStateSynchronizer(store, merkleTrees, l2BlockSource, config); +} diff --git a/yarn-project/world-state/src/synchronizer/index.ts b/yarn-project/world-state/src/synchronizer/index.ts index c0c744ca7f9..4c7128617fa 100644 --- a/yarn-project/world-state/src/synchronizer/index.ts +++ b/yarn-project/world-state/src/synchronizer/index.ts @@ -1,2 +1,3 @@ export * from './server_world_state_synchronizer.js'; export * from './world_state_synchronizer.js'; +export * from './factory.js'; diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts index 579a0cbf6ba..d7ee9504f71 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts @@ -44,7 +44,7 @@ export class ServerWorldStateSynchronizer implements WorldStateSynchronizer { store: AztecKVStore, private merkleTreeDb: MerkleTrees, private l2BlockSource: L2BlockSource & L1ToL2MessageSource, - config: WorldStateConfig, + private config: 
WorldStateConfig, private log = createDebugLogger('aztec:world_state'), ) { this.blockNumber = store.openSingleton('world_state_synch_last_block_number'); @@ -76,7 +76,9 @@ export class ServerWorldStateSynchronizer implements WorldStateSynchronizer { } // get the current latest block number - this.latestBlockNumberAtStart = await this.l2BlockSource.getBlockNumber(); + this.latestBlockNumberAtStart = await (this.config.worldStateProvenBlocksOnly + ? this.l2BlockSource.getProvenBlockNumber() + : this.l2BlockSource.getBlockNumber()); const blockToDownloadFrom = this.currentL2BlockNum + 1; diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_operations.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_operations.ts index bbfd04eae66..b3887f34269 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_operations.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_operations.ts @@ -72,8 +72,14 @@ export interface MerkleTreeOperations { */ getStateReference(): Promise; + /** + * Gets the initial header. + */ + getInitialHeader(): Promise
; + /** * Builds the initial header. + * @deprecated Use `getInitialHeader` instead. */ buildInitialHeader(): Promise
; diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts index d26a7d12b66..1aba0010b8a 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts @@ -44,6 +44,10 @@ export class MerkleTreeOperationsFacade implements MerkleTreeOperations { return this.trees.buildInitialHeader(this.includeUncommitted); } + getInitialHeader(): Promise
{ + return this.trees.getInitialHeader(this.includeUncommitted); + } + /** * Appends a set of leaf values to the tree. * @param treeId - Id of the tree to append leaves to. diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts index 850899e7d0c..c8ea4c1c3a0 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts @@ -169,4 +169,8 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeOperations buildInitialHeader(): Promise
{ throw new Error('Building initial header not supported on snapshot.'); } + + getInitialHeader(): Promise
{ + throw new Error('Getting initial header not supported on snapshot.'); + } } diff --git a/yarn-project/world-state/src/world-state-db/merkle_trees.ts b/yarn-project/world-state/src/world-state-db/merkle_trees.ts index 5d8be02b4dd..30d7ed5d209 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_trees.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_trees.ts @@ -99,6 +99,7 @@ export class MerkleTrees implements MerkleTreeDb { // gets initialized in #init private trees: MerkleTreeMap = null as any; private jobQueue = new SerialQueue(); + private initialHeader!: Header; private constructor(private store: AztecKVStore, private log: DebugLogger) {} @@ -172,13 +173,18 @@ export class MerkleTrees implements MerkleTreeDb { // We are not initializing from db so we need to populate the first leaf of the archive tree which is a hash of // the initial header. const initialHeader = await this.buildInitialHeader(true); + this.initialHeader = initialHeader; await this.#updateArchive(initialHeader, true); + } else { + // TODO(palla/prover-node) Running buildInitialHeader here yields a WRONG initial header, + // we need to reconstruct it from an empty state reference. + this.initialHeader = await this.buildInitialHeader(true); } await this.#commit(); } - // REFACTOR: Make this private. It only makes sense if called at the beginning of the lifecycle of the object. + // TODO(palla/prover-node): Make this private. It only makes sense if called at the beginning of the lifecycle of the object. public async buildInitialHeader(includeUncommitted: boolean): Promise
{ const state = await this.getStateReference(includeUncommitted); return new Header( @@ -190,6 +196,10 @@ export class MerkleTrees implements MerkleTreeDb { ); } + public getInitialHeader(): Promise
{ + return Promise.resolve(this.initialHeader); + } + /** * Stops the job queue (waits for all jobs to finish). */ diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 244cd7ec0fa..8fb9a895a8a 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -209,6 +209,7 @@ __metadata: "@aztec/p2p-bootstrap": "workspace:^" "@aztec/protocol-contracts": "workspace:^" "@aztec/prover-client": "workspace:^" + "@aztec/prover-node": "workspace:^" "@aztec/pxe": "workspace:^" "@aztec/telemetry-client": "workspace:^" "@aztec/txe": "workspace:^" @@ -413,6 +414,7 @@ __metadata: "@aztec/p2p": "workspace:^" "@aztec/protocol-contracts": "workspace:^" "@aztec/prover-client": "workspace:^" + "@aztec/prover-node": "workspace:^" "@aztec/pxe": "workspace:^" "@aztec/sequencer-client": "workspace:^" "@aztec/simulator": "workspace:^" @@ -787,6 +789,34 @@ __metadata: languageName: unknown linkType: soft +"@aztec/prover-node@workspace:^, @aztec/prover-node@workspace:prover-node": + version: 0.0.0-use.local + resolution: "@aztec/prover-node@workspace:prover-node" + dependencies: + "@aztec/archiver": "workspace:^" + "@aztec/circuit-types": "workspace:^" + "@aztec/circuits.js": "workspace:^" + "@aztec/foundation": "workspace:^" + "@aztec/kv-store": "workspace:^" + "@aztec/prover-client": "workspace:^" + "@aztec/sequencer-client": "workspace:^" + "@aztec/simulator": "workspace:^" + "@aztec/telemetry-client": "workspace:^" + "@aztec/world-state": "workspace:^" + "@jest/globals": ^29.5.0 + "@types/jest": ^29.5.0 + "@types/memdown": ^3.0.0 + "@types/node": ^18.7.23 + "@types/source-map-support": ^0.5.10 + jest: ^29.5.0 + jest-mock-extended: ^3.0.3 + source-map-support: ^0.5.21 + ts-node: ^10.9.1 + tslib: ^2.4.0 + typescript: ^5.0.4 + languageName: unknown + linkType: soft + "@aztec/pxe@workspace:^, @aztec/pxe@workspace:pxe": version: 0.0.0-use.local resolution: "@aztec/pxe@workspace:pxe"