diff --git a/.circleci/config.yml b/.circleci/config.yml index ac54d75ec06b..0a8172081270 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -513,6 +513,16 @@ jobs: aztec_manifest_key: end-to-end <<: *defaults_e2e_test + bench-proving: + steps: + - *checkout + - *setup_env + - run: + name: "Benchmark" + command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose-no-sandbox.yml TEST=benchmarks/bench_proving.test.ts JOB_NAME="bench-proving" DEBUG=aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees,aztec:prover:* + aztec_manifest_key: end-to-end + <<: *defaults_e2e_test + build-docs: machine: image: default @@ -799,11 +809,13 @@ workflows: - bench-publish-rollup: *e2e_test - bench-process-history: *e2e_test - bench-tx-size: *e2e_test + - bench-proving: *e2e_test - bench-summary: requires: - bench-publish-rollup - bench-process-history - bench-tx-size + - bench-proving <<: *defaults # Production releases. diff --git a/yarn-project/aztec/src/cli/cmds/start_prover.ts b/yarn-project/aztec/src/cli/cmds/start_prover.ts index 103ca97c8df0..7c39fe6e16a7 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover.ts @@ -1,6 +1,8 @@ import { type ProvingJobSource } from '@aztec/circuit-types'; import { ProverPool, createProvingJobSourceClient } from '@aztec/prover-client/prover-pool'; +import { tmpdir } from 'node:os'; + import { type ServiceStarter, parseModuleOptions } from '../util.js'; type ProverOptions = Partial<{ @@ -35,6 +37,8 @@ export const startProver: ServiceStarter = async (options, signalHandlers, logge { acvmBinaryPath: proverOptions.acvmBinaryPath, bbBinaryPath: proverOptions.bbBinaryPath, + acvmWorkingDirectory: tmpdir(), + bbWorkingDirectory: tmpdir(), }, agentCount, ); diff --git a/yarn-project/end-to-end/src/benchmarks/bench_proving.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_proving.test.ts new file mode 100644 index 
000000000000..a17edfbe984d --- /dev/null +++ b/yarn-project/end-to-end/src/benchmarks/bench_proving.test.ts @@ -0,0 +1,81 @@ +import { getSchnorrAccount } from '@aztec/accounts/schnorr'; +import { BatchCall, EthAddress, Fq, Fr, TxStatus, type Wallet } from '@aztec/aztec.js'; +import { TestContract } from '@aztec/noir-contracts.js'; +import { ProverPool } from '@aztec/prover-client/prover-pool'; + +import { jest } from '@jest/globals'; + +import { getACVMConfig } from '../fixtures/get_acvm_config.js'; +import { getBBConfig } from '../fixtures/get_bb_config.js'; +import { type EndToEndContext, setupNoL2Deploy } from '../fixtures/utils.js'; + +jest.setTimeout(1_000_000); + +// seconds! +const txTimeoutSec = 200; + +describe('benchmarks/proving', () => { + let ctx: Omit; + let wallet: Wallet; + let testContract: TestContract; + let proverPool: ProverPool; + let acvmCleanup: () => Promise; + let bbCleanup: () => Promise; + + // setup the environment + beforeAll(async () => { + ctx = await setupNoL2Deploy({ + proverAgents: 0, + }); + + const [acvmConfig, bbConfig] = await Promise.all([getACVMConfig(ctx.logger), getBBConfig(ctx.logger)]); + if (!acvmConfig || !bbConfig) { + throw new Error('Missing ACVM or BB config'); + } + + acvmCleanup = acvmConfig.cleanup; + bbCleanup = bbConfig.cleanup; + + proverPool = ProverPool.nativePool( + { + acvmBinaryPath: acvmConfig.acvmBinaryPath, + acvmWorkingDirectory: acvmConfig.acvmWorkingDirectory, + bbBinaryPath: bbConfig.bbBinaryPath, + bbWorkingDirectory: bbConfig.bbWorkingDirectory, + }, + 1, + 10, + ); + + await proverPool.start(ctx.prover!.getProvingJobSource()); + + wallet = await getSchnorrAccount(ctx.pxe, Fr.random(), Fq.random()).deploy().getWallet({ + timeout: txTimeoutSec, + }); + testContract = await TestContract.deploy(wallet).send().deployed({ + timeout: txTimeoutSec, + }); + }); + + afterAll(async () => { + await proverPool.stop(); + await ctx.teardown(); + + await acvmCleanup(); + await bbCleanup(); + }); + + 
it('executes a batch call', async () => { + const call = new BatchCall(wallet, [ + testContract.methods.emit_nullifier(42).request(), + testContract.methods.call_create_note(43, wallet.getAddress(), 44).request(), + testContract.methods.create_l2_to_l1_message_public(45, 46, EthAddress.random()).request(), + testContract.methods.emit_unencrypted(47).request(), + ]); + + const receipt = await call.send().wait({ + timeout: txTimeoutSec, + }); + expect(receipt.status).toBe(TxStatus.MINED); + }); +}); diff --git a/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts b/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts index 556726411137..a8c8349a6cec 100644 --- a/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts +++ b/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts @@ -13,14 +13,21 @@ const { } = process.env; // Determines if we have access to the acvm binary and a tmp folder for temp files -export async function getACVMConfig(logger: DebugLogger) { +export async function getACVMConfig(logger: DebugLogger): Promise< + | { + acvmWorkingDirectory: string; + acvmBinaryPath: string; + cleanup: () => Promise; + } + | undefined +> { try { - const expectedAcvmPath = ACVM_BINARY_PATH ? ACVM_BINARY_PATH : `../../noir/${NOIR_RELEASE_DIR}/acvm`; - await fs.access(expectedAcvmPath, fs.constants.R_OK); + const acvmBinaryPath = ACVM_BINARY_PATH ? ACVM_BINARY_PATH : `../../noir/${NOIR_RELEASE_DIR}/acvm`; + await fs.access(acvmBinaryPath, fs.constants.R_OK); const tempWorkingDirectory = `${TEMP_DIR}/${randomBytes(4).toString('hex')}`; const acvmWorkingDirectory = ACVM_WORKING_DIRECTORY ? 
ACVM_WORKING_DIRECTORY : `${tempWorkingDirectory}/acvm`; await fs.mkdir(acvmWorkingDirectory, { recursive: true }); - logger.verbose(`Using native ACVM binary at ${expectedAcvmPath} with working directory ${acvmWorkingDirectory}`); + logger.verbose(`Using native ACVM binary at ${acvmBinaryPath} with working directory ${acvmWorkingDirectory}`); const directoryToCleanup = ACVM_WORKING_DIRECTORY ? undefined : tempWorkingDirectory; @@ -33,7 +40,7 @@ export async function getACVMConfig(logger: DebugLogger) { return { acvmWorkingDirectory, - expectedAcvmPath, + acvmBinaryPath, cleanup, }; } catch (err) { diff --git a/yarn-project/end-to-end/src/fixtures/get_bb_config.ts b/yarn-project/end-to-end/src/fixtures/get_bb_config.ts new file mode 100644 index 000000000000..412c93164579 --- /dev/null +++ b/yarn-project/end-to-end/src/fixtures/get_bb_config.ts @@ -0,0 +1,46 @@ +import { type DebugLogger, fileURLToPath } from '@aztec/aztec.js'; + +import fs from 'node:fs/promises'; +import { tmpdir } from 'node:os'; +import path from 'path'; + +const { + BB_RELEASE_DIR = 'barretenberg/cpp/build/bin', + BB_BINARY_PATH, + TEMP_DIR = tmpdir(), + BB_WORKING_DIRECTORY = '', +} = process.env; + +export const getBBConfig = async ( + logger: DebugLogger, +): Promise<{ bbBinaryPath: string; bbWorkingDirectory: string; cleanup: () => Promise } | undefined> => { + try { + const bbBinaryPath = + BB_BINARY_PATH ?? 
+ path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../', BB_RELEASE_DIR, 'bb'); + await fs.access(bbBinaryPath, fs.constants.R_OK); + + let bbWorkingDirectory: string; + let directoryToCleanup: string | undefined; + + if (BB_WORKING_DIRECTORY) { + bbWorkingDirectory = BB_WORKING_DIRECTORY; + } else { + bbWorkingDirectory = await fs.mkdtemp(path.join(TEMP_DIR, 'bb-')); + directoryToCleanup = bbWorkingDirectory; + } + + await fs.mkdir(bbWorkingDirectory, { recursive: true }); + + const cleanup = async () => { + if (directoryToCleanup) { + await fs.rm(directoryToCleanup, { recursive: true, force: true }); + } + }; + + return { bbBinaryPath, bbWorkingDirectory, cleanup }; + } catch (err) { + logger.error(`Native BB not available, error: ${err}`); + return undefined; + } +}; diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 9fe93aafd776..a77f661f34c2 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -202,7 +202,7 @@ export class SnapshotManager { const acvmConfig = await getACVMConfig(this.logger); if (acvmConfig) { aztecNodeConfig.acvmWorkingDirectory = acvmConfig.acvmWorkingDirectory; - aztecNodeConfig.acvmBinaryPath = acvmConfig.expectedAcvmPath; + aztecNodeConfig.acvmBinaryPath = acvmConfig.acvmBinaryPath; } this.logger.verbose('Creating and synching an aztec node...'); @@ -254,7 +254,7 @@ export class SnapshotManager { const acvmConfig = await getACVMConfig(this.logger); if (acvmConfig) { aztecNodeConfig.acvmWorkingDirectory = acvmConfig.acvmWorkingDirectory; - aztecNodeConfig.acvmBinaryPath = acvmConfig.expectedAcvmPath; + aztecNodeConfig.acvmBinaryPath = acvmConfig.acvmBinaryPath; } this.logger.verbose('Creating aztec node...'); diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index df58258d1f69..f951a181a0eb 100644 
--- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -22,7 +22,6 @@ import { createDebugLogger, createPXEClient, deployL1Contracts, - fileURLToPath, makeFetch, waitForPXE, } from '@aztec/aztec.js'; @@ -33,7 +32,6 @@ import { computeContractAddressFromInstance, getContractClassFromArtifact, } from '@aztec/circuits.js'; -import { randomBytes } from '@aztec/foundation/crypto'; import { makeBackoff, retry } from '@aztec/foundation/retry'; import { AvailabilityOracleAbi, @@ -55,11 +53,11 @@ import { KeyRegistryContract } from '@aztec/noir-contracts.js'; import { GasTokenContract } from '@aztec/noir-contracts.js/GasToken'; import { getCanonicalGasToken, getCanonicalGasTokenAddress } from '@aztec/protocol-contracts/gas-token'; import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry'; +import { type ProverClient } from '@aztec/prover-client'; import { PXEService, type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; import { type SequencerClient } from '@aztec/sequencer-client'; import { type Anvil, createAnvil } from '@viem/anvil'; -import * as fs from 'fs/promises'; import getPort from 'get-port'; import * as path from 'path'; import { @@ -77,44 +75,17 @@ import { mnemonicToAccount } from 'viem/accounts'; import { foundry } from 'viem/chains'; import { MNEMONIC } from './fixtures.js'; +import { getACVMConfig } from './get_acvm_config.js'; import { isMetricsLoggingRequested, setupMetricsLogger } from './logging.js'; export { deployAndInitializeTokenAndBridgeContracts } from '../shared/cross_chain_test_harness.js'; -const { - PXE_URL = '', - NOIR_RELEASE_DIR = 'noir-repo/target/release', - TEMP_DIR = '/tmp', - ACVM_BINARY_PATH = '', - ACVM_WORKING_DIRECTORY = '', -} = process.env; +const { PXE_URL = '' } = process.env; const getAztecUrl = () => { return PXE_URL; }; -// Determines if we have access to the acvm binary and a tmp folder for temp files -const getACVMConfig 
= async (logger: DebugLogger) => { - try { - const expectedAcvmPath = ACVM_BINARY_PATH - ? ACVM_BINARY_PATH - : `${path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../noir/', NOIR_RELEASE_DIR)}/acvm`; - await fs.access(expectedAcvmPath, fs.constants.R_OK); - const tempWorkingDirectory = `${TEMP_DIR}/${randomBytes(4).toString('hex')}`; - const acvmWorkingDirectory = ACVM_WORKING_DIRECTORY ? ACVM_WORKING_DIRECTORY : `${tempWorkingDirectory}/acvm`; - await fs.mkdir(acvmWorkingDirectory, { recursive: true }); - logger.info(`Using native ACVM binary at ${expectedAcvmPath} with working directory ${acvmWorkingDirectory}`); - return { - acvmWorkingDirectory, - expectedAcvmPath, - directoryToCleanup: ACVM_WORKING_DIRECTORY ? undefined : tempWorkingDirectory, - }; - } catch (err) { - logger.error(`Native ACVM not available, error: ${err}`); - return undefined; - } -}; - export const setupL1Contracts = async ( l1RpcUrl: string, account: HDAccount | PrivateKeyAccount, @@ -279,7 +250,7 @@ async function setupWithRemoteEnvironment( const { chainId, protocolVersion } = await pxeClient.getNodeInfo(); // this contract might already have been deployed - // the following deployin functions are idempotent + // the following deploying functions are idempotent await deployCanonicalKeyRegistry( new SignerlessWallet(pxeClient, new DefaultMultiCallEntrypoint(chainId, protocolVersion)), ); @@ -293,6 +264,7 @@ async function setupWithRemoteEnvironment( return { aztecNode, sequencer: undefined, + prover: undefined, pxe: pxeClient, deployL1ContractsValues, accounts: await pxeClient!.getRegisteredAccounts(), @@ -333,22 +305,17 @@ export type EndToEndContext = { logger: DebugLogger; /** The cheat codes. */ cheatCodes: CheatCodes; + /** Proving jobs */ + prover: ProverClient | undefined; /** Function to stop the started services. */ teardown: () => Promise; }; -/** - * Sets up the environment for the end-to-end tests. 
- * @param numberOfAccounts - The number of new accounts to be created once the PXE is initiated. - * @param opts - Options to pass to the node initialization and to the setup script. - * @param pxeOpts - Options to pass to the PXE initialization. - */ -export async function setup( - numberOfAccounts = 1, +export async function setupNoL2Deploy( opts: SetupOptions = {}, pxeOpts: Partial = {}, - enableGas = false, -): Promise { +): Promise> { + const logger = getLogger(); const config = { ...getConfigEnvVars(), ...opts }; let anvil: Anvil | undefined; @@ -380,6 +347,7 @@ export async function setup( // Enable logging metrics to a local file named after the test suite if (isMetricsLoggingRequested()) { const filename = path.join('log', getJobName() + '.jsonl'); + logger.info(`Logging metrics to ${filename}`); setupMetricsLogger(filename); } @@ -388,14 +356,13 @@ export async function setup( await ethCheatCodes.loadChainState(opts.stateLoad); } - const logger = getLogger(); const hdAccount = mnemonicToAccount(MNEMONIC); const privKeyRaw = hdAccount.getHdKey().privateKey; const publisherPrivKey = privKeyRaw === null ?
null : Buffer.from(privKeyRaw); if (PXE_URL) { // we are setting up against a remote environment, l1 contracts are assumed to already be deployed - return await setupWithRemoteEnvironment(hdAccount, config, logger, numberOfAccounts, enableGas); + return setupWithRemoteEnvironment(hdAccount, config, logger, 0, false); } const deployL1ContractsValues = @@ -409,26 +376,15 @@ export async function setup( const acvmConfig = await getACVMConfig(logger); if (acvmConfig) { config.acvmWorkingDirectory = acvmConfig.acvmWorkingDirectory; - config.acvmBinaryPath = acvmConfig.expectedAcvmPath; + config.acvmBinaryPath = acvmConfig.acvmBinaryPath; } config.l1BlockPublishRetryIntervalMS = 100; const aztecNode = await AztecNodeService.createAndSync(config); const sequencer = aztecNode.getSequencer(); + const prover = aztecNode.getProver(); logger.verbose('Creating a pxe...'); - const { pxe, wallets } = await setupPXEService(numberOfAccounts, aztecNode!, pxeOpts, logger); - - logger.verbose('Deploying key registry...'); - await deployCanonicalKeyRegistry( - new SignerlessWallet(pxe, new DefaultMultiCallEntrypoint(config.chainId, config.version)), - ); - - if (enableGas) { - logger.verbose('Deploying gas token...'); - await deployCanonicalGasToken( - new SignerlessWallet(pxe, new DefaultMultiCallEntrypoint(config.chainId, config.version)), - ); - } + const { pxe } = await setupPXEService(0, aztecNode!, pxeOpts, logger, false); const cheatCodes = CheatCodes.create(config.rpcUrl, pxe!); @@ -440,10 +396,10 @@ export async function setup( await pxe?.stop(); } - if (acvmConfig?.directoryToCleanup) { + if (acvmConfig?.cleanup) { // remove the temp directory created for the acvm - logger.verbose(`Cleaning up ACVM temp directory ${acvmConfig.directoryToCleanup}`); - await fs.rm(acvmConfig.directoryToCleanup, { recursive: true, force: true }); + logger.verbose(`Cleaning up ACVM state`); + await acvmConfig.cleanup(); } await anvil?.stop(); @@ -454,15 +410,54 @@ export async function setup( 
pxe, deployL1ContractsValues, config, - wallet: wallets[0], - wallets, logger, cheatCodes, sequencer, + prover, + wallets: [], teardown, }; } +/** + * Sets up the environment for the end-to-end tests. + * @param numberOfAccounts - The number of new accounts to be created once the PXE is initiated. + * @param opts - Options to pass to the node initialization and to the setup script. + * @param pxeOpts - Options to pass to the PXE initialization. + */ +export async function setup( + numberOfAccounts = 1, + opts: SetupOptions = {}, + pxeOpts: Partial = {}, + enableGas = false, +): Promise { + const ctx = await setupNoL2Deploy(opts, pxeOpts); + const wallets = await getDeployedTestAccountsWallets(ctx.pxe); + if (wallets.length < numberOfAccounts) { + const numNewAccounts = numberOfAccounts - wallets.length; + ctx.logger.verbose(`Deploying ${numNewAccounts} accounts...`); + wallets.push(...(await createAccounts(ctx.pxe, numNewAccounts))); + } + + ctx.logger.verbose('Deploying key registry...'); + await deployCanonicalKeyRegistry( + new SignerlessWallet(ctx.pxe, new DefaultMultiCallEntrypoint(ctx.config.chainId, ctx.config.version)), + ); + + if (enableGas) { + ctx.logger.verbose('Deploying gas token...'); + await deployCanonicalGasToken( + new SignerlessWallet(ctx.pxe, new DefaultMultiCallEntrypoint(ctx.config.chainId, ctx.config.version)), + ); + } + + return { + ...ctx, + wallets, + wallet: wallets[0], + }; +} + /** * Registers the contract class used for test accounts and publicly deploys the instances requested. * Use this when you need to make a public call to an account contract, such as for requesting a public authwit. @@ -594,7 +589,7 @@ export async function expectMapping( /** * Deploy the protocol contracts to a running instance. 
*/ -export async function deployCanonicalGasToken(deployer: Wallet) { +export async function deployCanonicalGasToken(deployer: Wallet, timeout?: number) { // "deploy" the Gas token as it contains public functions const gasPortalAddress = (await deployer.getNodeInfo()).l1ContractAddresses.gasPortalAddress; const canonicalGasToken = getCanonicalGasToken(gasPortalAddress); @@ -605,14 +600,14 @@ export async function deployCanonicalGasToken(deployer: Wallet) { const gasToken = await GasTokenContract.deploy(deployer, gasPortalAddress) .send({ contractAddressSalt: canonicalGasToken.instance.salt, universalDeploy: true }) - .deployed(); + .deployed({ timeout }); await expect(deployer.isContractClassPubliclyRegistered(gasToken.instance.contractClassId)).resolves.toBe(true); await expect(deployer.getContractInstance(gasToken.address)).resolves.toBeDefined(); await expect(deployer.isContractPubliclyDeployed(gasToken.address)).resolves.toBe(true); } -async function deployCanonicalKeyRegistry(deployer: Wallet) { +async function deployCanonicalKeyRegistry(deployer: Wallet, timeout?: number) { const canonicalKeyRegistry = getCanonicalKeyRegistry(); // We check to see if there exists a contract at the canonical Key Registry address with the same contract class id as we expect. 
This means that @@ -628,7 +623,7 @@ async function deployCanonicalKeyRegistry(deployer: Wallet) { const keyRegistry = await KeyRegistryContract.deploy(deployer) .send({ contractAddressSalt: canonicalKeyRegistry.instance.salt, universalDeploy: true }) - .deployed(); + .deployed({ timeout }); if ( !keyRegistry.address.equals(canonicalKeyRegistry.address) || diff --git a/yarn-project/prover-client/src/index.ts b/yarn-project/prover-client/src/index.ts index c47f1852f991..4331fcaff0ea 100644 --- a/yarn-project/prover-client/src/index.ts +++ b/yarn-project/prover-client/src/index.ts @@ -1,3 +1,5 @@ +export { ProverClient } from '@aztec/circuit-types'; + export * from './tx-prover/tx-prover.js'; export * from './config.js'; export * from './dummy-prover.js'; diff --git a/yarn-project/prover-client/src/prover-pool/prover-pool.ts b/yarn-project/prover-client/src/prover-pool/prover-pool.ts index 0750b41520e1..ebb5b7b15da5 100644 --- a/yarn-project/prover-client/src/prover-pool/prover-pool.ts +++ b/yarn-project/prover-client/src/prover-pool/prover-pool.ts @@ -2,7 +2,6 @@ import { type ProvingJobSource } from '@aztec/circuit-types'; import { type SimulationProvider } from '@aztec/simulator'; import { mkdtemp } from 'fs/promises'; -import { tmpdir } from 'os'; import { join } from 'path'; import { BBNativeRollupProver, type BBProverConfig } from '../prover/bb_prover.js'; @@ -55,22 +54,18 @@ export class ProverPool { ); } - static nativePool( - { acvmBinaryPath, bbBinaryPath }: Pick, - size: number, - agentPollIntervalMS = 10, - ): ProverPool { + static nativePool(config: Omit, size: number, agentPollIntervalMS = 10): ProverPool { // TODO generate keys ahead of time so that each agent doesn't have to do it return new ProverPool(size, async i => { const [acvmWorkingDirectory, bbWorkingDirectory] = await Promise.all([ - mkdtemp(join(tmpdir(), 'acvm-')), - mkdtemp(join(tmpdir(), 'bb-')), + mkdtemp(join(config.acvmWorkingDirectory, 'agent-')), + 
mkdtemp(join(config.bbWorkingDirectory, 'agent-')), ]); return new ProverAgent( await BBNativeRollupProver.new({ - acvmBinaryPath, + acvmBinaryPath: config.acvmBinaryPath, acvmWorkingDirectory, - bbBinaryPath, + bbBinaryPath: config.bbBinaryPath, bbWorkingDirectory, }), agentPollIntervalMS, diff --git a/yarn-project/scripts/src/benchmarks/aggregate.ts b/yarn-project/scripts/src/benchmarks/aggregate.ts index ce1d89054638..db8579320dab 100644 --- a/yarn-project/scripts/src/benchmarks/aggregate.ts +++ b/yarn-project/scripts/src/benchmarks/aggregate.ts @@ -15,6 +15,7 @@ import { type BenchmarkMetricResults, type BenchmarkResults, type BenchmarkResultsWithTimestamp, + type CircuitProvingStats, type CircuitSimulationStats, type L1PublishStats, type L2BlockBuiltStats, @@ -22,6 +23,7 @@ import { type MetricName, type NodeSyncedChainHistoryStats, type NoteProcessorCaughtUpStats, + type ProofVerificationStats, type Stats, type TreeInsertionStats, type TxAddedToPoolStats, @@ -94,16 +96,38 @@ function processRollupBlockSynced(entry: L2BlockHandledStats, results: Benchmark * Processes an entry with event name 'circuit-simulated' and updates results * Buckets are circuit names */ -function processCircuitSimulation(entry: CircuitSimulationStats, results: BenchmarkCollectedResults) { +function processCircuitSimulation(entry: CircuitSimulationStats, results: BenchmarkCollectedResults, fileName: string) { const bucket = entry.circuitName; if (!bucket) { return; } - append(results, 'circuit_simulation_time_in_ms', bucket, entry.duration); + + if (fileName.includes('bench-proving')) { + append(results, 'circuit_native_simulation_time_in_ms', bucket, entry.duration); + } else { + append(results, 'circuit_wasm_simulation_time_in_ms', bucket, entry.duration); + } append(results, 'circuit_input_size_in_bytes', bucket, entry.inputSize); append(results, 'circuit_output_size_in_bytes', bucket, entry.outputSize); } +function processCircuitProving(entry: CircuitProvingStats, results: 
BenchmarkCollectedResults) { + const bucket = entry.circuitName; + if (!bucket) { + return; + } + append(results, 'circuit_proving_time_in_ms', bucket, entry.duration); + append(results, 'circuit_proof_size_in_bytes', bucket, entry.proofSize); +} + +function processProofVerification(entry: ProofVerificationStats, results: BenchmarkCollectedResults) { + const bucket = entry.circuitName; + if (!bucket) { + return; + } + append(results, 'proof_verification_time_in_ms', bucket, entry.duration); +} + /** * Processes an entry with event name 'note-processor-caught-up' and updates results * Buckets are rollup sizes for NOTE_DECRYPTING_TIME, or chain sizes for NOTE_HISTORY_DECRYPTING_TIME @@ -208,7 +232,11 @@ function processEntry(entry: Stats, results: BenchmarkCollectedResults, fileName case 'l2-block-handled': return processRollupBlockSynced(entry, results); case 'circuit-simulation': - return processCircuitSimulation(entry, results); + return processCircuitSimulation(entry, results, fileName); + case 'circuit-proving': + return processCircuitProving(entry, results); + case 'proof-verification': + return processProofVerification(entry, results); case 'note-processor-caught-up': return processNoteProcessorCaughtUp(entry, results); case 'l2-block-built': diff --git a/yarn-project/scripts/src/benchmarks/markdown.ts b/yarn-project/scripts/src/benchmarks/markdown.ts index 2887805cfdcf..0109632b4b4c 100644 --- a/yarn-project/scripts/src/benchmarks/markdown.ts +++ b/yarn-project/scripts/src/benchmarks/markdown.ts @@ -1,6 +1,6 @@ // Generate a markdown file with a table summary of the aggregated benchmarks. // If a benchmark-base file is available, shows the comparison against base (ie master in a PR). 
-import { BENCHMARK_HISTORY_BLOCK_SIZE, Metrics } from '@aztec/circuit-types/stats'; +import { BENCHMARK_HISTORY_BLOCK_SIZE, type MetricName, Metrics } from '@aztec/circuit-types/stats'; import { createConsoleLogger } from '@aztec/foundation/log'; import * as fs from 'fs'; @@ -180,12 +180,11 @@ ${rows.join('\n')} /** Creates a md with the benchmark contents. */ export function getMarkdown() { - const benchmark = JSON.parse(fs.readFileSync(inputFile, 'utf-8')); + const benchmark: Record = JSON.parse(fs.readFileSync(inputFile, 'utf-8')); const baseBenchmark = getBaseBenchmark(); const metricsByBlockSize = Metrics.filter(m => m.groupBy === 'block-size').map(m => m.name); const metricsByChainLength = Metrics.filter(m => m.groupBy === 'chain-length').map(m => m.name); - const metricsByCircuitName = Metrics.filter(m => m.groupBy === 'circuit-name').map(m => m.name); const metricsByClassesRegistered = Metrics.filter(m => m.groupBy === 'classes-registered').map(m => m.name); const metricsByFeePaymentMethod = Metrics.filter(m => m.groupBy === 'fee-payment-method').map(m => m.name); const metricsByLeafCount = Metrics.filter(m => m.groupBy === 'leaf-count').map(m => m.name); @@ -230,8 +229,28 @@ ${getTableContent(pick(benchmark, metricsByChainLength), baseBenchmark, 'blocks' ### Circuits stats -Stats on running time and I/O sizes collected for every circuit run across all benchmarks. 
-${getTableContent(transpose(pick(benchmark, metricsByCircuitName)), transpose(baseBenchmark), '', 'Circuit')} +Simulation time +${getTableContent( + transpose(pick(benchmark, ['circuit_wasm_simulation_time_in_ms', 'circuit_native_simulation_time_in_ms'])), + baseBenchmark, +)} + +Proving time +${getTableContent( + transpose(pick(benchmark, ['circuit_proving_time_in_ms', 'circuit_proof_size_in_bytes'])), + baseBenchmark, +)} + +Proof verification time +${getTableContent(transpose(pick(benchmark, 'proof_verification_time_in_ms')), baseBenchmark)} + +Circuit I/O size +${getTableContent( + transpose(pick(benchmark, ['circuit_input_size_in_bytes', 'circuit_output_size_in_bytes'])), + transpose(baseBenchmark), + '', + 'Circuit', +)} ### Tree insertion stats