diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts
index ff256dfcbf5..0ffb87dcaa6 100644
--- a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts
+++ b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts
@@ -1,7 +1,9 @@
 import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer';
 import { createDebugLogger } from '@aztec/foundation/log';
+import { Timer } from '@aztec/foundation/timer';
 import { IndexedTreeLeaf, IndexedTreeLeafPreimage } from '@aztec/foundation/trees';
 import { Hasher, SiblingPath } from '@aztec/types';
+import { TreeInsertionStats } from '@aztec/types/stats';
 
 import { LevelUp } from 'levelup';
 
@@ -507,6 +509,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree {
     leaves: Buffer[],
     subtreeHeight: SubtreeHeight,
   ): Promise<BatchInsertionResult<TreeHeight, SubtreeSiblingPathHeight>> {
+    const timer = new Timer();
     const insertedKeys = new Map();
     const emptyLowLeafWitness = getEmptyLowLeafWitness(this.getDepth() as TreeHeight, this.leafPreimageFactory);
     // Accumulators
@@ -603,6 +606,15 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree {
     // inclusion. See {@link encodeLeaf} for a more through param explanation.
     await this.encodeAndAppendLeaves(pendingInsertionSubtree, false);
 
+    this.log(`Inserted ${leaves.length} leaves into ${this.getName()} tree`, {
+      eventName: 'tree-insertion',
+      duration: timer.ms(),
+      batchSize: leaves.length,
+      treeName: this.getName(),
+      treeDepth: this.getDepth(),
+      treeType: 'indexed',
+    } satisfies TreeInsertionStats);
+
     return {
       lowLeavesWitnessData: lowLeavesWitnesses,
       sortedNewLeaves: sortedDescendingLeafTuples.map(leafTuple => leafTuple.leaf.toBuffer()),
diff --git a/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts b/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts
index 55b4f532469..3c48cfd2212 100644
--- a/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts
+++ b/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts
@@ -1,3 +1,6 @@
+import { Timer } from '@aztec/foundation/timer';
+import { TreeInsertionStats } from '@aztec/types/stats';
+
 import { AppendOnlySnapshotBuilder, TreeSnapshot } from '../index.js';
 import { AppendOnlyTree } from '../interfaces/append_only_tree.js';
 import { TreeBase } from '../tree_base.js';
@@ -14,7 +17,16 @@ export class StandardTree extends TreeBase implements AppendOnlyTree {
    * @returns Empty promise.
    */
   public async appendLeaves(leaves: Buffer[]): Promise<void> {
+    const timer = new Timer();
     await super.appendLeaves(leaves);
+    this.log(`Inserted ${leaves.length} leaves into ${this.getName()} tree`, {
+      eventName: 'tree-insertion',
+      duration: timer.ms(),
+      batchSize: leaves.length,
+      treeName: this.getName(),
+      treeDepth: this.getDepth(),
+      treeType: 'append-only',
+    } satisfies TreeInsertionStats);
   }
 
   public snapshot(block: number): Promise<TreeSnapshot> {
diff --git a/yarn-project/merkle-tree/src/tree_base.ts b/yarn-project/merkle-tree/src/tree_base.ts
index 291ac258082..befad55ae72 100644
--- a/yarn-project/merkle-tree/src/tree_base.ts
+++ b/yarn-project/merkle-tree/src/tree_base.ts
@@ -1,4 +1,5 @@
 import { toBigIntLE, toBufferLE } from '@aztec/foundation/bigint-buffer';
+import { DebugLogger, createDebugLogger } from '@aztec/foundation/log';
 import { Hasher, SiblingPath } from '@aztec/types';
 
 import { LevelUp, LevelUpChain } from 'levelup';
@@ -36,6 +37,7 @@ export abstract class TreeBase implements MerkleTree {
   private root!: Buffer;
   private zeroHashes: Buffer[] = [];
   private cache: { [key: string]: Buffer } = {};
+  protected log: DebugLogger;
 
   public constructor(
     protected db: LevelUp,
@@ -58,6 +60,8 @@ export abstract class TreeBase implements MerkleTree {
     this.root = root ? root : current;
     this.maxIndex = 2n ** BigInt(depth) - 1n;
+
+    this.log = createDebugLogger(`aztec:merkle-tree:${name}`);
   }
 
   /**
diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts
index db3a905ee7b..51d9df89fab 100644
--- a/yarn-project/pxe/src/pxe_service/pxe_service.ts
+++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts
@@ -23,6 +23,7 @@ import { padArrayEnd } from '@aztec/foundation/collection';
 import { Fr } from '@aztec/foundation/fields';
 import { SerialQueue } from '@aztec/foundation/fifo';
 import { DebugLogger, createDebugLogger } from '@aztec/foundation/log';
+import { Timer } from '@aztec/foundation/timer';
 import { NoirWasmVersion } from '@aztec/noir-compiler/versions';
 import {
   AuthWitness,
@@ -52,6 +53,7 @@ import {
   getNewContractPublicFunctions,
   isNoirCallStackUnresolved,
 } from '@aztec/types';
+import { TxPXEProcessingStats } from '@aztec/types/stats';
 
 import { PXEServiceConfig, getPackageInfo } from '../config/index.js';
 import { ContractDataOracle } from '../contract_data_oracle/index.js';
@@ -353,7 +355,13 @@ export class PXEService implements PXE {
     const deployedContractAddress = txRequest.txContext.isContractDeploymentTx ? txRequest.origin : undefined;
     const newContract = deployedContractAddress ? await this.db.getContract(deployedContractAddress) : undefined;
 
+    const timer = new Timer();
     const tx = await this.#simulateAndProve(txRequest, newContract);
+    this.log(`Processed private part of ${tx.data.end.newNullifiers[0]}`, {
+      eventName: 'tx-pxe-processing',
+      duration: timer.ms(),
+      ...tx.getStats(),
+    } satisfies TxPXEProcessingStats);
     if (simulatePublic) {
       await this.#simulatePublicCalls(tx);
     }
diff --git a/yarn-project/scripts/src/benchmarks/aggregate.ts b/yarn-project/scripts/src/benchmarks/aggregate.ts
index 3dab2f4f54f..bb0dbb21fe1 100644
--- a/yarn-project/scripts/src/benchmarks/aggregate.ts
+++ b/yarn-project/scripts/src/benchmarks/aggregate.ts
@@ -24,7 +24,10 @@ import {
   NodeSyncedChainHistoryStats,
   NoteProcessorCaughtUpStats,
   Stats,
+  TreeInsertionStats,
   TxAddedToPoolStats,
+  TxPXEProcessingStats,
+  TxSequencerProcessingStats,
 } from '@aztec/types/stats';
 
 import * as fs from 'fs';
@@ -151,6 +154,26 @@ function processTxAddedToPool(entry: TxAddedToPoolStats, results: BenchmarkColle
   append(results, 'tx_size_in_bytes', entry.newContractCount, entry.size);
 }
 
+/** Processes entries for event tx-pxe-processing, grouped by number of new commitments */
+function processTxPXEProcessingStats(entry: TxPXEProcessingStats, results: BenchmarkCollectedResults) {
+  append(results, 'tx_pxe_processing_time_ms', entry.newCommitmentCount, entry.duration);
+}
+
+/** Processes entries for event tx-sequencer-processing, grouped by number of public data writes */
+function processTxSequencerProcessingStats(entry: TxSequencerProcessingStats, results: BenchmarkCollectedResults) {
+  append(results, 'tx_sequencer_processing_time_ms', entry.publicDataUpdateRequests, entry.duration);
+}
+
+/** Processes a tree insertion event and updates results */
+function processTreeInsertion(entry: TreeInsertionStats, results: BenchmarkCollectedResults) {
+  const bucket = entry.batchSize;
+  if (entry.treeType === 'append-only') {
+    append(results, 'batch_insert_into_append_only_tree_ms', bucket, entry.duration);
+  } else if (entry.treeType === 'indexed') {
+    append(results, 'batch_insert_into_indexed_tree_ms', bucket, entry.duration);
+  }
+}
+
 /** Processes a parsed entry from a log-file and updates results */
 function processEntry(entry: Stats, results: BenchmarkCollectedResults) {
   switch (entry.eventName) {
@@ -168,6 +191,12 @@ function processEntry(entry: Stats, results: BenchmarkCollectedResults) {
       return processNodeSyncedChain(entry, results);
     case 'tx-added-to-pool':
       return processTxAddedToPool(entry, results);
+    case 'tx-pxe-processing':
+      return processTxPXEProcessingStats(entry, results);
+    case 'tx-sequencer-processing':
+      return processTxSequencerProcessingStats(entry, results);
+    case 'tree-insertion':
+      return processTreeInsertion(entry, results);
     default:
       return;
   }
diff --git a/yarn-project/scripts/src/benchmarks/markdown.ts b/yarn-project/scripts/src/benchmarks/markdown.ts
index 7b8b843fb36..163db3beefa 100644
--- a/yarn-project/scripts/src/benchmarks/markdown.ts
+++ b/yarn-project/scripts/src/benchmarks/markdown.ts
@@ -95,10 +95,13 @@ function getCell(
   row: string,
   col: string,
 ) {
-  const value = data[row][col];
+  const value: number | undefined = data[row][col];
   const formattedValue = formatValue(value);
-  const baseValue = base ? (base[row] ?? {})[col] : undefined;
-  const percentDiff = baseValue ? Math.round(((value - baseValue) / baseValue) * 100) : undefined;
+  const baseValue: number | undefined = base?.[row]?.[col];
+  const percentDiff =
+    typeof baseValue === 'number' && baseValue > 0 && typeof value === 'number'
+      ? Math.round(((value - baseValue) / baseValue) * 100)
+      : undefined;
   if (!percentDiff || Math.abs(percentDiff) < 1) {
     return formattedValue;
   }
@@ -118,7 +121,11 @@ function withDesc(name: string) {
 }
 
 /** Formats a numeric value for display. */
-function formatValue(value: number) {
+function formatValue(value: number | undefined): string {
+  if (typeof value === 'undefined') {
+    return 'N/A';
+  }
+
   if (value < 100) {
     return value.toPrecision(3);
   }
@@ -180,6 +187,10 @@ export function getMarkdown() {
   const metricsByChainLength = Metrics.filter(m => m.groupBy === 'chain-length').map(m => m.name);
   const metricsByCircuitName = Metrics.filter(m => m.groupBy === 'circuit-name').map(m => m.name);
   const metricsByContractCount = Metrics.filter(m => m.groupBy === 'contract-count').map(m => m.name);
+  const metricsByLeafCount = Metrics.filter(m => m.groupBy === 'leaf-count').map(m => m.name);
+
+  const metricsTxPxeProcessing = Metrics.filter(m => m.name === 'tx_pxe_processing_time_ms').map(m => m.name);
+  const metricsTxSeqProcessing = Metrics.filter(m => m.name === 'tx_sequencer_processing_time_ms').map(m => m.name);
 
   const baseHash = process.env.BASE_COMMIT_HASH;
   const baseUrl = baseHash && `[\`${baseHash.slice(0, 8)}\`](${S3_URL}/benchmarks-v1/master/${baseHash}.json)`;
@@ -202,7 +213,7 @@ ${getWarningsSummary(benchmark, baseBenchmark)}
 
 <details>
 
 <summary>Detailed results</summary>
 
-All benchmarks are run on txs on the \`Benchmarking\` contract on the repository. Each tx consists of a batch call to \`create_note\` and \`increment_balance\`, which guarantees that each tx has a private call, a nested private call, a public call, and a nested public call, as well as an emitted private note, an unencrypted log, and public storage read and write. 
+All benchmarks are run on txs on the \`Benchmarking\` contract on the repository. Each tx consists of a batch call to \`create_note\` and \`increment_balance\`, which guarantees that each tx has a private call, a nested private call, a public call, and a nested public call, as well as an emitted private note, an unencrypted log, and public storage read and write.
 ${prSourceDataText} ${baseCommitText}
 
@@ -221,11 +232,20 @@ ${getTableContent(pick(benchmark, metricsByChainLength), baseBenchmark, 'blocks'
 
 Stats on running time and I/O sizes collected for every circuit run across all benchmarks.
 ${getTableContent(transpose(pick(benchmark, metricsByCircuitName)), transpose(baseBenchmark), '', 'Circuit')}
 
+### Tree insertion stats
+
+The duration to insert a fixed batch of leaves into each tree type.
+${getTableContent(pick(benchmark, metricsByLeafCount), baseBenchmark, 'leaves')}
+
 ### Miscellaneous
 
 Transaction sizes based on how many contracts are deployed in the tx.
 ${getTableContent(pick(benchmark, metricsByContractCount), baseBenchmark, 'deployed contracts')}
 
+Transaction processing duration by data writes.
+${getTableContent(pick(benchmark, metricsTxPxeProcessing), baseBenchmark, 'new commitments')}
+${getTableContent(pick(benchmark, metricsTxSeqProcessing), baseBenchmark, 'public data writes')}
+
 ${COMMENT_MARK}
 `;
diff --git a/yarn-project/sequencer-client/src/sequencer/public_processor.ts b/yarn-project/sequencer-client/src/sequencer/public_processor.ts
index fba66b0b797..88b470bd834 100644
--- a/yarn-project/sequencer-client/src/sequencer/public_processor.ts
+++ b/yarn-project/sequencer-client/src/sequencer/public_processor.ts
@@ -44,7 +44,9 @@ import { computeVarArgsHash } from '@aztec/circuits.js/abis';
 import { arrayNonEmptyLength, isArrayEmpty, padArrayEnd } from '@aztec/foundation/collection';
 import { createDebugLogger } from '@aztec/foundation/log';
 import { to2Fields } from '@aztec/foundation/serialize';
+import { Timer } from '@aztec/foundation/timer';
 import { ContractDataSource, FunctionL2Logs, L1ToL2MessageSource, MerkleTreeId, Tx } from '@aztec/types';
+import { TxSequencerProcessingStats } from '@aztec/types/stats';
 import { MerkleTreeOperations } from '@aztec/world-state';
 
 import { getVerificationKeys } from '../index.js';
@@ -159,12 +161,23 @@ export class PublicProcessor {
   protected async processTx(tx: Tx): Promise<ProcessedTx> {
     if (!isArrayEmpty(tx.data.end.publicCallStack, item => item.isEmpty())) {
+      const timer = new Timer();
+
       const [publicKernelOutput, publicKernelProof, newUnencryptedFunctionLogs] = await this.processEnqueuedPublicCalls(
         tx,
       );
       tx.unencryptedLogs.addFunctionLogs(newUnencryptedFunctionLogs);
 
-      return makeProcessedTx(tx, publicKernelOutput, publicKernelProof);
+      const processedTransaction = await makeProcessedTx(tx, publicKernelOutput, publicKernelProof);
+      this.log(`Processed public part of ${tx.data.end.newNullifiers[0]}`, {
+        eventName: 'tx-sequencer-processing',
+        duration: timer.ms(),
+        publicDataUpdateRequests:
+          processedTransaction.data.end.publicDataUpdateRequests.filter(x => !x.leafSlot.isZero()).length ?? 0,
+        ...tx.getStats(),
+      } satisfies TxSequencerProcessingStats);
+
+      return processedTransaction;
     } else {
       return makeProcessedTx(tx);
     }
diff --git a/yarn-project/types/src/logs/log_filter.ts b/yarn-project/types/src/logs/log_filter.ts
index a1448527edd..c77c73b2d90 100644
--- a/yarn-project/types/src/logs/log_filter.ts
+++ b/yarn-project/types/src/logs/log_filter.ts
@@ -1,6 +1,6 @@
 import { AztecAddress, FunctionSelector } from '@aztec/circuits.js';
 
-import { TxHash } from '../tx/index.js';
+import { TxHash } from '../tx/tx_hash.js';
 import { LogId } from './log_id.js';
 
 /**
diff --git a/yarn-project/types/src/stats/metrics.ts b/yarn-project/types/src/stats/metrics.ts
index 235e6284ef2..8e8c3736fda 100644
--- a/yarn-project/types/src/stats/metrics.ts
+++ b/yarn-project/types/src/stats/metrics.ts
@@ -1,7 +1,13 @@
 import { StatsEventName } from './stats.js';
 
 /** How a metric is grouped in benchmarks: by block size, by length of chain processed, or by circuit name. */
-export type MetricGroupBy = 'block-size' | 'chain-length' | 'circuit-name' | 'contract-count';
+export type MetricGroupBy =
+  | 'block-size'
+  | 'chain-length'
+  | 'circuit-name'
+  | 'contract-count'
+  | 'leaf-count'
+  | 'data-writes';
 
 /** Definition of a metric to track in benchmarks. */
 export interface Metric {
@@ -127,6 +133,30 @@ export const Metrics = [
     description: 'Size of txs received in the mempool.',
     events: ['tx-added-to-pool'],
   },
+  {
+    name: 'tx_pxe_processing_time_ms',
+    groupBy: 'data-writes',
+    description: 'Time to process the private part of a tx.',
+    events: ['tx-pxe-processing'],
+  },
+  {
+    name: 'tx_sequencer_processing_time_ms',
+    groupBy: 'data-writes',
+    description: 'Time to process the public part of a tx.',
+    events: ['tx-sequencer-processing'],
+  },
+  {
+    name: 'batch_insert_into_append_only_tree_ms',
+    groupBy: 'leaf-count',
+    description: 'Time to insert a batch of leaves into an append-only tree.',
+    events: ['tree-insertion'],
+  },
+  {
+    name: 'batch_insert_into_indexed_tree_ms',
+    groupBy: 'leaf-count',
+    description: 'Time to insert a batch of leaves into an indexed tree.',
+    events: ['tree-insertion'],
+  },
 ] as const satisfies readonly Metric[];
 
 /** Metric definitions to track from benchmarks. */
diff --git a/yarn-project/types/src/stats/stats.ts b/yarn-project/types/src/stats/stats.ts
index 22c7373389a..19a0535c922 100644
--- a/yarn-project/types/src/stats/stats.ts
+++ b/yarn-project/types/src/stats/stats.ts
@@ -138,6 +138,50 @@ export type TxStats = {
   newContractDataSize: number;
   /** Number of new contracts deployed in this tx. */
   newContractCount: number;
+  /** Number of new commitments in this tx. */
+  newCommitmentCount: number;
+  /** Number of new nullifiers in this tx. */
+  newNullifierCount: number;
+};
+
+/**
+ * Stats for a tx that has been processed by the PXE.
+ */
+export type TxPXEProcessingStats = {
+  /** Name of the event. */
+  eventName: 'tx-pxe-processing';
+  /** Duration in ms. */
+  duration: number;
+} & TxStats;
+
+/**
+ * Stats for a tx that has been processed by the public processor.
+ */
+export type TxSequencerProcessingStats = {
+  /** Name of the event. */
+  eventName: 'tx-sequencer-processing';
+  /** Duration in ms. */
+  duration: number;
+  /** Count of how many public data writes this tx has made. Acts as a proxy for how 'heavy' this tx is. */
+  publicDataUpdateRequests: number;
+} & TxStats;
+
+/**
+ * Stats for tree insertions.
+ */
+export type TreeInsertionStats = {
+  /** Name of the event. */
+  eventName: 'tree-insertion';
+  /** Duration in ms. */
+  duration: number;
+  /** The size of the insertion batch. */
+  batchSize: number;
+  /** The tree name. */
+  treeName: string;
+  /** The tree depth. */
+  treeDepth: number;
+  /** Tree type. */
+  treeType: 'append-only' | 'indexed';
 };
 
 /** A new tx was added to the tx pool. */
@@ -154,7 +198,10 @@ export type Stats =
   | L2BlockBuiltStats
   | L2BlockHandledStats
   | NoteProcessorCaughtUpStats
-  | TxAddedToPoolStats;
+  | TxAddedToPoolStats
+  | TxPXEProcessingStats
+  | TxSequencerProcessingStats
+  | TreeInsertionStats;
 
 /** Set of event names across emitted stats. */
 export type StatsEventName = Stats['eventName'];
diff --git a/yarn-project/types/src/tx/tx.ts b/yarn-project/types/src/tx/tx.ts
index f9c57272df5..c8d9c9f7977 100644
--- a/yarn-project/types/src/tx/tx.ts
+++ b/yarn-project/types/src/tx/tx.ts
@@ -5,7 +5,7 @@ import { BufferReader, Tuple } from '@aztec/foundation/serialize';
 
 import { ExtendedContractData } from '../contract_data.js';
 import { GetUnencryptedLogsResponse } from '../logs/get_unencrypted_logs_response.js';
-import { L2LogsSource } from '../logs/index.js';
+import { L2LogsSource } from '../logs/l2_logs_source.js';
 import { TxL2Logs } from '../logs/tx_l2_logs.js';
 import { TxStats } from '../stats/stats.js';
 import { TxHash } from './tx_hash.js';
@@ -168,6 +168,8 @@ export class Tx {
       unencryptedLogSize: this.unencryptedLogs.getSerializedLength(),
       newContractCount: this.newContracts.filter(c => !c.isEmpty()).length,
       newContractDataSize: this.newContracts.map(c => c.toBuffer().length).reduce((a, b) => a + b, 0),
+      newCommitmentCount: this.data!.end.newCommitments.filter(c => !c.isEmpty()).length,
+      newNullifierCount: this.data!.end.newNullifiers.filter(c => !c.isEmpty()).length,
       proofSize: this.proof.buffer.length,
       size: this.toBuffer().length,
     };