feat: Track proving times in prover stats in CLI (#8281)
Adds an option to compute proving times in the prover-stats CLI command,
and split stats based on a configurable cutoff. Also ensures that the
same proven block is only counted once per prover.

Adds lodash.chunk. Sorry about that.
spalladino authored Sep 3, 2024
1 parent 6863fe5 commit efad298
Showing 7 changed files with 157 additions and 21 deletions.
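The per-prover dedup mentioned in the commit message keys on L2 block numbers; a minimal standalone sketch of the idea, with made-up block numbers:

```ts
// Two proofs of the same L2 block by one prover count once toward its total.
const proofs = [{ l2BlockNumber: 10n }, { l2BlockNumber: 10n }, { l2BlockNumber: 11n }];
const uniqueBlocks = new Set(proofs.map(e => e.l2BlockNumber));
console.log(uniqueBlocks.size); // 2
```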
5 changes: 3 additions & 2 deletions yarn-project/archiver/src/archiver/data_retrieval.ts
@@ -4,7 +4,7 @@ import { type EthAddress } from '@aztec/foundation/eth-address';
 import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log';
 import { RollupAbi } from '@aztec/l1-artifacts';
 
-import { type PublicClient, getAbiItem } from 'viem';
+import { type Hex, type PublicClient, getAbiItem } from 'viem';
 
 import {
   getL2BlockProposedLogs,
@@ -147,7 +147,7 @@ export async function retrieveL2ProofVerifiedEvents(
   rollupAddress: EthAddress,
   searchStartBlock: bigint,
   searchEndBlock?: bigint,
-): Promise<{ l1BlockNumber: bigint; l2BlockNumber: bigint; proverId: Fr }[]> {
+): Promise<{ l1BlockNumber: bigint; l2BlockNumber: bigint; proverId: Fr; txHash: Hex }[]> {
   const logs = await publicClient.getLogs({
     address: rollupAddress.toString(),
     fromBlock: searchStartBlock,
@@ -160,5 +160,6 @@
     l1BlockNumber: log.blockNumber,
     l2BlockNumber: log.args.blockNumber,
     proverId: Fr.fromString(log.args.proverId),
+    txHash: log.transactionHash,
   }));
 }
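As a consumption sketch of the extended return type (the RPC URL and rollup address below are placeholders), the new `txHash` field surfaces the L1 transaction that carried each proof:

```ts
import { retrieveL2ProofVerifiedEvents } from '@aztec/archiver';
import { EthAddress } from '@aztec/circuits.js';
import { createPublicClient, http } from 'viem';

// Sketch only: endpoint and address are placeholders.
const publicClient = createPublicClient({ transport: http('http://localhost:8545') });
const rollupAddress = EthAddress.fromString('0x0000000000000000000000000000000000000000');

const events = await retrieveL2ProofVerifiedEvents(publicClient, rollupAddress, 1n, 1000n);
for (const { l1BlockNumber, l2BlockNumber, proverId, txHash } of events) {
  // txHash is the field added in this commit: the L1 tx that carried the proof.
  console.log(`L2 block ${l2BlockNumber} proven by ${proverId} in L1 tx ${txHash} (block ${l1BlockNumber})`);
}
```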
5 changes: 3 additions & 2 deletions yarn-project/archiver/src/index.ts
@@ -13,8 +13,9 @@ export * from './archiver/index.js';
 export * from './rpc/index.js';
 export * from './factory.js';
 
 // We are not storing the info from these events in the archiver for now (and we don't really need to), so we expose this query directly
-export { retrieveL2ProofVerifiedEvents } from './archiver/data_retrieval.js';
+export { retrieveL2ProofVerifiedEvents, retrieveBlockMetadataFromRollup } from './archiver/data_retrieval.js';
+
+export { getL2BlockProposedLogs } from './archiver/eth_log_handlers.js';
 
 const log = createDebugLogger('aztec:archiver');

2 changes: 2 additions & 0 deletions yarn-project/cli/package.json
@@ -72,6 +72,7 @@
     "@iarna/toml": "^2.2.5",
     "@libp2p/peer-id-factory": "^3.0.4",
     "commander": "^12.1.0",
+    "lodash.chunk": "^4.2.0",
     "lodash.groupby": "^4.6.0",
     "semver": "^7.5.4",
     "solc": "^0.8.26",
@@ -86,6 +87,7 @@
     "@aztec/protocol-contracts": "workspace:^",
     "@jest/globals": "^29.5.0",
     "@types/jest": "^29.5.0",
+    "@types/lodash.chunk": "^4.2.9",
     "@types/lodash.groupby": "^4.6.9",
     "@types/lodash.startcase": "^4.4.7",
     "@types/node": "^18.7.23",
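`lodash.chunk` (the addition apologized for in the commit message) simply splits an array into fixed-size batches; `prover_stats.ts` below uses it to batch `getBlock` calls against the L1 RPC. For reference:

```ts
import chunk from 'lodash.chunk';

// Splits an array into groups of at most `size` elements; the final group holds the remainder.
const batches = chunk([1n, 2n, 3n, 4n, 5n], 2);
console.log(batches); // [[1n, 2n], [3n, 4n], [5n]]
```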
21 changes: 18 additions & 3 deletions yarn-project/cli/src/cmds/l1/index.ts
@@ -285,17 +285,32 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL
       ETHEREUM_HOST,
     )
     .addOption(l1ChainIdOption)
-    .option('--start-block <number>', 'The block number to start from', parseBigint, 1n)
+    .option('--start-block <number>', 'The L1 block number to start from', parseBigint, 1n)
+    .option('--end-block <number>', 'The last L1 block number to query', parseBigint)
     .option('--batch-size <number>', 'The number of blocks to query in each batch', parseBigint, 100n)
+    .option('--proving-timeout <number>', 'Cutoff for proving time to consider a block', parseBigint)
     .option('--l1-rollup-address <string>', 'Address of the rollup contract (required if node URL is not set)')
     .option(
       '--node-url <string>',
       'JSON RPC URL of an Aztec node to retrieve the rollup contract address (required if L1 rollup address is not set)',
     )
+    .option('--raw-logs', 'Output raw logs instead of aggregated stats')
     .action(async options => {
       const { proverStats } = await import('./prover_stats.js');
-      const { l1RpcUrl, chainId, l1RollupAddress, startBlock, batchSize, nodeUrl } = options;
-      await proverStats({ l1RpcUrl, chainId, l1RollupAddress, startBlock, batchSize, nodeUrl, log });
+      const { l1RpcUrl, chainId, l1RollupAddress, startBlock, endBlock, batchSize, nodeUrl, provingTimeout, rawLogs } =
+        options;
+      await proverStats({
+        l1RpcUrl,
+        chainId,
+        l1RollupAddress,
+        startBlock,
+        endBlock,
+        batchSize,
+        nodeUrl,
+        provingTimeout,
+        rawLogs,
+        log,
+      });
     });
 
   return program;
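For illustration, a direct call to the handler with the new options might look like the following sketch (endpoint URLs and the chain id are placeholders; field names follow the `proverStats` options type in the next file):

```ts
const { proverStats } = await import('./prover_stats.js');

await proverStats({
  l1RpcUrl: 'http://localhost:8545', // placeholder L1 RPC endpoint
  chainId: 31337, // placeholder chain id
  l1RollupAddress: undefined, // resolved via the node when nodeUrl is set
  nodeUrl: 'http://localhost:8080', // placeholder Aztec node URL
  startBlock: 1n,
  endBlock: undefined, // defaults to the latest L1 block
  batchSize: 100n,
  provingTimeout: 1800n, // 30-minute cutoff, in seconds
  rawLogs: false,
  log: console.log,
});
```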
135 changes: 125 additions & 10 deletions yarn-project/cli/src/cmds/l1/prover_stats.ts
@@ -1,11 +1,13 @@
-import { retrieveL2ProofVerifiedEvents } from '@aztec/archiver';
+import { getL2BlockProposedLogs, retrieveL2ProofVerifiedEvents } from '@aztec/archiver';
 import { createAztecNodeClient } from '@aztec/circuit-types';
 import { EthAddress } from '@aztec/circuits.js';
 import { createEthereumChain } from '@aztec/ethereum';
-import { type LogFn, createDebugLogger } from '@aztec/foundation/log';
+import { compactArray, mapValues, unique } from '@aztec/foundation/collection';
+import { type LogFn, type Logger, createDebugLogger } from '@aztec/foundation/log';
 
+import chunk from 'lodash.chunk';
 import groupBy from 'lodash.groupby';
-import { createPublicClient, http } from 'viem';
+import { type PublicClient, createPublicClient, http } from 'viem';
 
 export async function proverStats(opts: {
   l1RpcUrl: string;
@@ -14,13 +16,18 @@ export async function proverStats(opts: {
   nodeUrl: string | undefined;
   log: LogFn;
   startBlock: bigint;
+  endBlock: bigint | undefined;
   batchSize: bigint;
+  provingTimeout: bigint | undefined;
+  rawLogs: boolean;
 }) {
   const debugLog = createDebugLogger('aztec:cli:prover_stats');
-  const { startBlock, chainId, l1RpcUrl, l1RollupAddress, batchSize, nodeUrl, log } = opts;
+  const { startBlock, chainId, l1RpcUrl, l1RollupAddress, batchSize, nodeUrl, provingTimeout, endBlock, rawLogs, log } =
+    opts;
   if (!l1RollupAddress && !nodeUrl) {
     throw new Error('Either L1 rollup address or node URL must be set');
   }
+
   const rollup = l1RollupAddress
     ? EthAddress.fromString(l1RollupAddress)
     : await createAztecNodeClient(nodeUrl!)
@@ -29,22 +36,130 @@ export async function proverStats(opts: {
 
   const chain = createEthereumChain(l1RpcUrl, chainId).chainInfo;
   const publicClient = createPublicClient({ chain, transport: http(l1RpcUrl) });
-  const lastBlockNum = await publicClient.getBlockNumber();
+  const lastBlockNum = endBlock ?? (await publicClient.getBlockNumber());
   debugLog.verbose(`Querying events on rollup at ${rollup.toString()} from ${startBlock} up to ${lastBlockNum}`);
 
+  // Get all events for L2 proof submissions
+  const events = await getL2ProofVerifiedEvents(startBlock, lastBlockNum, batchSize, debugLog, publicClient, rollup);
+
+  // If we only care for raw logs, output them
+  if (rawLogs) {
+    log(`l1_block_number, l2_block_number, prover_id, tx_hash`);
+    for (const event of events) {
+      const { l1BlockNumber, l2BlockNumber, proverId, txHash } = event;
+      log(`${l1BlockNumber}, ${l2BlockNumber}, ${proverId}, ${txHash}`);
+    }
+    return;
+  }
+
+  // If we don't have a proving timeout, we can just count the number of unique blocks per prover
+  if (!provingTimeout) {
+    const stats = groupBy(events, 'proverId');
+    log(`prover_id, total_blocks_proven`);
+    for (const proverId in stats) {
+      const uniqueBlocks = new Set(stats[proverId].map(e => e.l2BlockNumber));
+      log(`${proverId}, ${uniqueBlocks.size}`);
+    }
+    return;
+  }
+
+  // But if we do, fetch the events for each block submitted, so we can look up their timestamp
+  const blockEvents = await getL2BlockEvents(startBlock, lastBlockNum, batchSize, debugLog, publicClient, rollup);
+  debugLog.verbose(
+    `First L2 block within range is ${blockEvents[0]?.args.blockNumber} at L1 block ${blockEvents[0]?.blockNumber}`,
+  );
+
+  // Get the timestamps for every block on every log, both for proof and block submissions
+  const l1BlockNumbers = unique([...events.map(e => e.l1BlockNumber), ...blockEvents.map(e => e.blockNumber)]);
+  const l1BlockTimestamps: Record<string, bigint> = {};
+  for (const l1Batch of chunk(l1BlockNumbers, Number(batchSize))) {
+    const blocks = await Promise.all(
+      l1Batch.map(blockNumber => publicClient.getBlock({ includeTransactions: false, blockNumber })),
+    );
+    debugLog.verbose(`Queried ${blocks.length} L1 blocks between ${l1Batch[0]} and ${l1Batch[l1Batch.length - 1]}`);
+    for (const block of blocks) {
+      l1BlockTimestamps[block.number.toString()] = block.timestamp;
+    }
+  }
+
+  // Map from l2 block number to the l1 block in which it was submitted
+  const l2BlockSubmissions: Record<string, bigint> = {};
+  for (const blockEvent of blockEvents) {
+    l2BlockSubmissions[blockEvent.args.blockNumber.toString()] = blockEvent.blockNumber;
+  }
+
+  // Now calculate stats
+  const stats = mapValues(groupBy(events, 'proverId'), (blocks, proverId) =>
+    compactArray(
+      blocks.map(e => {
+        const provenTimestamp = l1BlockTimestamps[e.l1BlockNumber.toString()];
+        const uploadedBlockNumber = l2BlockSubmissions[e.l2BlockNumber.toString()];
+        if (!uploadedBlockNumber) {
+          debugLog.verbose(
+            `Skipping ${proverId}'s proof for L2 block ${e.l2BlockNumber} as it was before the start block`,
+          );
+          return undefined;
+        }
+        const uploadedTimestamp = l1BlockTimestamps[uploadedBlockNumber.toString()];
+        const provingTime = provenTimestamp - uploadedTimestamp;
+        debugLog.debug(
+          `prover=${e.proverId} blockNumber=${e.l2BlockNumber} uploaded=${uploadedTimestamp} proven=${provenTimestamp} time=${provingTime}`,
+        );
+        return { provenTimestamp, uploadedTimestamp, provingTime, ...e };
+      }),
+    ),
+  );
+
+  log(`prover_id, blocks_proven_within_timeout, total_blocks_proven, avg_proving_time`);
+  for (const proverId in stats) {
+    const blocks = stats[proverId];
+    const withinTimeout = blocks.filter(b => b.provingTime <= provingTimeout);
+    const uniqueBlocksWithinTimeout = new Set(withinTimeout.map(e => e.l2BlockNumber));
+    const uniqueBlocks = new Set(blocks.map(e => e.l2BlockNumber));
+    const avgProvingTime =
+      blocks.length === 0 ? 0 : Math.ceil(Number(blocks.reduce((acc, b) => acc + b.provingTime, 0n)) / blocks.length);
+
+    log(`${proverId}, ${uniqueBlocksWithinTimeout.size}, ${uniqueBlocks.size}, ${avgProvingTime}`);
+  }
+  return;
+}
+
+async function getL2ProofVerifiedEvents(
+  startBlock: bigint,
+  lastBlockNum: bigint,
+  batchSize: bigint,
+  debugLog: Logger,
+  publicClient: PublicClient,
+  rollup: EthAddress,
+) {
   let blockNum = startBlock;
   const events = [];
   while (blockNum <= lastBlockNum) {
     const end = blockNum + batchSize > lastBlockNum + 1n ? lastBlockNum + 1n : blockNum + batchSize;
     debugLog.verbose(`Querying events from block ${blockNum} to ${end}`);
     const newEvents = await retrieveL2ProofVerifiedEvents(publicClient, rollup, blockNum, end);
     events.push(...newEvents);
-    debugLog.verbose(`Got ${newEvents.length} events`);
+    debugLog.verbose(`Got ${newEvents.length} events querying l2 proof verified from block ${blockNum} to ${end}`);
     blockNum += batchSize;
   }
+  return events;
+}
 
-  const stats = groupBy(events, 'proverId');
-  for (const proverId in stats) {
-    log(`${proverId}, ${stats[proverId].length}`);
+async function getL2BlockEvents(
+  startBlock: bigint,
+  lastBlockNum: bigint,
+  batchSize: bigint,
+  debugLog: Logger,
+  publicClient: PublicClient,
+  rollup: EthAddress,
+) {
+  let blockNum = startBlock;
+  const events = [];
+  while (blockNum <= lastBlockNum) {
+    const end = blockNum + batchSize > lastBlockNum + 1n ? lastBlockNum + 1n : blockNum + batchSize;
+    const newEvents = await getL2BlockProposedLogs(publicClient, rollup, blockNum, end);
+    events.push(...newEvents);
+    debugLog.verbose(`Got ${newEvents.length} events querying l2 block submitted from block ${blockNum} to ${end}`);
+    blockNum += batchSize;
+  }
+  return events;
 }
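The per-block metric above reduces to subtracting two L1 timestamps; a worked example with hypothetical values:

```ts
// Hypothetical unix timestamps (seconds, as bigints) for one L2 block.
const uploadedTimestamp = 1725300000n; // L1 block containing the L2 block submission
const provenTimestamp = 1725301200n; // L1 block containing the proof verification
const provingTime = provenTimestamp - uploadedTimestamp; // 1200n seconds
const withinTimeout = provingTime <= 1800n; // true under --proving-timeout 1800
```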
8 changes: 4 additions & 4 deletions yarn-project/foundation/src/collection/object.ts
@@ -1,19 +1,19 @@
 /** Returns a new object with the same keys and where each value has been passed through the mapping function. */
 export function mapValues<K extends string | number | symbol, T, U>(
   obj: Record<K, T>,
-  fn: (value: T) => U,
+  fn: (value: T, key: K) => U,
 ): Record<K, U>;
 export function mapValues<K extends string | number | symbol, T, U>(
   obj: Partial<Record<K, T>>,
-  fn: (value: T) => U,
+  fn: (value: T, key: K) => U,
 ): Partial<Record<K, U>>;
 export function mapValues<K extends string | number | symbol, T, U>(
   obj: Record<K, T>,
-  fn: (value: T) => U,
+  fn: (value: T, key: K) => U,
 ): Record<K, U> {
   const result: Record<K, U> = {} as Record<K, U>;
   for (const key in obj) {
-    result[key] = fn(obj[key]);
+    result[key] = fn(obj[key], key);
   }
   return result;
 }
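A quick illustration of the widened `mapValues` signature, whose callback now also receives the key (this is what lets `prover_stats.ts` log the prover id while mapping its grouped events):

```ts
import { mapValues } from '@aztec/foundation/collection';

const blocksByProver = { '0x01': [10n, 10n, 11n], '0x02': [12n] };
// The callback's second argument is the key — new in this commit.
const labels = mapValues(blocksByProver, (blocks, proverId) => `${proverId}: ${new Set(blocks).size} unique`);
// => { '0x01': '0x01: 2 unique', '0x02': '0x02: 1 unique' }
```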
2 changes: 2 additions & 0 deletions yarn-project/yarn.lock
@@ -463,6 +463,7 @@ __metadata:
     "@jest/globals": ^29.5.0
     "@libp2p/peer-id-factory": ^3.0.4
     "@types/jest": ^29.5.0
+    "@types/lodash.chunk": ^4.2.9
     "@types/lodash.groupby": ^4.6.9
     "@types/lodash.startcase": ^4.4.7
     "@types/node": ^18.7.23
@@ -471,6 +472,7 @@ __metadata:
     commander: ^12.1.0
     jest: ^29.5.0
     jest-mock-extended: ^3.0.5
+    lodash.chunk: ^4.2.0
     lodash.groupby: ^4.6.0
     semver: ^7.5.4
     solc: ^0.8.26
