Skip to content

Commit

Permalink
feat: Track proving times in prover stats in CLI
Browse files Browse the repository at this point in the history
Adds an option to compute proving times in the prover-stats CLI command,
and split stats based on a configurable cutoff. Also ensures that the
same proven block is only counted once per prover.

Adds the lodash.chunk dependency, used to batch L1 getBlock timestamp queries.
  • Loading branch information
spalladino committed Sep 2, 2024
1 parent 89c16c3 commit 7a1ed69
Show file tree
Hide file tree
Showing 5 changed files with 121 additions and 15 deletions.
5 changes: 3 additions & 2 deletions yarn-project/archiver/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,9 @@ export * from './archiver/index.js';
export * from './rpc/index.js';
export * from './factory.js';

// We are not storing the info from these events in the archiver for now (and we don't really need to), so we expose this query directly
export { retrieveL2ProofVerifiedEvents } from './archiver/data_retrieval.js';
export { retrieveL2ProofVerifiedEvents, retrieveBlockMetadataFromRollup } from './archiver/data_retrieval.js';

export { getL2BlockProcessedLogs } from './archiver/eth_log_handlers.js';

const log = createDebugLogger('aztec:archiver');

Expand Down
2 changes: 2 additions & 0 deletions yarn-project/cli/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@
"@iarna/toml": "^2.2.5",
"@libp2p/peer-id-factory": "^3.0.4",
"commander": "^12.1.0",
"lodash.chunk": "^4.2.0",
"lodash.groupby": "^4.6.0",
"semver": "^7.5.4",
"solc": "^0.8.26",
Expand All @@ -86,6 +87,7 @@
"@aztec/protocol-contracts": "workspace:^",
"@jest/globals": "^29.5.0",
"@types/jest": "^29.5.0",
"@types/lodash.chunk": "^4.2.9",
"@types/lodash.groupby": "^4.6.9",
"@types/lodash.startcase": "^4.4.7",
"@types/node": "^18.7.23",
Expand Down
18 changes: 15 additions & 3 deletions yarn-project/cli/src/cmds/l1/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -196,17 +196,29 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL
ETHEREUM_HOST,
)
.addOption(l1ChainIdOption)
.option('--start-block <number>', 'The block number to start from', parseBigint, 1n)
.option('--start-block <number>', 'The L1 block number to start from', parseBigint, 1n)
.option('--end-block <number>', 'The last L1 block number to query', parseBigint)
.option('--batch-size <number>', 'The number of blocks to query in each batch', parseBigint, 100n)
.option('--proving-timeout <number>', 'Cutoff for proving time to consider a block', parseBigint)
.option('--l1-rollup-address <string>', 'Address of the rollup contract (required if node URL is not set)')
.option(
'--node-url <string>',
'JSON RPC URL of an Aztec node to retrieve the rollup contract address (required if L1 rollup address is not set)',
)
.action(async options => {
const { proverStats } = await import('./prover_stats.js');
const { l1RpcUrl, chainId, l1RollupAddress, startBlock, batchSize, nodeUrl } = options;
await proverStats({ l1RpcUrl, chainId, l1RollupAddress, startBlock, batchSize, nodeUrl, log });
const { l1RpcUrl, chainId, l1RollupAddress, startBlock, endBlock, batchSize, nodeUrl, provingTimeout } = options;
await proverStats({
l1RpcUrl,
chainId,
l1RollupAddress,
startBlock,
endBlock,
batchSize,
nodeUrl,
provingTimeout,
log,
});
});

return program;
Expand Down
109 changes: 99 additions & 10 deletions yarn-project/cli/src/cmds/l1/prover_stats.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
import { retrieveL2ProofVerifiedEvents } from '@aztec/archiver';
import { getL2BlockProcessedLogs, retrieveL2ProofVerifiedEvents } from '@aztec/archiver';
import { createAztecNodeClient } from '@aztec/circuit-types';
import { EthAddress } from '@aztec/circuits.js';
import { createEthereumChain } from '@aztec/ethereum';
import { type LogFn, createDebugLogger } from '@aztec/foundation/log';
import { unique } from '@aztec/foundation/collection';
import { type LogFn, type Logger, createDebugLogger } from '@aztec/foundation/log';

import chunk from 'lodash.chunk';
import groupBy from 'lodash.groupby';
import { createPublicClient, http } from 'viem';
import { type PublicClient, createPublicClient, http } from 'viem';

export async function proverStats(opts: {
l1RpcUrl: string;
Expand All @@ -14,13 +16,16 @@ export async function proverStats(opts: {
nodeUrl: string | undefined;
log: LogFn;
startBlock: bigint;
endBlock: bigint | undefined;
batchSize: bigint;
provingTimeout: bigint | undefined;
}) {
const debugLog = createDebugLogger('aztec:cli:prover_stats');
const { startBlock, chainId, l1RpcUrl, l1RollupAddress, batchSize, nodeUrl, log } = opts;
const { startBlock, chainId, l1RpcUrl, l1RollupAddress, batchSize, nodeUrl, provingTimeout, endBlock, log } = opts;
if (!l1RollupAddress && !nodeUrl) {
throw new Error('Either L1 rollup address or node URL must be set');
}

const rollup = l1RollupAddress
? EthAddress.fromString(l1RollupAddress)
: await createAztecNodeClient(nodeUrl!)
Expand All @@ -29,22 +34,106 @@ export async function proverStats(opts: {

const chain = createEthereumChain(l1RpcUrl, chainId).chainInfo;
const publicClient = createPublicClient({ chain, transport: http(l1RpcUrl) });
const lastBlockNum = await publicClient.getBlockNumber();
const lastBlockNum = endBlock ?? (await publicClient.getBlockNumber());
debugLog.verbose(`Querying events on rollup at ${rollup.toString()} from ${startBlock} up to ${lastBlockNum}`);

// Get all events for L2 proof submissions
const events = await getL2ProofVerifiedEvents(startBlock, lastBlockNum, batchSize, debugLog, publicClient, rollup);

// If we don't have a proving timeout, we can just count the number of unique blocks per prover
if (!provingTimeout) {
const stats = groupBy(events, 'proverId');
log(`prover_id, total_blocks_proven`);
for (const proverId in stats) {
const uniqueBlocks = new Set(stats[proverId].map(e => e.l2BlockNumber));
log(`${proverId}, ${uniqueBlocks.size}`);
}
return;
}

// But if we do, fetch the events for each block submitted, so we can look up their timestamp
const blockEvents = await getL2BlockEvents(startBlock, lastBlockNum, batchSize, debugLog, publicClient, rollup);

// Get the timestamps for every block on every log, both for proof and block submissions
const l1BlockNumbers = unique([...events.map(e => e.l1BlockNumber), ...blockEvents.map(e => e.blockNumber)]);
const l1BlockTimestamps: Record<string, bigint> = {};
for (const l1Batch of chunk(l1BlockNumbers, Number(batchSize))) {
const blocks = await Promise.all(
l1Batch.map(blockNumber => publicClient.getBlock({ includeTransactions: false, blockNumber })),
);
debugLog.verbose(`Queried ${blocks.length} L1 blocks between ${l1Batch[0]} and ${l1Batch[l1Batch.length - 1]}`);
for (const block of blocks) {
l1BlockTimestamps[block.number.toString()] = block.timestamp;
}
}

// Map from l2 block number to the l1 block in which it was submitted
const l2BlockSubmissions: Record<string, bigint> = {};
for (const blockEvent of blockEvents) {
l2BlockSubmissions[blockEvent.args.blockNumber.toString()] = blockEvent.blockNumber;
}

// Now calculate stats
const stats = groupBy(events, 'proverId');
log(`prover_id, blocks_proven_within_timeout, total_blocks_proven, avg_proving_time`);
for (const proverId in stats) {
const blocks = stats[proverId].map(e => {
const provenTimestamp = l1BlockTimestamps[e.l1BlockNumber.toString()];
const uploadedBlockNumber = l2BlockSubmissions[e.l2BlockNumber.toString()];
const uploadedTimestamp = l1BlockTimestamps[uploadedBlockNumber.toString()];
const provingTime = provenTimestamp - uploadedTimestamp;
debugLog.debug(
`prover=${e.proverId} blockNumber=${e.l2BlockNumber} uploaded=${uploadedTimestamp} proven=${provenTimestamp} time=${provingTime}`,
);
return { provenTimestamp, uploadedTimestamp, provingTime, ...e };
});

const withinTimeout = blocks.filter(b => b.provingTime <= provingTimeout);
const uniqueBlocksWithinTimeout = new Set(withinTimeout.map(e => e.l2BlockNumber));
const uniqueBlocks = new Set(stats[proverId].map(e => e.l2BlockNumber));
const avgProvingTime = Math.ceil(Number(blocks.reduce((acc, b) => acc + b.provingTime, 0n)) / blocks.length);

log(`${proverId}, ${uniqueBlocksWithinTimeout.size}, ${uniqueBlocks.size}, ${avgProvingTime}`);
}
return;
}

/**
 * Retrieves all L2 proof-verified events emitted by the rollup contract, querying the
 * L1 chain in fixed-size batches of blocks to avoid overly large log queries.
 * @param startBlock - First L1 block number to query (inclusive).
 * @param lastBlockNum - Last L1 block number to query (inclusive).
 * @param batchSize - Number of L1 blocks covered by each log query.
 * @param debugLog - Logger used for verbose progress reporting.
 * @param publicClient - Viem public client connected to the L1 chain.
 * @param rollup - Address of the rollup contract whose events are fetched.
 * @returns The concatenated events from all batches.
 */
async function getL2ProofVerifiedEvents(
  startBlock: bigint,
  lastBlockNum: bigint,
  batchSize: bigint,
  debugLog: Logger,
  publicClient: PublicClient,
  rollup: EthAddress,
) {
  let blockNum = startBlock;
  const events = [];
  while (blockNum <= lastBlockNum) {
    // Clamp the batch end so the final query never runs past the last block.
    const end = blockNum + batchSize > lastBlockNum + 1n ? lastBlockNum + 1n : blockNum + batchSize;
    debugLog.verbose(`Querying events from block ${blockNum} to ${end}`);
    const newEvents = await retrieveL2ProofVerifiedEvents(publicClient, rollup, blockNum, end);
    events.push(...newEvents);
    // Single progress line per batch (the diff left a stale duplicate here).
    debugLog.verbose(`Got ${newEvents.length} events querying l2 proof verified from block ${blockNum} to ${end}`);
    blockNum += batchSize;
  }
  return events;
}

const stats = groupBy(events, 'proverId');
for (const proverId in stats) {
log(`${proverId}, ${stats[proverId].length}`);
/**
 * Collects every L2 block submission log emitted by the rollup contract, scanning the
 * requested L1 block range one batch at a time.
 * @param startBlock - First L1 block number to scan (inclusive).
 * @param lastBlockNum - Last L1 block number to scan (inclusive).
 * @param batchSize - Width (in L1 blocks) of each log query.
 * @param debugLog - Logger used for verbose progress reporting.
 * @param publicClient - Viem public client connected to the L1 chain.
 * @param rollup - Address of the rollup contract whose logs are fetched.
 * @returns The logs accumulated across all batches.
 */
async function getL2BlockEvents(
  startBlock: bigint,
  lastBlockNum: bigint,
  batchSize: bigint,
  debugLog: Logger,
  publicClient: PublicClient,
  rollup: EthAddress,
) {
  const events = [];
  for (let from = startBlock; from <= lastBlockNum; from += batchSize) {
    // The upper bound is exclusive; cap it just past the last block of interest.
    const to = from + batchSize > lastBlockNum + 1n ? lastBlockNum + 1n : from + batchSize;
    const batch = await getL2BlockProcessedLogs(publicClient, rollup, from, to);
    events.push(...batch);
    debugLog.verbose(`Got ${batch.length} events querying l2 block submitted from block ${from} to ${to}`);
  }
  return events;
}
2 changes: 2 additions & 0 deletions yarn-project/yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -462,6 +462,7 @@ __metadata:
"@jest/globals": ^29.5.0
"@libp2p/peer-id-factory": ^3.0.4
"@types/jest": ^29.5.0
"@types/lodash.chunk": ^4.2.9
"@types/lodash.groupby": ^4.6.9
"@types/lodash.startcase": ^4.4.7
"@types/node": ^18.7.23
Expand All @@ -470,6 +471,7 @@ __metadata:
commander: ^12.1.0
jest: ^29.5.0
jest-mock-extended: ^3.0.5
lodash.chunk: ^4.2.0
lodash.groupby: ^4.6.0
semver: ^7.5.4
solc: ^0.8.26
Expand Down

0 comments on commit 7a1ed69

Please sign in to comment.