From da6fc38d97863b81fefb3bae0a452df09df24e9e Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 28 Sep 2023 12:07:09 -0300 Subject: [PATCH 01/22] Logger functions now accept a string instead of varargs --- .../src/uniswap_trade_on_l1_from_l2.test.ts | 8 ++-- yarn-project/cli/src/index.ts | 3 +- yarn-project/end-to-end/src/canary/cli.ts | 9 ++-- .../end-to-end/src/cli_docs_sandbox.test.ts | 9 ++-- .../src/e2e_sandbox_example.test.ts | 4 +- .../src/fixtures/cross_chain_test_harness.ts | 2 +- .../src/integration_archiver_l1_to_l2.test.ts | 2 +- .../src/uniswap_trade_on_l1_from_l2.test.ts | 8 ++-- .../foundation/src/fifo/memory_fifo.ts | 2 +- .../src/json-rpc/client/json_rpc_client.ts | 9 ++-- .../src/json-rpc/server/json_proxy.ts | 8 ++-- yarn-project/foundation/src/log/index.ts | 2 +- .../foundation/src/log/log_history.test.ts | 20 ++++----- yarn-project/foundation/src/log/logger.ts | 44 ++++++++++++------- .../transport/dispatch/create_dispatch_fn.ts | 4 +- .../src/transport/transport_client.ts | 5 ++- .../foundation/src/wasm/wasm_module.ts | 6 +-- .../noir-contracts/src/scripts/copy_output.ts | 3 +- yarn-project/p2p/src/bootstrap/bootstrap.ts | 7 +-- .../pxe/src/synchronizer/synchronizer.ts | 4 +- .../src/publisher/l1-publisher.ts | 2 +- .../src/publisher/viem-tx-sender.ts | 2 +- .../src/sequencer/sequencer.ts | 3 +- .../l2_block_downloader.ts | 2 +- 24 files changed, 95 insertions(+), 73 deletions(-) diff --git a/yarn-project/canary/src/uniswap_trade_on_l1_from_l2.test.ts b/yarn-project/canary/src/uniswap_trade_on_l1_from_l2.test.ts index 8ae683ebfee..58fcb9d6797 100644 --- a/yarn-project/canary/src/uniswap_trade_on_l1_from_l2.test.ts +++ b/yarn-project/canary/src/uniswap_trade_on_l1_from_l2.test.ts @@ -434,10 +434,10 @@ describe('uniswap_trade_on_l1_from_l2', () => { const wethL2BalanceAfterSwap = await getL2PrivateBalanceOf(ownerAddress, wethL2Contract); const daiL2BalanceAfterSwap = await getL2PrivateBalanceOf(ownerAddress, daiL2Contract); - 
logger('WETH balance before swap: ', wethL2BalanceBeforeSwap.toString()); - logger('DAI balance before swap : ', daiL2BalanceBeforeSwap.toString()); + logger('WETH balance before swap: ' + wethL2BalanceBeforeSwap.toString()); + logger('DAI balance before swap : ' + daiL2BalanceBeforeSwap.toString()); logger('***** 🧚‍♀️ SWAP L2 assets on L1 Uniswap 🧚‍♀️ *****'); - logger('WETH balance after swap : ', wethL2BalanceAfterSwap.toString()); - logger('DAI balance after swap : ', daiL2BalanceAfterSwap.toString()); + logger('WETH balance after swap : ' + wethL2BalanceAfterSwap.toString()); + logger('DAI balance after swap : ' + daiL2BalanceAfterSwap.toString()); }, 140_000); }); diff --git a/yarn-project/cli/src/index.ts b/yarn-project/cli/src/index.ts index 962acb0aa61..e0e190af6bf 100644 --- a/yarn-project/cli/src/index.ts +++ b/yarn-project/cli/src/index.ts @@ -19,6 +19,7 @@ import { createSecp256k1PeerId } from '@libp2p/peer-id-factory'; import { Command, Option } from 'commander'; import { readFileSync } from 'fs'; import { dirname, resolve } from 'path'; +import { format } from 'util'; import { mnemonicToAccount } from 'viem/accounts'; import { createCompatibleClient } from './client.js'; @@ -440,7 +441,7 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { const client = await createCompatibleClient(options.rpcUrl, debugLogger); const from = await getTxSender(client, options.from); const result = await client.viewTx(functionName, functionArgs, options.contractAddress, from); - log('\nView result: ', result, '\n'); + log(format('\nView result: ', result, '\n')); }); program diff --git a/yarn-project/end-to-end/src/canary/cli.ts b/yarn-project/end-to-end/src/canary/cli.ts index 2bac457365d..79bbbfb766b 100644 --- a/yarn-project/end-to-end/src/canary/cli.ts +++ b/yarn-project/end-to-end/src/canary/cli.ts @@ -2,7 +2,6 @@ import { AztecAddress, CompleteAddress, DebugLogger, Fr, PXE, computeMessageSecr import { getProgram } from '@aztec/cli'; 
import stringArgv from 'string-argv'; -import { format } from 'util'; const INITIAL_BALANCE = 33000; const TRANSFER_BALANCE = 3000; @@ -19,16 +18,16 @@ export const cliTestSuite = ( let pxe: PXE; let existingAccounts: CompleteAddress[]; let contractAddress: AztecAddress; - let log: (...args: any[]) => void; + let log: (msg: string) => void; // All logs emitted by the cli will be collected here, and reset between tests const logs: string[] = []; beforeAll(async () => { pxe = await setup(); - log = (...args: any[]) => { - logs.push(format(...args)); - debug(...args); + log = (msg: string) => { + logs.push(msg); + debug(msg); }; }, 30_000); diff --git a/yarn-project/end-to-end/src/cli_docs_sandbox.test.ts b/yarn-project/end-to-end/src/cli_docs_sandbox.test.ts index bedab689aef..a063df29739 100644 --- a/yarn-project/end-to-end/src/cli_docs_sandbox.test.ts +++ b/yarn-project/end-to-end/src/cli_docs_sandbox.test.ts @@ -3,7 +3,6 @@ import { getProgram } from '@aztec/cli'; import { TxHash } from '@aztec/types'; import stringArgv from 'string-argv'; -import { format } from 'util'; const debug = createDebugLogger('aztec:e2e_cli'); @@ -11,15 +10,15 @@ const { SANDBOX_URL = 'http://localhost:8080' } = process.env; describe('CLI docs sandbox', () => { let cli: ReturnType; - let log: (...args: any[]) => void; + let log: (msg: string) => void; // All logs emitted by the cli will be collected here, and reset between tests const logs: string[] = []; beforeAll(async () => { - log = (...args: any[]) => { - logs.push(format(...args)); - debug(...args); + log = (msg: string) => { + logs.push(msg); + debug(msg); }; await waitForSandboxWithCli(); diff --git a/yarn-project/end-to-end/src/e2e_sandbox_example.test.ts b/yarn-project/end-to-end/src/e2e_sandbox_example.test.ts index 665dc3714ad..3de8d16843f 100644 --- a/yarn-project/end-to-end/src/e2e_sandbox_example.test.ts +++ b/yarn-project/end-to-end/src/e2e_sandbox_example.test.ts @@ -13,6 +13,8 @@ import { import { GrumpkinScalar } from 
'@aztec/circuits.js'; import { TokenContract } from '@aztec/noir-contracts/types'; +import { format } from 'util'; + const { SANDBOX_URL = 'http://localhost:8080' } = process.env; // docs:end:imports @@ -29,7 +31,7 @@ describe('e2e_sandbox_example', () => { const nodeInfo = await pxe.getNodeInfo(); - logger('Aztec Sandbox Info ', nodeInfo); + logger(format('Aztec Sandbox Info ', nodeInfo)); // docs:end:setup expect(typeof nodeInfo.protocolVersion).toBe('number'); diff --git a/yarn-project/end-to-end/src/fixtures/cross_chain_test_harness.ts b/yarn-project/end-to-end/src/fixtures/cross_chain_test_harness.ts index 59aa1735331..aa46c47521a 100644 --- a/yarn-project/end-to-end/src/fixtures/cross_chain_test_harness.ts +++ b/yarn-project/end-to-end/src/fixtures/cross_chain_test_harness.ts @@ -132,7 +132,7 @@ export class CrossChainTestHarness { this.logger("Generating a claim secret using pedersen's hash function"); const secret = Fr.random(); const secretHash = await computeMessageSecretHash(secret); - this.logger('Generated claim secret: ', secretHash.toString(true)); + this.logger('Generated claim secret: ' + secretHash.toString(true)); return [secret, secretHash]; } diff --git a/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts b/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts index 7c0878ae32c..f7816061288 100644 --- a/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts +++ b/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts @@ -81,7 +81,7 @@ describe('archiver integration with l1 to l2 messages', () => { const secret = Fr.random(); const secretHash = await computeMessageSecretHash(secret); const secretString = `0x${secretHash.toBuffer().toString('hex')}` as `0x${string}`; - logger('Generated claim secret: ', secretString); + logger('Generated claim secret: ' + secretString); logger('Minting tokens on L1'); await underlyingERC20.write.mint([ethAccount.toString(), 1000000n], {} as any); diff --git 
a/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts b/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts index 583a692b84e..feb46eeb440 100644 --- a/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts +++ b/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts @@ -272,11 +272,11 @@ describe('uniswap_trade_on_l1_from_l2', () => { const wethL2BalanceAfterSwap = await wethCrossChainHarness.getL2PrivateBalanceOf(ownerAddress); const daiL2BalanceAfterSwap = await daiCrossChainHarness.getL2PrivateBalanceOf(ownerAddress); - logger('WETH balance before swap: ', wethL2BalanceBeforeSwap.toString()); - logger('DAI balance before swap : ', daiL2BalanceBeforeSwap.toString()); + logger('WETH balance before swap: ' + wethL2BalanceBeforeSwap.toString()); + logger('DAI balance before swap : ' + daiL2BalanceBeforeSwap.toString()); logger('***** 🧚‍♀️ SWAP L2 assets on L1 Uniswap 🧚‍♀️ *****'); - logger('WETH balance after swap : ', wethL2BalanceAfterSwap.toString()); - logger('DAI balance after swap : ', daiL2BalanceAfterSwap.toString()); + logger('WETH balance after swap : ' + wethL2BalanceAfterSwap.toString()); + logger('DAI balance after swap : ' + daiL2BalanceAfterSwap.toString()); }, 140_000); it('should uniswap trade on L1 from L2 funds publicly (swaps WETH -> DAI)', async () => { diff --git a/yarn-project/foundation/src/fifo/memory_fifo.ts b/yarn-project/foundation/src/fifo/memory_fifo.ts index c6233075dae..95f1b95fa77 100644 --- a/yarn-project/foundation/src/fifo/memory_fifo.ts +++ b/yarn-project/foundation/src/fifo/memory_fifo.ts @@ -108,7 +108,7 @@ export class MemoryFifo { await handler(item); } } catch (err) { - this.log.error('Queue handler exception:', err); + this.log.error('Queue handler exception', err); } } } diff --git a/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts b/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts index 39ddefae1a1..240db195b1e 100644 --- 
a/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts +++ b/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts @@ -3,6 +3,7 @@ // This takes a {foo(): T} and makes {foo(): Promise} // while avoiding Promise of Promise. import { RemoteObject } from 'comlink'; +import { format } from 'util'; import { DebugLogger, createDebugLogger } from '../../log/index.js'; import { NoRetryError, makeBackoff, retry } from '../../retry/index.js'; @@ -29,7 +30,7 @@ export async function defaultFetch( useApiEndpoints: boolean, noRetry = false, ) { - debug(`JsonRpcClient.fetch`, host, rpcMethod, '->', body); + debug(format(`JsonRpcClient.fetch`, host, rpcMethod, '->', body)); let resp: Response; if (useApiEndpoints) { resp = await fetch(`${host}/${rpcMethod}`, { @@ -104,9 +105,9 @@ export function createJsonRpcClient( method, params: params.map(param => convertToJsonObj(classConverter, param)), }; - debug(`JsonRpcClient.request`, method, '<-', params); + debug(format(`JsonRpcClient.request`, method, '<-', params)); const res = await fetch(host, method, body, useApiEndpoints); - debug(`JsonRpcClient.result`, method, '->', res); + debug(format(`JsonRpcClient.result`, method, '->', res)); if (res.error) { throw res.error; } @@ -124,7 +125,7 @@ export function createJsonRpcClient( get: (target, rpcMethod: string) => { if (['then', 'catch'].includes(rpcMethod)) return Reflect.get(target, rpcMethod); return (...params: any[]) => { - debug(`JsonRpcClient.constructor`, 'proxy', rpcMethod, '<-', params); + debug(format(`JsonRpcClient.constructor`, 'proxy', rpcMethod, '<-', params)); return request(rpcMethod, params); }; }, diff --git a/yarn-project/foundation/src/json-rpc/server/json_proxy.ts b/yarn-project/foundation/src/json-rpc/server/json_proxy.ts index 65134740e05..e7630d7f5e8 100644 --- a/yarn-project/foundation/src/json-rpc/server/json_proxy.ts +++ b/yarn-project/foundation/src/json-rpc/server/json_proxy.ts @@ -1,3 +1,5 @@ +import { format } from 'util'; + import { 
createDebugLogger } from '../../log/index.js'; import { ClassConverter, JsonClassConverterInput, StringClassConverterInput } from '../class_converter.js'; import { convertFromJsonObj, convertToJsonObj } from '../convert.js'; @@ -25,17 +27,17 @@ export class JsonProxy { * @returns The remote result. */ public async call(methodName: string, jsonParams: any[] = []) { - debug(`JsonProxy:call`, methodName, jsonParams); + debug(format(`JsonProxy:call`, methodName, jsonParams)); // Get access to our class members const proto = Object.getPrototypeOf(this.handler); assert(hasOwnProperty(proto, methodName), `JsonProxy: Method ${methodName} not found!`); assert(Array.isArray(jsonParams), 'JsonProxy: Params not an array!'); // convert the params from json representation to classes const convertedParams = jsonParams.map(param => convertFromJsonObj(this.classConverter, param)); - debug('JsonProxy:call', methodName, '<-', convertedParams); + debug(format('JsonProxy:call', methodName, '<-', convertedParams)); const rawRet = await (this.handler as any)[methodName](...convertedParams); const ret = convertToJsonObj(this.classConverter, rawRet); - debug('JsonProxy:call', methodName, '->', ret); + debug(format('JsonProxy:call', methodName, '->', ret)); return ret; } } diff --git a/yarn-project/foundation/src/log/index.ts b/yarn-project/foundation/src/log/index.ts index c9c747a50b1..6e42fe3c4b5 100644 --- a/yarn-project/foundation/src/log/index.ts +++ b/yarn-project/foundation/src/log/index.ts @@ -1,7 +1,7 @@ /** * A callable logger instance. 
*/ -export type LogFn = (...args: any[]) => void; +export type LogFn = (msg: string) => void; export * from './console.js'; export * from './debug.js'; diff --git a/yarn-project/foundation/src/log/log_history.test.ts b/yarn-project/foundation/src/log/log_history.test.ts index 5c63351fafa..4db9e5276f4 100644 --- a/yarn-project/foundation/src/log/log_history.test.ts +++ b/yarn-project/foundation/src/log/log_history.test.ts @@ -6,7 +6,7 @@ import { LogHistory } from './log_history.js'; jest.useFakeTimers({ doNotFake: ['performance'] }); describe('log history', () => { - let debug: (...any: any) => void; + let debug: (msg: string) => void; let logHistory: LogHistory; const timestemp = new Date().toISOString(); const name = 'test:a'; @@ -21,18 +21,18 @@ describe('log history', () => { logHistory.enable(); expect(logHistory.getLogs()).toEqual([]); debug('0'); - debug('1', 2); - debug('2', { key: ['value'] }, Buffer.alloc(2)); + debug('1'); + debug('2'); expect(logHistory.getLogs()).toEqual([ [timestemp, name, '0'], - [timestemp, name, '1', 2], - [timestemp, name, '2', { key: ['value'] }, Buffer.alloc(2)], + [timestemp, name, '1'], + [timestemp, name, '2'], ]); }); it('does not keep logs if not enabled', () => { debug('0'); - debug('1', 2); + debug('1'); expect(logHistory.getLogs()).toEqual([]); }); @@ -59,12 +59,12 @@ describe('log history', () => { expect(logHistory.getLogs()).toEqual([[timestemp, name, '0']]); enableLogs(`${name},${name2}`); - debug('1', 2); - debug2('one', 3); + debug('1'); + debug2('one'); expect(logHistory.getLogs()).toEqual([ [timestemp, name, '0'], - [timestemp, name, '1', 2], - [timestemp, name2, 'one', 3], + [timestemp, name, '1'], + [timestemp, name2, 'one'], ]); }); diff --git a/yarn-project/foundation/src/log/logger.ts b/yarn-project/foundation/src/log/logger.ts index 4e1d0c0f06d..9d5d1ea8602 100644 --- a/yarn-project/foundation/src/log/logger.ts +++ b/yarn-project/foundation/src/log/logger.ts @@ -16,10 +16,13 @@ type LogLevel = (typeof 
LogLevels)[number]; const envLogLevel = process.env.LOG_LEVEL?.toLowerCase() as LogLevel; const currentLevel = LogLevels.includes(envLogLevel) ? envLogLevel : DefaultLogLevel; +/** Log function that accepts an exception object */ +type ErrorLogFn = (msg: string, err?: Error | unknown) => void; + /** * Logger that supports multiple severity levels. */ -export type Logger = { [K in LogLevel]: LogFn }; +export type Logger = { [K in LogLevel]: LogFn } & { /** Error log function */ error: ErrorLogFn }; /** * Logger that supports multiple severity levels and can be called directly to issue a debug statement. @@ -40,17 +43,17 @@ export function createDebugLogger(name: string): DebugLogger { const logger = { silent: () => {}, - error: (...args: any[]) => logWithDebug(debugLogger, 'error', args), - warn: (...args: any[]) => logWithDebug(debugLogger, 'warn', args), - info: (...args: any[]) => logWithDebug(debugLogger, 'info', args), - verbose: (...args: any[]) => logWithDebug(debugLogger, 'verbose', args), - debug: (...args: any[]) => logWithDebug(debugLogger, 'debug', args), + error: (msg: string, err?: Error | unknown) => logWithDebug(debugLogger, 'error', formatError(msg, err)), + warn: (msg: string) => logWithDebug(debugLogger, 'warn', msg), + info: (msg: string) => logWithDebug(debugLogger, 'info', msg), + verbose: (msg: string) => logWithDebug(debugLogger, 'verbose', msg), + debug: (msg: string) => logWithDebug(debugLogger, 'debug', msg), }; - return Object.assign((...args: any[]) => logWithDebug(debugLogger, 'debug', args), logger); + return Object.assign((msg: string) => logWithDebug(debugLogger, 'debug', msg), logger); } /** A callback to capture all logs. */ -export type LogHandler = (level: LogLevel, namespace: string, args: any[]) => void; +export type LogHandler = (level: LogLevel, namespace: string, msg: string) => void; const logHandlers: LogHandler[] = []; @@ -68,14 +71,14 @@ export function onLog(handler: LogHandler) { * @param level - Intended log level. 
* @param args - Args to log. */ -function logWithDebug(debug: debug.Debugger, level: LogLevel, args: any[]) { +function logWithDebug(debug: debug.Debugger, level: LogLevel, msg: string) { for (const handler of logHandlers) { - handler(level, debug.namespace, args); + handler(level, debug.namespace, msg); } if (debug.enabled) { - debug(args[0], ...args.slice(1)); + debug(msg); } else if (LogLevels.indexOf(level) <= LogLevels.indexOf(currentLevel) && process.env.NODE_ENV !== 'test') { - printLog([getPrefix(debug, level), ...args]); + printLog(`${getPrefix(debug, level)} ${msg}`); } } @@ -96,9 +99,20 @@ function getPrefix(debugLogger: debug.Debugger, level: LogLevel) { /** * Outputs to console error. - * @param args - Args to log. + * @param msg - What to log. */ -function printLog(args: any[]) { +function printLog(msg: string) { // eslint-disable-next-line no-console - console.error(...args); + console.error(msg); +} + +/** + * Concatenates a log message and an exception. + * @param msg - Log message + * @param err - Error to log + * @returns A string with both the log message and the error message. + */ +function formatError(msg: string, err?: Error | unknown): string { + const errStr = err && [(err as Error).name, (err as Error).message].filter(x => !!x).join(' '); + return err ? 
`${msg}: ${errStr || err}` : msg; } diff --git a/yarn-project/foundation/src/transport/dispatch/create_dispatch_fn.ts b/yarn-project/foundation/src/transport/dispatch/create_dispatch_fn.ts index fef2b98bb2e..43276da87c4 100644 --- a/yarn-project/foundation/src/transport/dispatch/create_dispatch_fn.ts +++ b/yarn-project/foundation/src/transport/dispatch/create_dispatch_fn.ts @@ -1,3 +1,5 @@ +import { format } from 'util'; + import { createDebugLogger } from '../../log/index.js'; /** @@ -27,7 +29,7 @@ export interface DispatchMsg { export function createDispatchFn(targetFn: () => any, debug = createDebugLogger('aztec:foundation:dispatch')) { return async ({ fn, args }: DispatchMsg) => { const target = targetFn(); - debug(`dispatching to ${target}: ${fn}`, args); + debug(format(`dispatching to ${target}: ${fn}`, args)); return await target[fn](...args); }; } diff --git a/yarn-project/foundation/src/transport/transport_client.ts b/yarn-project/foundation/src/transport/transport_client.ts index 5fc5b631da2..3fb2fd836d5 100644 --- a/yarn-project/foundation/src/transport/transport_client.ts +++ b/yarn-project/foundation/src/transport/transport_client.ts @@ -1,4 +1,5 @@ import EventEmitter from 'events'; +import { format } from 'util'; import { createDebugLogger } from '../log/index.js'; import { EventMessage, ResponseMessage, isEventMessage } from './dispatch/messages.js'; @@ -91,7 +92,7 @@ export class TransportClient extends EventEmitter { } const msgId = this.msgId++; const msg = { msgId, payload }; - debug(`->`, msg); + debug(format(`->`, msg)); return new Promise((resolve, reject) => { this.pendingRequests.push({ resolve, reject, msgId }); this.socket!.send(msg, transfer).catch(reject); @@ -111,7 +112,7 @@ export class TransportClient extends EventEmitter { this.close(); return; } - debug(`<-`, msg); + debug(format(`<-`, msg)); if (isEventMessage(msg)) { this.emit('event_msg', msg.payload); return; diff --git a/yarn-project/foundation/src/wasm/wasm_module.ts 
b/yarn-project/foundation/src/wasm/wasm_module.ts index c673cd786c5..3a3490a90e8 100644 --- a/yarn-project/foundation/src/wasm/wasm_module.ts +++ b/yarn-project/foundation/src/wasm/wasm_module.ts @@ -149,9 +149,9 @@ export class WasmModule implements IWasmModule { */ public addLogger(logger: LogFn) { const oldDebug = this.debug; - this.debug = (...args: any[]) => { - logger(...args); - oldDebug(...args); + this.debug = (msg: string) => { + logger(msg); + oldDebug(msg); }; } diff --git a/yarn-project/noir-contracts/src/scripts/copy_output.ts b/yarn-project/noir-contracts/src/scripts/copy_output.ts index 53cf474a8df..15b5dede446 100644 --- a/yarn-project/noir-contracts/src/scripts/copy_output.ts +++ b/yarn-project/noir-contracts/src/scripts/copy_output.ts @@ -12,6 +12,7 @@ import omit from 'lodash.omit'; import snakeCase from 'lodash.snakecase'; import upperFirst from 'lodash.upperfirst'; import { join as pathJoin } from 'path'; +import { format } from 'util'; // const STATEMENT_TYPES = ['type', 'params', 'return'] as const; const log = createConsoleLogger('aztec:noir-contracts'); @@ -103,6 +104,6 @@ const main = () => { try { main(); } catch (err: unknown) { - log(err); + log(format(`Error copying build output`, err)); process.exit(1); } diff --git a/yarn-project/p2p/src/bootstrap/bootstrap.ts b/yarn-project/p2p/src/bootstrap/bootstrap.ts index e57e9596263..5c3dc673f06 100644 --- a/yarn-project/p2p/src/bootstrap/bootstrap.ts +++ b/yarn-project/p2p/src/bootstrap/bootstrap.ts @@ -8,6 +8,7 @@ import { mplex } from '@libp2p/mplex'; import { tcp } from '@libp2p/tcp'; import { Libp2p, Libp2pOptions, ServiceFactoryMap, createLibp2p } from 'libp2p'; import { identifyService } from 'libp2p/identify'; +import { format } from 'util'; import { P2PConfig } from '../config.js'; import { createLibP2PPeerId } from '../index.js'; @@ -74,15 +75,15 @@ export class BootstrapNode { }); this.node.addEventListener('peer:discovery', evt => { - this.logger('Discovered %s', 
evt.detail.id.toString()); // Log discovered peer + this.logger(format('Discovered %s', evt.detail.id.toString())); // Log discovered peer }); this.node.addEventListener('peer:connect', evt => { - this.logger('Connected to %s', evt.detail.toString()); // Log connected peer + this.logger(format('Connected to %s', evt.detail.toString())); // Log connected peer }); this.node.addEventListener('peer:disconnect', evt => { - this.logger('Disconnected from %s', evt.detail.toString()); // Log connected peer + this.logger(format('Disconnected from %s', evt.detail.toString())); // Log connected peer }); } diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts b/yarn-project/pxe/src/synchronizer/synchronizer.ts index 8a6796de435..d3deb73eab9 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts @@ -128,7 +128,7 @@ export class Synchronizer { this.synchedToBlock = latestBlock.block.number; } catch (err) { - this.log.error(err); + this.log.error(`Error in synchronizer work`, err); await this.interruptableSleep.sleep(retryInterval); } } @@ -182,7 +182,7 @@ export class Synchronizer { this.noteProcessors.push(noteProcessor); } } catch (err) { - this.log.error(err); + this.log.error(`Error in synchronizer workNoteProcessorCatchUp`, err); await this.interruptableSleep.sleep(retryInterval); } } diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index e22880c6cc6..5055fd0dd49 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -210,7 +210,7 @@ export class L1Publisher implements L2BlockReceiver { try { return await this.txSender.sendProcessTx(encodedData); } catch (err) { - this.log(`ROLLUP PUBLISH FAILED`, err); + this.log.error(`Rollup publish failed`, err); return undefined; } } diff --git 
a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts index 2e087b723fe..6c9b1eecf1f 100644 --- a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts +++ b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts @@ -88,7 +88,7 @@ export class ViemTxSender implements L1PublisherTxSender { }; } - this.log('Receipt not found for tx hash', txHash); + this.log(`Receipt not found for tx hash ${txHash}`); return undefined; } diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 487b227f5d1..e7716b039d9 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -173,8 +173,7 @@ export class Sequencer { await this.publishL2Block(block); this.log.info(`Submitted rollup block ${block.number} with ${processedValidTxs.length} transactions`); } catch (err) { - this.log.error(err); - this.log.error(`Rolling back world state DB`); + this.log.error(`Rolling back world state DB due to error assembling block`, err); await this.worldState.getLatest().rollback(); } } diff --git a/yarn-project/types/src/l2_block_downloader/l2_block_downloader.ts b/yarn-project/types/src/l2_block_downloader/l2_block_downloader.ts index ccb051cd70e..5b0f60d6049 100644 --- a/yarn-project/types/src/l2_block_downloader/l2_block_downloader.ts +++ b/yarn-project/types/src/l2_block_downloader/l2_block_downloader.ts @@ -43,7 +43,7 @@ export class L2BlockDownloader { await this.jobQueue.put(() => this.collectBlocks()); await this.interruptableSleep.sleep(this.pollIntervalMS); } catch (err) { - log.error(err); + log.error(`Error downloading L2 block`, err); await this.interruptableSleep.sleep(this.pollIntervalMS); } } From 6cca04b6edc38b058125451a02ef06a9ae7d1cf4 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 29 Sep 2023 17:29:39 -0300 Subject: [PATCH 
02/22] Collect benchmarks from e2e tests --- .circleci/config.yml | 62 +++++++++++ scripts/ci/aggregate_e2e_benchmark.js | 101 ++++++++++++++++++ scripts/ci/assemble_e2e_benchmark.sh | 28 +++++ .../aztec-node/src/aztec-node/server.ts | 34 +++--- yarn-project/aztec-sandbox/src/logging.ts | 5 +- yarn-project/end-to-end/package.json | 6 +- .../docker-compose-e2e-sandbox-browser.yml | 3 + .../scripts/docker-compose-e2e-sandbox.yml | 3 + .../end-to-end/scripts/docker-compose-p2p.yml | 4 +- .../end-to-end/scripts/docker-compose.yml | 4 +- .../end-to-end/scripts/run_tests_local | 3 + .../end-to-end/scripts/upload_logs_to_s3.sh | 18 ++++ .../benchmarks/bench_publish_rollup.test.ts | 69 ++++++++++++ .../end-to-end/src/fixtures/logging.ts | 34 ++++++ yarn-project/end-to-end/src/fixtures/utils.ts | 83 +++++++------- .../src/uniswap_trade_on_l1_from_l2.test.ts | 2 +- yarn-project/foundation/src/log/index.ts | 9 +- yarn-project/foundation/src/log/logger.ts | 46 +++++--- .../sequencer-client/src/publisher/index.ts | 28 +++++ .../src/publisher/l1-publisher.test.ts | 2 +- .../src/publisher/l1-publisher.ts | 62 ++++++++--- .../src/publisher/viem-tx-sender.ts | 32 +++++- .../src/sequencer/sequencer.ts | 9 +- .../types/src/logs/l2_block_l2_logs.ts | 13 +++ yarn-project/yarn.lock | 12 +++ 25 files changed, 567 insertions(+), 105 deletions(-) create mode 100644 scripts/ci/aggregate_e2e_benchmark.js create mode 100755 scripts/ci/assemble_e2e_benchmark.sh create mode 100755 yarn-project/end-to-end/scripts/upload_logs_to_s3.sh create mode 100644 yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts create mode 100644 yarn-project/end-to-end/src/fixtures/logging.ts diff --git a/.circleci/config.yml b/.circleci/config.yml index b05b90cf172..96343056735 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -530,6 +530,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_2_pxes.test.ts + environment: { DEBUG: 'aztec:*' } 
e2e-multiple-accounts-1-enc-key: machine: @@ -541,6 +542,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_multiple_accounts_1_enc_key.test.ts + environment: { DEBUG: 'aztec:*' } e2e-deploy-contract: machine: @@ -552,6 +554,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_deploy_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-lending-contract: machine: @@ -563,6 +566,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_lending_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-token-contract: machine: @@ -574,6 +578,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_token_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-private-airdrop: machine: @@ -585,6 +590,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_private_airdrop.test.ts + environment: { DEBUG: 'aztec:*' } e2e-private-token-contract: machine: @@ -596,6 +602,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_private_token_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-sandbox-example: machine: @@ -607,6 +614,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_sandbox_example.test.ts ./scripts/docker-compose-e2e-sandbox.yml + environment: { DEBUG: 'aztec:*' } e2e-multi-transfer-contract: machine: @@ -618,6 +626,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_multi_transfer.test.ts + environment: { DEBUG: 'aztec:*' } e2e-block-building: machine: @@ -629,6 +638,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_block_building.test.ts + environment: { DEBUG: 'aztec:*' } e2e-nested-contract: machine: @@ -640,6 +650,7 @@ jobs: - run: name: "Test" command: 
cond_run_script end-to-end ./scripts/run_tests_local e2e_nested_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-non-contract-account: machine: @@ -651,6 +662,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_non_contract_account.test.ts + environment: { DEBUG: 'aztec:*' } e2e-cross-chain-messaging: machine: @@ -662,6 +674,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_cross_chain_messaging.test.ts + environment: { DEBUG: 'aztec:*' } e2e-public-cross-chain-messaging: machine: @@ -673,6 +686,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_public_cross_chain_messaging.test.ts + environment: { DEBUG: 'aztec:*' } e2e-public-to-private-messaging: machine: @@ -684,6 +698,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_public_to_private_messaging.test.ts + environment: { DEBUG: 'aztec:*' } e2e-account-contracts: machine: @@ -695,6 +710,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_account_contracts.test.ts + environment: { DEBUG: 'aztec:*' } e2e-escrow-contract: machine: @@ -706,6 +722,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_escrow_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-pending-commitments-contract: machine: @@ -717,6 +734,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_pending_commitments_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-ordering: machine: @@ -728,6 +746,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_ordering.test.ts + environment: { DEBUG: 'aztec:*' } uniswap-trade-on-l1-from-l2: machine: @@ -739,6 +758,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local 
uniswap_trade_on_l1_from_l2.test.ts + environment: { DEBUG: 'aztec:*' } integration-archiver-l1-to-l2: machine: @@ -750,6 +770,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local integration_archiver_l1_to_l2.test.ts + environment: { DEBUG: 'aztec:*' } integration-l1-publisher: machine: @@ -761,6 +782,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local integration_l1_publisher.test.ts + environment: { DEBUG: 'aztec:*' } e2e-public-token-contract: machine: @@ -772,6 +794,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_public_token_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-cli: machine: @@ -783,6 +806,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_cli.test.ts + environment: { DEBUG: 'aztec:*' } e2e-p2p: machine: @@ -794,6 +818,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_p2p_network.test.ts + environment: { DEBUG: 'aztec:*' } e2e-browser-sandbox: machine: @@ -805,6 +830,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_aztec_js_browser.test.ts ./scripts/docker-compose-e2e-sandbox-browser.yml + environment: { DEBUG: 'aztec:*' } e2e-card-game: machine: @@ -816,6 +842,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_card_game.test.ts + environment: { DEBUG: 'aztec:*' } pxe-sandbox: machine: @@ -827,6 +854,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local pxe_sandbox.test.ts ./scripts/docker-compose-e2e-sandbox.yml + environment: { DEBUG: 'aztec:*' } cli-docs-sandbox: machine: @@ -838,6 +866,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local cli_docs_sandbox.test.ts ./scripts/docker-compose-e2e-sandbox.yml + environment: { DEBUG: 'aztec:*' } 
guides-writing-an-account-contract: machine: @@ -849,6 +878,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local guides/writing_an_account_contract.test.ts + environment: { DEBUG: 'aztec:*' } guides-dapp-testing: machine: @@ -860,6 +890,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local guides/dapp_testing.test.ts ./scripts/docker-compose-e2e-sandbox.yml + environment: { DEBUG: 'aztec:*' } guides-sample-dapp: machine: @@ -871,6 +902,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local sample-dapp ./scripts/docker-compose-e2e-sandbox.yml + environment: { DEBUG: 'aztec:*' } guides-up-quick-start: machine: @@ -882,6 +914,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local guides/up_quick_start.test.ts ./scripts/docker-compose-e2e-sandbox.yml + environment: { DEBUG: 'aztec:*' } e2e-canary-test: machine: @@ -894,6 +927,18 @@ jobs: name: "Test" command: cond_run_script canary-build ./scripts/run_tests uniswap_trade_on_l1_from_l2.test.ts canary-build ./scripts/docker-compose-e2e-sandbox.yml + bench-publish-rollup: + machine: + image: ubuntu-2204:2023.07.2 + resource_class: large + steps: + - *checkout + - *setup_env + - run: + name: "Benchmark" + command: cond_run_script end-to-end ./scripts/run_tests_local benchmarks/bench_publish_rollup.test.ts + environment: { DEBUG: 'aztec:benchmarks:*,aztec:sequencer' } + build-docs: machine: image: ubuntu-2204:2023.07.2 @@ -926,6 +971,16 @@ jobs: name: "Noop" command: echo Noop + bench-summary: + machine: + image: ubuntu-2204:2023.07.2 + steps: + - *checkout + - *setup_env + - run: + name: "Assemble benchmark summary from uploaded logs" + command: ./scripts/ci/assemble_e2e_benchmark.sh + deploy-npm: machine: image: ubuntu-2204:2023.07.2 @@ -1220,6 +1275,7 @@ workflows: - guides-dapp-testing: *e2e_test - guides-sample-dapp: *e2e_test - guides-up-quick-start: *e2e_test + - 
bench-publish-rollup: *e2e_test - e2e-end: requires: @@ -1257,6 +1313,12 @@ workflows: - guides-dapp-testing - guides-sample-dapp - guides-up-quick-start + - bench-publish-rollup + <<: *defaults + + - bench-summary: + requires: + - e2e-end <<: *defaults # Deployment and Canary tests diff --git a/scripts/ci/aggregate_e2e_benchmark.js b/scripts/ci/aggregate_e2e_benchmark.js new file mode 100644 index 00000000000..82ba6c90c8f --- /dev/null +++ b/scripts/ci/aggregate_e2e_benchmark.js @@ -0,0 +1,101 @@ +// Given a local folder with the e2e benchmark files, generates a single file +// output with the grouped metrics to be published. This script can probably +// be replaced by a single call to jq, but I found this easier to write, +// and pretty much every CI comes with a working version of node. + +const fs = require("fs"); +const path = require("path"); +const readline = require("readline"); + +// Metrics to capture +const L1_ROLLUP_CALLDATA_SIZE_IN_BYTES = "l1_rollup_calldata_size_in_bytes"; +const L1_ROLLUP_CALLDATA_GAS = "l1_rollup_calldata_gas"; +const L1_ROLLUP_EXECUTION_GAS = "l1_rollup_execution_gas"; + +// Events to track +const ROLLUP_PUBLISHED_TO_L1 = "rollup-published-to-l1"; + +// Rollup sizes to track (duplicated from yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts) +const ROLLUP_SIZES = process.env.ROLLUP_SIZES + ? process.env.ROLLUP_SIZES.split(",").map(Number) + : [8, 32, 128]; + +// Folder where to load logs from +const logsDir = process.env.LOGS_DIR ?? `log`; + +// Output files +const BENCHMARK_FILE_JSON = process.env.BENCHMARK_FILE_JSON ?? 
"benchmark.json"; + +// Appends a datapoint to the final results for the given metric in the given bucket +function append(results, metric, bucket, value) { + if (value === undefined) { + console.error(`Undefined value for ${metric} in bucket ${bucket}`); + return; + } + const numeric = Number(value); + if (Number.isNaN(numeric)) { + console.error(`Value ${value} for ${metric} in ${bucket} is not a number`); + return; + } + if (!results[metric]) results[metric] = {}; + if (!results[metric][bucket]) results[metric][bucket] = []; + results[metric][bucket].push(numeric); +} + +// Processes an entry with event name 'rollup-published-to-l1' and updates results +function processRollupPublished(entry, results) { + const bucket = entry.txCount; + if (!ROLLUP_SIZES.includes(bucket)) return; + append(results, L1_ROLLUP_CALLDATA_GAS, bucket, entry.calldataGas); + append(results, L1_ROLLUP_CALLDATA_SIZE_IN_BYTES, bucket, entry.calldataSize); + append(results, L1_ROLLUP_EXECUTION_GAS, bucket, entry.gasUsed); +} + +// Processes a parsed entry from a logfile and updates results +function processEntry(entry, results) { + switch (entry.eventName) { + case ROLLUP_PUBLISHED_TO_L1: + return processRollupPublished(entry, results); + default: + return; + } +} + +// Parses all jsonl files downloaded and aggregates them into a single results object +async function main() { + const results = {}; + + // Get all jsonl files in the logs dir + const files = fs.readdirSync(logsDir).filter((f) => f.endsWith(".jsonl")); + + // Iterate over each .jsonl file + for (const file of files) { + const filePath = path.join(logsDir, file); + const fileStream = fs.createReadStream(filePath); + const rl = readline.createInterface({ input: fileStream }); + + for await (const line of rl) { + const entry = JSON.parse(line); + processEntry(entry, results); + } + } + + // For each bucket of each metric compute the average all collected datapoints + for (const metricName in results) { + const metric = 
results[metricName]; + for (const bucketName in metric) { + const bucket = metric[bucketName]; + let avg = bucket.reduce((acc, val) => acc + val, 0) / bucket.length; + if (avg > 100) avg = Math.floor(avg); + metric[bucketName] = avg; + } + } + + // Throw in a timestamp + results.timestamp = new Date().toISOString(); + + // Write results to disk + fs.writeFileSync(BENCHMARK_FILE_JSON, JSON.stringify(results, null, 2)); +} + +main(); diff --git a/scripts/ci/assemble_e2e_benchmark.sh b/scripts/ci/assemble_e2e_benchmark.sh new file mode 100755 index 00000000000..b9754badfc2 --- /dev/null +++ b/scripts/ci/assemble_e2e_benchmark.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Grabs the log files uploaded in yarn-project/end-to-end/scripts/upload_logs_to_s3.sh +# that contain representative benchmarks, extracts whatever metrics are interesting, +# and assembles a single file that shows the current state of the repository. + +[ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace +set -eu + +BUCKET_NAME="aztec-ci-artifacts" +LOG_FOLDER="${LOG_FOLDER:-log}" +COMMIT_HASH="${COMMIT_HASH:-$(git rev-parse HEAD)}" +BENCHMARK_FILE_JSON="benchmark.json" + +mkdir -p $LOG_FOLDER + +aws s3 cp "s3://${BUCKET_NAME}/commits/${COMMIT_HASH}/" $LOG_FOLDER --include "bench*.jsonl" --recursive + +node scripts/ci/aggregate_e2e_benchmark.js +echo "generated: $BENCHMARK_FILE_JSON" + +aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/commits/${COMMIT_HASH}/${BENCHMARK_FILE_JSON}" + +if [ "${CIRCLE_BRANCH:-}" = "master" ]; then + aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/benchmarks-v1/${COMMIT_HASH}.json" + aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/benchmarks-v1/master.json" +fi + + diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index f1e3f3ad607..f485f391710 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -55,19 +55,19 @@ import { openDb } 
from './db.js'; */ export class AztecNodeService implements AztecNode { constructor( - protected config: AztecNodeConfig, - protected p2pClient: P2P, - protected blockSource: L2BlockSource, - protected encryptedLogsSource: L2LogsSource, - protected unencryptedLogsSource: L2LogsSource, - protected contractDataSource: ContractDataSource, - protected l1ToL2MessageSource: L1ToL2MessageSource, - protected worldStateSynchronizer: WorldStateSynchronizer, - protected sequencer: SequencerClient, - protected chainId: number, - protected version: number, - protected globalVariableBuilder: GlobalVariableBuilder, - protected merkleTreesDb: levelup.LevelUp, + protected readonly config: AztecNodeConfig, + protected readonly p2pClient: P2P, + protected readonly blockSource: L2BlockSource, + protected readonly encryptedLogsSource: L2LogsSource, + protected readonly unencryptedLogsSource: L2LogsSource, + protected readonly contractDataSource: ContractDataSource, + protected readonly l1ToL2MessageSource: L1ToL2MessageSource, + protected readonly worldStateSynchronizer: WorldStateSynchronizer, + protected readonly sequencer: SequencerClient, + protected readonly chainId: number, + protected readonly version: number, + protected readonly globalVariableBuilder: GlobalVariableBuilder, + protected readonly merkleTreesDb: levelup.LevelUp, private log = createDebugLogger('aztec:node'), ) {} @@ -122,6 +122,14 @@ export class AztecNodeService implements AztecNode { ); } + /** + * Returns the sequencer client instance. + * @returns The sequencer client instance. + */ + public getSequencer(): SequencerClient { + return this.sequencer; + } + /** * Method to return the currently deployed L1 contract addresses. * @returns - The currently deployed L1 contract addresses. 
diff --git a/yarn-project/aztec-sandbox/src/logging.ts b/yarn-project/aztec-sandbox/src/logging.ts index 3c270417e61..5a592496a49 100644 --- a/yarn-project/aztec-sandbox/src/logging.ts +++ b/yarn-project/aztec-sandbox/src/logging.ts @@ -2,7 +2,6 @@ import { onLog } from '@aztec/foundation/log'; import * as path from 'path'; import * as process from 'process'; -import * as util from 'util'; import * as winston from 'winston'; import DailyRotateFile from 'winston-daily-rotate-file'; @@ -37,8 +36,8 @@ function createWinstonLogger() { */ export function setupFileDebugLog() { const logger = createWinstonLogger(); - onLog((level, namespace, args) => { - logger.log({ level, namespace, message: util.format(...args) }); + onLog((level, namespace, message, data) => { + logger.log({ ...data, level, namespace, message }); }); const workdir = process.env.HOST_WORKDIR ?? process.cwd(); return path.join(workdir, LOG_DIR, CURRENT_LOG_FILE_NAME); diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index 73f29b7ef3c..78a811fc0ac 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -9,7 +9,7 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src \"!src/web/main.js\" && run -T eslint ./src", "formatting:fix": "run -T prettier -w ./src", - "test": "DEBUG='aztec:*' NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --runInBand --passWithNoTests --testTimeout=15000", + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --runInBand --passWithNoTests --testTimeout=15000", "test:integration": "concurrently -k -s first -c reset,dim -n test,anvil \"yarn test:integration:run\" \"anvil\"", "test:integration:run": "NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --no-cache --runInBand --config jest.integration.config.json" }, @@ -65,10 +65,12 @@ "ts-node": "^10.9.1", "tslib": "^2.4.0", "typescript": "^5.0.4", - "viem": 
"^1.2.5" + "viem": "^1.2.5", + "winston": "^3.10.0" }, "devDependencies": { "@rushstack/eslint-patch": "^1.1.4", + "@types/fs-extra": "^11.0.2", "@types/lodash.compact": "^3.0.7", "concurrently": "^7.6.0" }, diff --git a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml index 54424e6c93e..98197fbdffd 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml @@ -31,8 +31,11 @@ services: end-to-end: image: aztecprotocol/end-to-end:latest environment: + DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 SANDBOX_URL: http://sandbox:8080 SEARCH_START_BLOCK: ${FORK_BLOCK_NUMBER:-0} entrypoint: ['./scripts/start_e2e_ci_browser.sh', './src/e2e_aztec_js_browser.test.ts'] + volumes: + - ./log:/usr/src/yarn-project/end-to-end/log:rw diff --git a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml index 53eb4fde6d2..331263921b5 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml @@ -31,8 +31,11 @@ services: end-to-end: image: aztecprotocol/end-to-end:latest environment: + DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 SANDBOX_URL: http://sandbox:8080 SEARCH_START_BLOCK: ${FORK_BLOCK_NUMBER:-0} command: ${TEST:-./src/e2e_deploy_contract.test.ts} + volumes: + - ./log:/usr/src/yarn-project/end-to-end/log:rw diff --git a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml index 851641e664b..b6481251ff0 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml @@ -21,7 +21,7 @@ services: end-to-end: image: aztecprotocol/end-to-end:latest 
environment: - DEBUG: 'aztec:*' + DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL: 500 @@ -36,3 +36,5 @@ services: P2P_ENABLED: 'true' BOOTSTRAP_NODES: '/ip4/p2p-bootstrap/tcp/40400/p2p/12D3KooWGBpbC6qQFkaCYphjNeY6sV99o4SnEWyTeBigoVriDn4D' command: ${TEST:-./src/e2e_p2p_network.test.ts} + volumes: + - ./log:/usr/src/yarn-project/end-to-end/log:rw diff --git a/yarn-project/end-to-end/scripts/docker-compose.yml b/yarn-project/end-to-end/scripts/docker-compose.yml index a20f4ffb02f..90d90ee559a 100644 --- a/yarn-project/end-to-end/scripts/docker-compose.yml +++ b/yarn-project/end-to-end/scripts/docker-compose.yml @@ -15,7 +15,7 @@ services: end-to-end: image: aztecprotocol/end-to-end:latest environment: - DEBUG: 'aztec:*' + DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL_MS: 50 @@ -26,3 +26,5 @@ services: ARCHIVER_VIEM_POLLING_INTERVAL_MS: 500 SEARCH_START_BLOCK: ${FORK_BLOCK_NUMBER:-0} command: ${TEST:-./src/e2e_deploy_contract.test.ts} + volumes: + - ./log:/usr/src/yarn-project/end-to-end/log:rw diff --git a/yarn-project/end-to-end/scripts/run_tests_local b/yarn-project/end-to-end/scripts/run_tests_local index c17d582371d..5b223bceea4 100755 --- a/yarn-project/end-to-end/scripts/run_tests_local +++ b/yarn-project/end-to-end/scripts/run_tests_local @@ -19,3 +19,6 @@ done docker-compose -f $COMPOSE_FILE rm -f docker-compose -f $COMPOSE_FILE up --exit-code-from end-to-end + +# Upload the logs with metrics to S3 +upload_logs_to_s3 \ No newline at end of file diff --git a/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh b/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh new file mode 100755 index 00000000000..1fe7a66c1f2 --- /dev/null +++ b/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh @@ -0,0 +1,18 @@ +#!/bin/bash +# Uploads to S3 the contents of the log file mounted on the end-to-end container, +# which contains log entries with an associated event 
and metrics for it. +# Logs are uploaded to aztec-ci-artifacts/commits/$COMMIT/$JOB.jsonl + +[ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace +set -eu + +BUCKET_NAME="aztec-ci-artifacts" +LOG_FOLDER="${LOG_FOLDER:-log}" +COMMIT_HASH="${COMMIT_HASH:-$(git rev-parse HEAD)}" + +if [ ! -d "$LOG_FOLDER" ] || [ -z "$(ls -A "$LOG_FOLDER")" ]; then + echo "No logs in folder $LOG_FOLDER to upload" + exit 0 +fi + +aws s3 cp $LOG_FOLDER "s3://${BUCKET_NAME}/commits/${COMMIT_HASH}/" --include "*.jsonl" --recursive diff --git a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts new file mode 100644 index 00000000000..bb31cd1a9e8 --- /dev/null +++ b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts @@ -0,0 +1,69 @@ +/* eslint-disable camelcase */ +import { AztecAddress } from '@aztec/aztec.js'; +import { sleep } from '@aztec/foundation/sleep'; +import { TokenContract } from '@aztec/noir-contracts/types'; + +import times from 'lodash.times'; + +import { setup } from '../fixtures/utils.js'; + +const ROLLUP_SIZES = process.env.ROLLUP_SIZES ? process.env.ROLLUP_SIZES.split(',').map(Number) : [8, 32]; + +describe('benchmarks/publish_rollup', () => { + let context: Awaited>; + let token: TokenContract; + let owner: AztecAddress; + let recipient: AztecAddress; + + beforeEach(async () => { + context = await setup(2, { maxTxsPerBlock: 1024 }); + token = await TokenContract.deploy(context.wallet).send().deployed(); + [owner, recipient] = context.accounts.map(a => a.address); + await token.methods._initialize(owner).send().wait(); + await token.methods.mint_public(owner, 10000n).send().wait(); + await context.aztecNode?.getSequencer().stop(); + }, 60_000); + + it.each(ROLLUP_SIZES)( + `publishes a rollup with %d txs`, + async (txCount: number) => { + context.logger(`Assembling rollup with ${txCount} txs`); + // Simulate and simultaneously send %d txs. 
These should not yet be processed since sequencer is stopped. + const calls = times(txCount, () => token.methods.transfer_public(owner, recipient, 1, 0)); + calls.forEach(call => call.simulate({ skipPublicSimulation: true })); + const sentTxs = calls.map(call => call.send()); + // Awaiting txHash waits until the aztec node has received the tx into its p2p pool + await Promise.all(sentTxs.map(tx => tx.getTxHash())); + // And then wait a bit more just in case + await sleep(100); + // Restart sequencer to process all txs together + context.aztecNode?.getSequencer().restart(); + // Wait for the last tx to be processed + await sentTxs[sentTxs.length - 1].wait({ timeout: 600_00 }); + }, + 10 * 60_000, + ); + + afterEach(async () => { + await context.teardown(); + }, 60_000); + + // afterAll(() => { + // const results = { + // l1_rollup_calldata_size_in_bytes: {} as MetricValues, + // l1_rollup_calldata_gas: {} as MetricValues, + // l1_rollup_execution_gas: {} as MetricValues, + // }; + + // for (const size of ROLLUP_SIZES) { + // const stat = stats.find(stat => stat.txCount === size); + // if (!stat) throw new Error(`Could not find stat for size ${size} (got ${stats.map(s => s.txCount).join(', ')})`); + // results.l1_rollup_calldata_gas[size] = stat.calldataGas; + // results.l1_rollup_calldata_size_in_bytes[size] = stat.calldataSize; + // results.l1_rollup_execution_gas[size] = Number(stat.gasUsed); + // context.logger(`Stats for rollup of size ${size}`, stat); + // } + + // writeFileSync(BENCHMARK_FILE, JSON.stringify(results, null, 2)); + // }); +}); diff --git a/yarn-project/end-to-end/src/fixtures/logging.ts b/yarn-project/end-to-end/src/fixtures/logging.ts new file mode 100644 index 00000000000..54adc31866a --- /dev/null +++ b/yarn-project/end-to-end/src/fixtures/logging.ts @@ -0,0 +1,34 @@ +import { onLog } from '@aztec/foundation/log'; + +import { mkdirpSync } from 'fs-extra'; +import { dirname } from 'path'; +import * as winston from 'winston'; + +const { format, 
transports } = winston; + +let metricsLoggerSet = false; + +/** Returns whether metrics logging should be enabled by default, checking env vars CI and BENCHMARK. */ +export function isMetricsLoggingRequested() { + return !!(process.env.CI || process.env.BENCHMARK); +} + +/** + * Configures an NDJSON logger to output entries to a local file that have an `eventName` associated. + * Idempotent and automatically called by `setup` if CI or BENCHMARK env vars are set. + */ +export function setupMetricsLogger(filename: string) { + if (metricsLoggerSet) return; + mkdirpSync(dirname(filename)); + const logger = winston.createLogger({ + level: 'debug', + format: format.combine(format.timestamp(), format.json()), + transports: [new transports.File({ filename })], + }); + onLog((level, namespace, message, data) => { + if (data && data['eventName']) { + logger.log({ ...data, level, namespace, message }); + } + }); + metricsLoggerSet = true; +} diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 2955f2b2554..596d50ed8a2 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -44,6 +44,7 @@ import { NonNativeTokenContract, TokenBridgeContract, TokenContract } from '@azt import { PXEService, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; import { L2BlockL2Logs, LogType, PXE, TxStatus, UnencryptedL2Log } from '@aztec/types'; +import * as path from 'path'; import { Account, Chain, @@ -60,6 +61,7 @@ import { import { mnemonicToAccount } from 'viem/accounts'; import { MNEMONIC, localAnvil } from './fixtures.js'; +import { isMetricsLoggingRequested, setupMetricsLogger } from './logging.js'; const { SANDBOX_URL = '' } = process.env; @@ -218,60 +220,50 @@ async function setupWithSandbox(account: Account, config: AztecNodeConfig, logge }; } -/** - * Sets up the environment for the end-to-end tests. 
- * @param numberOfAccounts - The number of new accounts to be created once the PXE is initiated. - */ -export async function setup( - numberOfAccounts = 1, - stateLoad: string | undefined = undefined, -): Promise<{ - /** - * The Aztec Node service. - */ +/** Options for the e2e tests setup */ +type SetupOptions = { /** State load */ stateLoad?: string } & Partial; + +/** Context for an end-to-end test as returned by the `setup` function */ +export type EndToEndContext = { + /** The Aztec Node service. */ aztecNode: AztecNodeService | undefined; - /** - * The Private eXecution Environment (PXE). - */ + /** The Private eXecution Environment (PXE). */ pxe: PXE; - /** - * Return values from deployL1Contracts function. - */ + /** Return values from deployL1Contracts function. */ deployL1ContractsValues: DeployL1Contracts; - /** - * The accounts created by the PXE. - */ + /** The accounts created by the PXE. */ accounts: CompleteAddress[]; - /** - * The Aztec Node configuration. - */ + /** The Aztec Node configuration. */ config: AztecNodeConfig; - /** - * The first wallet to be used. - */ + /** The first wallet to be used. */ wallet: AccountWallet; - /** - * The wallets to be used. - */ + /** The wallets to be used. */ wallets: AccountWallet[]; - /** - * Logger instance named as the current test. - */ + /** Logger instance named as the current test. */ logger: DebugLogger; - /** - * The cheat codes. - */ + /** The cheat codes. */ cheatCodes: CheatCodes; - /** - * Function to stop the started services. - */ + /** Function to stop the started services. */ teardown: () => Promise; -}> { - const config = getConfigEnvVars(); +}; + +/** + * Sets up the environment for the end-to-end tests. + * @param numberOfAccounts - The number of new accounts to be created once the PXE is initiated. + * @param opts - Options to pass to the node initialisation and to the setup script. 
+ */ +export async function setup(numberOfAccounts = 1, opts: SetupOptions = {}): Promise { + const config = { ...getConfigEnvVars(), ...opts }; + + // Enable logging metrics to a local file named after the test suite + if (isMetricsLoggingRequested()) { + const filename = path.join('log', getJobName() + '.jsonl'); + setupMetricsLogger(filename); + } - if (stateLoad) { + if (opts.stateLoad) { const ethCheatCodes = new EthCheatCodes(config.rpcUrl); - await ethCheatCodes.loadChainState(stateLoad); + await ethCheatCodes.loadChainState(opts.stateLoad); } const logger = getLogger(); @@ -332,12 +324,17 @@ export async function setNextBlockTimestamp(rpcUrl: string, timestamp: number) { }); } +/** Returns the job name for the current test. */ +function getJobName() { + return process.env.JOB_NAME ?? expect.getState().currentTestName?.split(' ')[0].replaceAll('/', '_') ?? 'unknown'; +} + /** * Returns a logger instance for the current test. * @returns a logger instance for the current test. */ export function getLogger() { - const describeBlockName = expect.getState().currentTestName?.split(' ')[0]; + const describeBlockName = expect.getState().currentTestName?.split(' ')[0].replaceAll('/', ':'); return createDebugLogger('aztec:' + describeBlockName); } diff --git a/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts b/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts index feb46eeb440..407e223239e 100644 --- a/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts +++ b/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts @@ -64,7 +64,7 @@ describe('uniswap_trade_on_l1_from_l2', () => { wallets, logger: logger_, cheatCodes, - } = await setup(2, dumpedState); + } = await setup(2, { stateLoad: dumpedState }); const walletClient = deployL1ContractsValues.walletClient; const publicClient = deployL1ContractsValues.publicClient; diff --git a/yarn-project/foundation/src/log/index.ts b/yarn-project/foundation/src/log/index.ts index 
6e42fe3c4b5..f3acad0ae58 100644 --- a/yarn-project/foundation/src/log/index.ts +++ b/yarn-project/foundation/src/log/index.ts @@ -1,7 +1,8 @@ -/** - * A callable logger instance. - */ -export type LogFn = (msg: string) => void; +/** Structured log data to include with the message. */ +export type LogData = Record; + +/** A callable logger instance. */ +export type LogFn = (msg: string, data?: LogData) => void; export * from './console.js'; export * from './debug.js'; diff --git a/yarn-project/foundation/src/log/logger.ts b/yarn-project/foundation/src/log/logger.ts index 9d5d1ea8602..c65ba3f077e 100644 --- a/yarn-project/foundation/src/log/logger.ts +++ b/yarn-project/foundation/src/log/logger.ts @@ -2,22 +2,22 @@ import debug from 'debug'; import isNode from 'detect-node'; import { isatty } from 'tty'; -import { LogFn } from './index.js'; +import { LogData, LogFn } from './index.js'; // Matches a subset of Winston log levels const LogLevels = ['silent', 'error', 'warn', 'info', 'verbose', 'debug'] as const; -const DefaultLogLevel = 'info' as const; +const DefaultLogLevel = process.env.NODE_ENV === 'test' ? ('silent' as const) : ('info' as const); /** * A valid log severity level. */ -type LogLevel = (typeof LogLevels)[number]; +export type LogLevel = (typeof LogLevels)[number]; const envLogLevel = process.env.LOG_LEVEL?.toLowerCase() as LogLevel; const currentLevel = LogLevels.includes(envLogLevel) ? envLogLevel : DefaultLogLevel; /** Log function that accepts an exception object */ -type ErrorLogFn = (msg: string, err?: Error | unknown) => void; +type ErrorLogFn = (msg: string, err?: Error | unknown, data?: LogData) => void; /** * Logger that supports multiple severity levels. 
@@ -43,17 +43,17 @@ export function createDebugLogger(name: string): DebugLogger { const logger = { silent: () => {}, - error: (msg: string, err?: Error | unknown) => logWithDebug(debugLogger, 'error', formatError(msg, err)), - warn: (msg: string) => logWithDebug(debugLogger, 'warn', msg), - info: (msg: string) => logWithDebug(debugLogger, 'info', msg), - verbose: (msg: string) => logWithDebug(debugLogger, 'verbose', msg), - debug: (msg: string) => logWithDebug(debugLogger, 'debug', msg), + error: (msg: string, err?: unknown, data?: LogData) => logWithDebug(debugLogger, 'error', fmtErr(msg, err), data), + warn: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'warn', msg, data), + info: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'info', msg, data), + verbose: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'verbose', msg, data), + debug: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'debug', msg, data), }; - return Object.assign((msg: string) => logWithDebug(debugLogger, 'debug', msg), logger); + return Object.assign((msg: string, data?: LogData) => logWithDebug(debugLogger, 'debug', msg, data), logger); } /** A callback to capture all logs. */ -export type LogHandler = (level: LogLevel, namespace: string, msg: string) => void; +export type LogHandler = (level: LogLevel, namespace: string, msg: string, data?: LogData) => void; const logHandlers: LogHandler[] = []; @@ -71,14 +71,16 @@ export function onLog(handler: LogHandler) { * @param level - Intended log level. * @param args - Args to log. */ -function logWithDebug(debug: debug.Debugger, level: LogLevel, msg: string) { +function logWithDebug(debug: debug.Debugger, level: LogLevel, msg: string, data?: LogData) { for (const handler of logHandlers) { - handler(level, debug.namespace, msg); + handler(level, debug.namespace, msg, data); } + + const msgWithData = data ? 
`${msg} ${fmtLogData(data)}` : msg; if (debug.enabled) { - debug(msg); - } else if (LogLevels.indexOf(level) <= LogLevels.indexOf(currentLevel) && process.env.NODE_ENV !== 'test') { - printLog(`${getPrefix(debug, level)} ${msg}`); + debug(msgWithData); + } else if (LogLevels.indexOf(level) <= LogLevels.indexOf(currentLevel)) { + printLog(`${getPrefix(debug, level)} ${msgWithData}`); } } @@ -112,7 +114,17 @@ function printLog(msg: string) { * @param err - Error to log * @returns A string with both the log message and the error message. */ -function formatError(msg: string, err?: Error | unknown): string { +function fmtErr(msg: string, err?: Error | unknown): string { const errStr = err && [(err as Error).name, (err as Error).message].filter(x => !!x).join(' '); return err ? `${msg}: ${errStr || err}` : msg; } + +/** + * Formats structured log data as a string for console output. + * @param data - Optional log data. + */ +function fmtLogData(data?: LogData): string { + return Object.entries(data ?? {}) + .map(([key, value]) => `${key}=${value}`) + .join(' '); +} diff --git a/yarn-project/sequencer-client/src/publisher/index.ts b/yarn-project/sequencer-client/src/publisher/index.ts index 9fd03d25880..8f4a3a3b3e1 100644 --- a/yarn-project/sequencer-client/src/publisher/index.ts +++ b/yarn-project/sequencer-client/src/publisher/index.ts @@ -5,6 +5,34 @@ import { ViemTxSender } from './viem-tx-sender.js'; export { L1Publisher } from './l1-publisher.js'; export { PublisherConfig } from './config.js'; +/** Stats logged for each L1 rollup publish tx.*/ +export type L1PublishStats = { + /** Name of the event for metrics purposes */ + eventName: 'rollup-published-to-l1'; + /** Effective gas price of the tx. */ + gasPrice: bigint; + /** Effective gas used in the tx. */ + gasUsed: bigint; + /** Hash of the L1 tx. */ + transactionHash: string; + /** Gas cost of the calldata. */ + calldataGas: number; + /** Size in bytes of the calldata. 
*/ + calldataSize: number; + /** Number of txs in the L2 block. */ + txCount: number; + /** Number of the L2 block. */ + blockNumber: number; + /** Number of encrypted logs. */ + encryptedLogCount: number; + /** Number of unencrypted logs. */ + unencryptedLogCount: number; + /** Serialised size of encrypted logs. */ + encryptedLogSize: number; + /** Serialised size of unencrypted logs. */ + unencryptedLogSize: number; +}; + /** * Returns a new instance of the L1Publisher. * @param config - Configuration to initialize the new instance. diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 63b34ebc624..df3c48e550c 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -21,7 +21,7 @@ describe('L1Publisher', () => { txSender = mock(); txHash = `0x${Buffer.from('txHash').toString('hex')}`; // random tx hash - txReceipt = { transactionHash: txHash, status: true }; + txReceipt = { transactionHash: txHash, status: true } as MinimalTransactionReceipt; txSender.sendProcessTx.mockResolvedValueOnce(txHash); txSender.getTransactionReceipt.mockResolvedValueOnce(txReceipt); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 5055fd0dd49..62b566b8037 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -2,21 +2,36 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { InterruptableSleep } from '@aztec/foundation/sleep'; import { ExtendedContractData, L2Block } from '@aztec/types'; +import pick from 'lodash.pick'; + import { L2BlockReceiver } from '../receiver.js'; import { PublisherConfig } from './config.js'; +import { L1PublishStats } from './index.js'; + +/** + * Stats for a sent transaction. 
+ */ +export type TransactionStats = { + /** Hash of the transaction. */ + transactionHash: string; + /** Size in bytes of the tx calldata */ + calldataSize: number; + /** Gas required to pay for the calldata inclusion (depends on size and number of zeros) */ + calldataGas: number; +}; /** * Minimal information from a tx receipt returned by an L1PublisherTxSender. */ export type MinimalTransactionReceipt = { - /** - * True if the tx was successful, false if reverted. - */ + /** True if the tx was successful, false if reverted. */ status: boolean; - /** - * Hash of the transaction. - */ + /** Hash of the transaction. */ transactionHash: string; + /** Effective gas used by the tx */ + gasUsed: bigint; + /** Effective gas price paid by the tx */ + gasPrice: bigint; }; /** @@ -38,10 +53,7 @@ export interface L1PublisherTxSender { * @param publicKeys - The public keys of the deployed contract * @param newExtendedContractData - Data to publish. * @returns The hash of the mined tx. - * @remarks Partial addresses, public keys and contract data has to be in the same order. - * @remarks See the link bellow for more info on partial address and public key: - * https://github.com/AztecProtocol/aztec-packages/blob/master/docs/docs/concepts/foundation/accounts/keys.md#addresses-partial-addresses-and-public-keys - * TODO: replace the link above with the link to deployed docs + * @remarks Partial addresses, public keys and contract data has to be in the same order. Read more {@link https://docs.aztec.network/concepts/foundation/accounts/keys#addresses-partial-addresses-and-public-keys | here}. */ sendEmitContractDeploymentTx( l2BlockNum: number, @@ -55,6 +67,12 @@ export interface L1PublisherTxSender { * @returns Undefined if the tx hasn't been mined yet, the receipt otherwise. */ getTransactionReceipt(txHash: string): Promise; + + /** + * Returns info on a tx by calling eth_getTransaction. + * @param txHash - Hash of the tx to look for. 
+ */ + getTransactionStats(txHash: string): Promise; } /** @@ -122,7 +140,22 @@ export class L1Publisher implements L2BlockReceiver { if (!receipt) break; // Tx was mined successfully - if (receipt.status) return true; + if (receipt.status) { + const tx = await this.txSender.getTransactionStats(txHash); + const stats: L1PublishStats = { + ...pick(receipt, 'gasPrice', 'gasUsed', 'transactionHash'), + ...pick(tx!, 'calldataGas', 'calldataSize'), + txCount: l2BlockData.numberOfTxs, + blockNumber: l2BlockData.number, + encryptedLogCount: l2BlockData.newEncryptedLogs?.getTotalLogCount() ?? 0, + unencryptedLogCount: l2BlockData.newUnencryptedLogs?.getTotalLogCount() ?? 0, + encryptedLogSize: l2BlockData.newEncryptedLogs?.getSerializedLength() ?? 0, + unencryptedLogSize: l2BlockData.newUnencryptedLogs?.getSerializedLength() ?? 0, + eventName: 'rollup-published-to-l1', + }; + this.log.info(`Published L2 block to L1 rollup contract`, stats); + return true; + } // Check if someone else incremented the block number if (!(await this.checkNextL2BlockNum(l2BlockData.number))) { @@ -185,13 +218,18 @@ export class L1Publisher implements L2BlockReceiver { * Calling `interrupt` will cause any in progress call to `publishRollup` to return `false` asap. * Be warned, the call may return false even if the tx subsequently gets successfully mined. * In practice this shouldn't matter, as we'll only ever be calling `interrupt` when we know it's going to fail. - * A call to `clearInterrupt` is required before you can continue publishing. + * A call to `restart` is required before you can continue publishing. */ public interrupt() { this.interrupted = true; this.interruptableSleep.interrupt(); } + /** Restarts the publisher after calling `interrupt`. */ + public restart() { + this.interrupted = false; + } + // TODO: Check fee distributor has at least 0.5 ETH. 
// Related to https://github.com/AztecProtocol/aztec-packages/issues/1588 // eslint-disable-next-line require-await diff --git a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts index 6c9b1eecf1f..f8df1b805b0 100644 --- a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts +++ b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts @@ -13,13 +13,19 @@ import { createWalletClient, getAddress, getContract, + hexToBytes, http, } from 'viem'; import { PrivateKeyAccount, privateKeyToAccount } from 'viem/accounts'; import * as chains from 'viem/chains'; import { TxSenderConfig } from './config.js'; -import { L1PublisherTxSender, MinimalTransactionReceipt, L1ProcessArgs as ProcessTxArgs } from './l1-publisher.js'; +import { + L1PublisherTxSender, + MinimalTransactionReceipt, + L1ProcessArgs as ProcessTxArgs, + TransactionStats, +} from './l1-publisher.js'; /** * Pushes transactions to the L1 rollup contract using viem. @@ -69,6 +75,17 @@ export class ViemTxSender implements L1PublisherTxSender { }); } + async getTransactionStats(txHash: string): Promise { + const tx = await this.publicClient.getTransaction({ hash: txHash as Hex }); + if (!tx) return undefined; + const calldata = hexToBytes(tx.input); + return { + transactionHash: tx.hash, + calldataSize: calldata.length, + calldataGas: getCalldataGasUsage(calldata), + }; + } + /** * Returns a tx receipt if the tx has been mined. * @param txHash - Hash of the tx to look for. 
@@ -79,12 +96,12 @@ export class ViemTxSender implements L1PublisherTxSender { hash: txHash as Hex, }); - // TODO: check for confirmations - if (receipt) { return { status: receipt.status === 'success', transactionHash: txHash, + gasUsed: receipt.gasUsed, + gasPrice: receipt.effectiveGasPrice, }; } @@ -165,3 +182,12 @@ export class ViemTxSender implements L1PublisherTxSender { throw new Error(`Chain with id ${chainId} not found`); } } + +/** + * Returns cost of calldata usage in Ethereum. + * @param data - Calldata. + * @returns 4 for each zero byte, 16 for each nonzero. + */ +function getCalldataGasUsage(data: Uint8Array) { + return data.filter(byte => byte === 0).length * 4 + data.filter(byte => byte !== 0).length * 16; +} diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index e7716b039d9..4398545fc30 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -53,6 +53,7 @@ export class Sequencer { if (config.minTxsPerBlock) { this.minTxsPerBLock = config.minTxsPerBlock; } + this.log(`Initialized sequencer with ${this.minTxsPerBLock}-${this.maxTxsPerBlock} txs per block.`); } /** @@ -82,6 +83,7 @@ export class Sequencer { */ public restart() { this.log('Restarting sequencer'); + this.publisher.restart(); this.runningPromise!.start(); this.state = SequencerState.IDLE; } @@ -119,17 +121,16 @@ export class Sequencer { // Get txs to build the new block const pendingTxs = await this.p2pClient.getTxs(); if (pendingTxs.length < this.minTxsPerBLock) return; + this.log.info(`Retrieved ${pendingTxs.length} txs from P2P pool`); // Filter out invalid txs // TODO: It should be responsibility of the P2P layer to validate txs before passing them on here const validTxs = await this.takeValidTxs(pendingTxs); - if (validTxs.length < this.minTxsPerBLock) { - return; - } + if (validTxs.length < this.minTxsPerBLock) return; const 
blockNumber = (await this.l2BlockSource.getBlockNumber()) + 1; - this.log.info(`Building block ${blockNumber} with ${validTxs.length} transactions...`); + this.log.info(`Building block ${blockNumber} with ${validTxs.length} transactions`); this.state = SequencerState.CREATING_BLOCK; const newGlobalVariables = await this.globalsBuilder.buildGlobalVariables(new Fr(blockNumber)); diff --git a/yarn-project/types/src/logs/l2_block_l2_logs.ts b/yarn-project/types/src/logs/l2_block_l2_logs.ts index c37e61178ad..808062964ee 100644 --- a/yarn-project/types/src/logs/l2_block_l2_logs.ts +++ b/yarn-project/types/src/logs/l2_block_l2_logs.ts @@ -31,6 +31,19 @@ export class L2BlockL2Logs { return this.txLogs.reduce((acc, logs) => acc + logs.getSerializedLength(), 0) + 4; } + /** + * Gets the total number of logs emitted from all the TxL2Logs. + */ + public getTotalLogCount(): number { + let count = 0; + for (const txLog of this.txLogs) { + for (const functionLog of txLog.functionLogs) { + count += functionLog.logs.length; + } + } + return count; + } + /** * Deserializes logs from a buffer. * @param buffer - The buffer containing the serialized logs. 
diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index ebc867c97db..a9088d92c38 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -379,6 +379,7 @@ __metadata: "@jest/globals": ^29.5.0 "@noble/curves": ^1.0.0 "@rushstack/eslint-patch": ^1.1.4 + "@types/fs-extra": ^11.0.2 "@types/jest": ^29.5.0 "@types/koa": ^2.13.9 "@types/koa-static": ^4.0.2 @@ -408,6 +409,7 @@ __metadata: tslib: ^2.4.0 typescript: ^5.0.4 viem: ^1.2.5 + winston: ^3.10.0 languageName: unknown linkType: soft @@ -4273,6 +4275,16 @@ __metadata: languageName: node linkType: hard +"@types/fs-extra@npm:^11.0.2": + version: 11.0.2 + resolution: "@types/fs-extra@npm:11.0.2" + dependencies: + "@types/jsonfile": "*" + "@types/node": "*" + checksum: 5b3e30343ee62d2e393e1029355f13f64bab6f3416226e22492483f99da840e2e53ca22cbfa4ac3749f2f83f7086d19c009005c8fa175da01df0fae59c2d73e1 + languageName: node + linkType: hard + "@types/graceful-fs@npm:^4.1.3": version: 4.1.6 resolution: "@types/graceful-fs@npm:4.1.6" From 380508176c7afbd2c9889649123b03cc32c4f228 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 29 Sep 2023 18:40:11 -0300 Subject: [PATCH 03/22] End-to-end doesnt like dev dependencies --- yarn-project/end-to-end/package.json | 3 +-- yarn-project/yarn.lock | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index 78a811fc0ac..6d3d3426817 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -39,6 +39,7 @@ "@aztec/world-state": "workspace:^", "@jest/globals": "^29.5.0", "@noble/curves": "^1.0.0", + "@types/fs-extra": "^11.0.2", "@types/jest": "^29.5.0", "@types/koa": "^2.13.9", "@types/koa-static": "^4.0.2", @@ -70,8 +71,6 @@ }, "devDependencies": { "@rushstack/eslint-patch": "^1.1.4", - "@types/fs-extra": "^11.0.2", - "@types/lodash.compact": "^3.0.7", "concurrently": "^7.6.0" }, "files": [ diff --git a/yarn-project/yarn.lock 
b/yarn-project/yarn.lock index a9088d92c38..cf21ff9d41a 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -384,7 +384,6 @@ __metadata: "@types/koa": ^2.13.9 "@types/koa-static": ^4.0.2 "@types/levelup": ^5.1.2 - "@types/lodash.compact": ^3.0.7 "@types/lodash.every": ^4.6.7 "@types/lodash.times": ^4.3.7 "@types/lodash.zip": ^4.2.7 From b3ba3e12b07961ffe20536fcec1dbff4fb079cba Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 29 Sep 2023 18:48:25 -0300 Subject: [PATCH 04/22] Remove commented out code --- .../benchmarks/bench_publish_rollup.test.ts | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts index bb31cd1a9e8..7f1136cfaf2 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts @@ -47,23 +47,4 @@ describe('benchmarks/publish_rollup', () => { afterEach(async () => { await context.teardown(); }, 60_000); - - // afterAll(() => { - // const results = { - // l1_rollup_calldata_size_in_bytes: {} as MetricValues, - // l1_rollup_calldata_gas: {} as MetricValues, - // l1_rollup_execution_gas: {} as MetricValues, - // }; - - // for (const size of ROLLUP_SIZES) { - // const stat = stats.find(stat => stat.txCount === size); - // if (!stat) throw new Error(`Could not find stat for size ${size} (got ${stats.map(s => s.txCount).join(', ')})`); - // results.l1_rollup_calldata_gas[size] = stat.calldataGas; - // results.l1_rollup_calldata_size_in_bytes[size] = stat.calldataSize; - // results.l1_rollup_execution_gas[size] = Number(stat.gasUsed); - // context.logger(`Stats for rollup of size ${size}`, stat); - // } - - // writeFileSync(BENCHMARK_FILE, JSON.stringify(results, null, 2)); - // }); }); From 9f7ce0ee205c85d440f2d2ad9a8300068bf9ed07 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: 
Fri, 29 Sep 2023 18:52:34 -0300 Subject: [PATCH 05/22] Fix path to script --- yarn-project/end-to-end/scripts/run_tests_local | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/scripts/run_tests_local b/yarn-project/end-to-end/scripts/run_tests_local index 5b223bceea4..5a77abc3af1 100755 --- a/yarn-project/end-to-end/scripts/run_tests_local +++ b/yarn-project/end-to-end/scripts/run_tests_local @@ -21,4 +21,4 @@ docker-compose -f $COMPOSE_FILE rm -f docker-compose -f $COMPOSE_FILE up --exit-code-from end-to-end # Upload the logs with metrics to S3 -upload_logs_to_s3 \ No newline at end of file +scripts/upload_logs_to_s3 \ No newline at end of file From f25158f5d1f18bcce2ac9412c9fcffd8e7c712eb Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 29 Sep 2023 19:13:29 -0300 Subject: [PATCH 06/22] Fix path to script again --- yarn-project/end-to-end/scripts/run_tests_local | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/scripts/run_tests_local b/yarn-project/end-to-end/scripts/run_tests_local index 5a77abc3af1..3df64ff4061 100755 --- a/yarn-project/end-to-end/scripts/run_tests_local +++ b/yarn-project/end-to-end/scripts/run_tests_local @@ -21,4 +21,4 @@ docker-compose -f $COMPOSE_FILE rm -f docker-compose -f $COMPOSE_FILE up --exit-code-from end-to-end # Upload the logs with metrics to S3 -scripts/upload_logs_to_s3 \ No newline at end of file +./scripts/upload_logs_to_s3.sh \ No newline at end of file From f21a1436d8b003055758f1ad0ebed1b1a9fb025a Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 29 Sep 2023 19:32:54 -0300 Subject: [PATCH 07/22] Comment to PR --- scripts/ci/aggregate_e2e_benchmark.js | 23 ++--- scripts/ci/assemble_e2e_benchmark.sh | 3 + scripts/ci/benchmark_shared.js | 24 +++++ scripts/ci/comment_e2e_benchmark.js | 137 ++++++++++++++++++++++++++ 4 files changed, 172 insertions(+), 15 deletions(-) create mode 100644 scripts/ci/benchmark_shared.js create 
mode 100644 scripts/ci/comment_e2e_benchmark.js diff --git a/scripts/ci/aggregate_e2e_benchmark.js b/scripts/ci/aggregate_e2e_benchmark.js index 82ba6c90c8f..faa12fb47f7 100644 --- a/scripts/ci/aggregate_e2e_benchmark.js +++ b/scripts/ci/aggregate_e2e_benchmark.js @@ -7,25 +7,18 @@ const fs = require("fs"); const path = require("path"); const readline = require("readline"); -// Metrics to capture -const L1_ROLLUP_CALLDATA_SIZE_IN_BYTES = "l1_rollup_calldata_size_in_bytes"; -const L1_ROLLUP_CALLDATA_GAS = "l1_rollup_calldata_gas"; -const L1_ROLLUP_EXECUTION_GAS = "l1_rollup_execution_gas"; - -// Events to track -const ROLLUP_PUBLISHED_TO_L1 = "rollup-published-to-l1"; - -// Rollup sizes to track (duplicated from yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts) -const ROLLUP_SIZES = process.env.ROLLUP_SIZES - ? process.env.ROLLUP_SIZES.split(",").map(Number) - : [8, 32, 128]; +const { + L1_ROLLUP_CALLDATA_SIZE_IN_BYTES, + L1_ROLLUP_CALLDATA_GAS, + L1_ROLLUP_EXECUTION_GAS, + ROLLUP_PUBLISHED_TO_L1, + ROLLUP_SIZES, + BENCHMARK_FILE_JSON, +} = require("./benchmark_shared.js"); // Folder where to load logs from const logsDir = process.env.LOGS_DIR ?? `log`; -// Output files -const BENCHMARK_FILE_JSON = process.env.BENCHMARK_FILE_JSON ?? 
"benchmark.json"; - // Appends a datapoint to the final results for the given metric in the given bucket function append(results, metric, bucket, value) { if (value === undefined) { diff --git a/scripts/ci/assemble_e2e_benchmark.sh b/scripts/ci/assemble_e2e_benchmark.sh index b9754badfc2..2540c90c992 100755 --- a/scripts/ci/assemble_e2e_benchmark.sh +++ b/scripts/ci/assemble_e2e_benchmark.sh @@ -23,6 +23,9 @@ aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/commits/${COMMIT_HASH}/${BEN if [ "${CIRCLE_BRANCH:-}" = "master" ]; then aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/benchmarks-v1/${COMMIT_HASH}.json" aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/benchmarks-v1/master.json" +else + node scripts/ci/comment_e2e_benchmark.js + echo "commented on pr" fi diff --git a/scripts/ci/benchmark_shared.js b/scripts/ci/benchmark_shared.js new file mode 100644 index 00000000000..6554b15c6e2 --- /dev/null +++ b/scripts/ci/benchmark_shared.js @@ -0,0 +1,24 @@ +// Metrics to capture +const L1_ROLLUP_CALLDATA_SIZE_IN_BYTES = "l1_rollup_calldata_size_in_bytes"; +const L1_ROLLUP_CALLDATA_GAS = "l1_rollup_calldata_gas"; +const L1_ROLLUP_EXECUTION_GAS = "l1_rollup_execution_gas"; + +// Events to track +const ROLLUP_PUBLISHED_TO_L1 = "rollup-published-to-l1"; + +// Rollup sizes to track (duplicated from yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts) +const ROLLUP_SIZES = process.env.ROLLUP_SIZES + ? process.env.ROLLUP_SIZES.split(",").map(Number) + : [8, 32, 128]; + +// Output files +const BENCHMARK_FILE_JSON = process.env.BENCHMARK_FILE_JSON ?? 
"benchmark.json"; + +module.exports = { + L1_ROLLUP_CALLDATA_SIZE_IN_BYTES, + L1_ROLLUP_CALLDATA_GAS, + L1_ROLLUP_EXECUTION_GAS, + ROLLUP_PUBLISHED_TO_L1, + ROLLUP_SIZES, + BENCHMARK_FILE_JSON, +}; diff --git a/scripts/ci/comment_e2e_benchmark.js b/scripts/ci/comment_e2e_benchmark.js new file mode 100644 index 00000000000..5eb2e7f98bb --- /dev/null +++ b/scripts/ci/comment_e2e_benchmark.js @@ -0,0 +1,137 @@ +// Given a local benchmark json aggregated file, reformats it in markdown +// and comments on the PR that prompted it. If the CI is rerun, the comment +// is updated. + +const https = require("https"); +const fs = require("fs"); + +const GITHUB_TOKEN = process.env.GITHUB_TOKEN; +const OWNER = "AztecProtocol"; +const REPO = "aztec3-packages"; +const COMMENT_MARK = ""; + +const { ROLLUP_SIZES, BENCHMARK_FILE_JSON } = require("./benchmark_shared.js"); + +// Returns the md content to post +function getContent() { + const benchmark = JSON.parse(fs.readFileSync(BENCHMARK_FILE_JSON, "utf-8")); + delete benchmark.timestamp; + + const sizes = ROLLUP_SIZES; + const header = `| Metric | ${sizes.map((i) => `${i} txs`).join(" | ")} |`; + const separator = `| - | ${sizes.map(() => "-").join(" | ")} |`; + const rows = Object.keys(benchmark).map((key) => { + const metric = benchmark[key]; + return `${key} | ${sizes.map((i) => metric[i]).join(" | ")} |`; + }); + + return ` +# Benchmark results + +### On rollup published to L1 + +${header} +${separator} +${rows.join("\n")} + +${COMMENT_MARK} +`; +} + +// Returns the number of the current PR +function getPrNumber() { + if (!process.env.CIRCLE_PULL_REQUEST) throw new Error(`Not in Circle PR`); + const fragments = process.env.CIRCLE_PULL_REQUEST.split("/"); + return fragments[fragments.length - 1]; +} + +// Function to check if a bench comment already exists +async function getExistingComment() { + try { + const response = await sendGitHubRequest( + `/repos/${OWNER}/${REPO}/issues/${getPrNumber()}/comments` + ); + const comments = 
JSON.parse(response); + return comments.find((comment) => comment.body.includes(COMMENT_MARK)); + } catch (error) { + console.error("Error checking for existing comments:", error.message); + process.exit(1); + } +} + +// Function to create or update a comment +async function upsertComment(existingCommentId) { + try { + const commentContent = getContent(); + const commentData = { body: commentContent }; + + const requestMethod = existingCommentId ? "PATCH" : "POST"; + const requestUrl = existingCommentId + ? `/repos/${OWNER}/${REPO}/issues/comments/${existingCommentId}` + : `/repos/${OWNER}/${REPO}/issues/${getPrNumber()}/comments`; + + await sendGitHubRequest(requestUrl, requestMethod, commentData); + console.log("Comment added or updated successfully."); + } catch (error) { + console.error("Error adding or updating comment:", error.message); + process.exit(1); + } +} + +// Function to send a request to the GitHub API +async function sendGitHubRequest(url, method = "GET", data = null) { + const apiUrl = url.startsWith("http") ? 
url : `https://api.github.com${url}`; + const headers = { + Authorization: `Bearer ${GITHUB_TOKEN}`, + "User-Agent": OWNER, + }; + if (data) headers["Content-Type"] = "application/json"; + + const requestOptions = { method, headers }; + if (data) requestOptions.body = JSON.stringify(data); + + return new Promise((resolve, reject) => { + const req = https.request(apiUrl, requestOptions, (res) => { + if ( + res.statusCode === 301 || + res.statusCode === 302 || + res.statusCode === 307 + ) { + sendGitHubRequest(res.headers.location, method, data) + .then(resolve) + .catch(reject); + return; + } else { + let data = ""; + res.on("data", (chunk) => { + data += chunk; + }); + + res.on("end", () => { + if (res.statusCode >= 200 && res.statusCode < 300) { + resolve(data); + } else { + reject( + new Error( + `GitHub API request failed with ${res.statusCode}: ${data}` + ) + ); + } + }); + } + }); + + req.on("error", (error) => { + reject(error); + }); + + req.end(); + }); +} + +async function main() { + const existingComment = await getExistingComment(); + await upsertComment(existingComment?.id); +} + +main(); From 930516a6cc9d93f9c5e96cec935f2ea9a102dba7 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 29 Sep 2023 19:35:38 -0300 Subject: [PATCH 08/22] Set benchmark true on docker composes --- .../end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml | 1 + yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml | 1 + yarn-project/end-to-end/scripts/docker-compose-p2p.yml | 1 + yarn-project/end-to-end/scripts/docker-compose.yml | 1 + 4 files changed, 4 insertions(+) diff --git a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml index 98197fbdffd..a9f71fb021f 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml @@ -31,6 +31,7 @@ services: end-to-end: 
image: aztecprotocol/end-to-end:latest environment: + BENCHMARK: true DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 diff --git a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml index 331263921b5..f1fe6e0aab7 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml @@ -31,6 +31,7 @@ services: end-to-end: image: aztecprotocol/end-to-end:latest environment: + BENCHMARK: true DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 diff --git a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml index b6481251ff0..4476f47f73e 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml @@ -21,6 +21,7 @@ services: end-to-end: image: aztecprotocol/end-to-end:latest environment: + BENCHMARK: true DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 diff --git a/yarn-project/end-to-end/scripts/docker-compose.yml b/yarn-project/end-to-end/scripts/docker-compose.yml index 90d90ee559a..ab50d49d932 100644 --- a/yarn-project/end-to-end/scripts/docker-compose.yml +++ b/yarn-project/end-to-end/scripts/docker-compose.yml @@ -15,6 +15,7 @@ services: end-to-end: image: aztecprotocol/end-to-end:latest environment: + BENCHMARK: true DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 From fd123159857226208d03a1219e7880bf7df44ab7 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 29 Sep 2023 19:58:02 -0300 Subject: [PATCH 09/22] Fix path to log folder in compose files --- .../end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml | 2 +- yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml | 2 +- yarn-project/end-to-end/scripts/docker-compose-p2p.yml | 2 +- 
yarn-project/end-to-end/scripts/docker-compose.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml index a9f71fb021f..7436b41d6fb 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml @@ -39,4 +39,4 @@ services: SEARCH_START_BLOCK: ${FORK_BLOCK_NUMBER:-0} entrypoint: ['./scripts/start_e2e_ci_browser.sh', './src/e2e_aztec_js_browser.test.ts'] volumes: - - ./log:/usr/src/yarn-project/end-to-end/log:rw + - ../log:/usr/src/yarn-project/end-to-end/log:rw diff --git a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml index f1fe6e0aab7..e59bb5c062f 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml @@ -39,4 +39,4 @@ services: SEARCH_START_BLOCK: ${FORK_BLOCK_NUMBER:-0} command: ${TEST:-./src/e2e_deploy_contract.test.ts} volumes: - - ./log:/usr/src/yarn-project/end-to-end/log:rw + - ../log:/usr/src/yarn-project/end-to-end/log:rw diff --git a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml index 4476f47f73e..c6d607a79bb 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml @@ -38,4 +38,4 @@ services: BOOTSTRAP_NODES: '/ip4/p2p-bootstrap/tcp/40400/p2p/12D3KooWGBpbC6qQFkaCYphjNeY6sV99o4SnEWyTeBigoVriDn4D' command: ${TEST:-./src/e2e_p2p_network.test.ts} volumes: - - ./log:/usr/src/yarn-project/end-to-end/log:rw + - ../log:/usr/src/yarn-project/end-to-end/log:rw diff --git a/yarn-project/end-to-end/scripts/docker-compose.yml b/yarn-project/end-to-end/scripts/docker-compose.yml index 
ab50d49d932..69db7b71b14 100644 --- a/yarn-project/end-to-end/scripts/docker-compose.yml +++ b/yarn-project/end-to-end/scripts/docker-compose.yml @@ -28,4 +28,4 @@ services: SEARCH_START_BLOCK: ${FORK_BLOCK_NUMBER:-0} command: ${TEST:-./src/e2e_deploy_contract.test.ts} volumes: - - ./log:/usr/src/yarn-project/end-to-end/log:rw + - ../log:/usr/src/yarn-project/end-to-end/log:rw From 578c3ff23a59ba2f499a6a1c3b3e7a5c59a6618a Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 29 Sep 2023 19:58:20 -0300 Subject: [PATCH 10/22] Fix post to gh comment --- scripts/ci/comment_e2e_benchmark.js | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/scripts/ci/comment_e2e_benchmark.js b/scripts/ci/comment_e2e_benchmark.js index 5eb2e7f98bb..67dcdfc2a92 100644 --- a/scripts/ci/comment_e2e_benchmark.js +++ b/scripts/ci/comment_e2e_benchmark.js @@ -26,9 +26,9 @@ function getContent() { }); return ` -# Benchmark results +## Benchmark results -### On rollup published to L1 +### Rollup published to L1 ${header} ${separator} @@ -83,12 +83,12 @@ async function sendGitHubRequest(url, method = "GET", data = null) { const apiUrl = url.startsWith("http") ? 
url : `https://api.github.com${url}`; const headers = { Authorization: `Bearer ${GITHUB_TOKEN}`, + Accept: "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", "User-Agent": OWNER, }; if (data) headers["Content-Type"] = "application/json"; - const requestOptions = { method, headers }; - if (data) requestOptions.body = JSON.stringify(data); return new Promise((resolve, reject) => { const req = https.request(apiUrl, requestOptions, (res) => { @@ -125,6 +125,7 @@ async function sendGitHubRequest(url, method = "GET", data = null) { reject(error); }); + if (data) req.write(JSON.stringify(data)); req.end(); }); } From 3c89354abfc942187a96deac40756a55c27e2a33 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 29 Sep 2023 20:18:56 -0300 Subject: [PATCH 11/22] Allow commenting to fail --- scripts/ci/comment_e2e_benchmark.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/ci/comment_e2e_benchmark.js b/scripts/ci/comment_e2e_benchmark.js index 67dcdfc2a92..23e999e8039 100644 --- a/scripts/ci/comment_e2e_benchmark.js +++ b/scripts/ci/comment_e2e_benchmark.js @@ -131,8 +131,12 @@ async function sendGitHubRequest(url, method = "GET", data = null) { } async function main() { - const existingComment = await getExistingComment(); - await upsertComment(existingComment?.id); + try { + const existingComment = await getExistingComment(); + await upsertComment(existingComment?.id); + } catch (err) { + console.error(`Error during execution. 
Failing silently.`, err); + } } main(); From 9443b3d7461cf3e0a5d60afe0cfe66fd5ce368dd Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 29 Sep 2023 20:21:58 -0300 Subject: [PATCH 12/22] Only process the bench jsonls for the aggregated benchmark --- scripts/ci/assemble_e2e_benchmark.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/ci/assemble_e2e_benchmark.sh b/scripts/ci/assemble_e2e_benchmark.sh index 2540c90c992..e8c434454a1 100755 --- a/scripts/ci/assemble_e2e_benchmark.sh +++ b/scripts/ci/assemble_e2e_benchmark.sh @@ -13,7 +13,7 @@ BENCHMARK_FILE_JSON="benchmark.json" mkdir -p $LOG_FOLDER -aws s3 cp "s3://${BUCKET_NAME}/commits/${COMMIT_HASH}/" $LOG_FOLDER --include "bench*.jsonl" --recursive +aws s3 cp "s3://${BUCKET_NAME}/commits/${COMMIT_HASH}/" $LOG_FOLDER --exclude '*' --include 'bench*.jsonl' --recursive node scripts/ci/aggregate_e2e_benchmark.js echo "generated: $BENCHMARK_FILE_JSON" From bad2c053b2612a33546e6b40f6eef9652c817146 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 29 Sep 2023 20:27:49 -0300 Subject: [PATCH 13/22] Remove exit 1s from chatgpt --- scripts/ci/comment_e2e_benchmark.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/scripts/ci/comment_e2e_benchmark.js b/scripts/ci/comment_e2e_benchmark.js index 23e999e8039..a6dbecdccf6 100644 --- a/scripts/ci/comment_e2e_benchmark.js +++ b/scripts/ci/comment_e2e_benchmark.js @@ -54,8 +54,7 @@ async function getExistingComment() { const comments = JSON.parse(response); return comments.find((comment) => comment.body.includes(COMMENT_MARK)); } catch (error) { - console.error("Error checking for existing comments:", error.message); - process.exit(1); + throw new Error("Error checking for existing comments: " + error.message); } } @@ -73,8 +72,7 @@ async function upsertComment(existingCommentId) { await sendGitHubRequest(requestUrl, requestMethod, commentData); console.log("Comment added or updated successfully."); } catch 
(error) { - console.error("Error adding or updating comment:", error.message); - process.exit(1); + throw new Error("Error adding or updating comment: " + error.message); } } From 075410061c02dc02f19e76a040f896019dd88086 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Mon, 2 Oct 2023 11:47:33 -0300 Subject: [PATCH 14/22] Change benchmark and logs locations --- scripts/ci/assemble_e2e_benchmark.sh | 47 +++++++++++++++---- scripts/ci/store_test_benchmark_logs | 3 ++ .../end-to-end/scripts/upload_logs_to_s3.sh | 17 ++++++- 3 files changed, 57 insertions(+), 10 deletions(-) diff --git a/scripts/ci/assemble_e2e_benchmark.sh b/scripts/ci/assemble_e2e_benchmark.sh index e8c434454a1..9949a0591b5 100755 --- a/scripts/ci/assemble_e2e_benchmark.sh +++ b/scripts/ci/assemble_e2e_benchmark.sh @@ -11,21 +11,52 @@ LOG_FOLDER="${LOG_FOLDER:-log}" COMMIT_HASH="${COMMIT_HASH:-$(git rev-parse HEAD)}" BENCHMARK_FILE_JSON="benchmark.json" -mkdir -p $LOG_FOLDER +# Adapted from yarn-project/end-to-end/scripts/upload_logs_to_s3.sh +if [ "${CIRCLE_BRANCH:-}" = "master" ]; then + LOG_SOURCE_FOLDER="logs-v1/master/$COMMIT_HASH/" + BENCHMARK_TARGET_FILE="benchmarks-v1/master/$COMMIT_HASH.json" + BENCHMARK_LATEST_FILE="benchmarks-v1/latest.json" +elif [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then + LOG_SOURCE_FOLDER="logs-v1/pulls/$CIRCLE_PULL_REQUEST" + BENCHMARK_TARGET_FILE="benchmarks-v1/pulls/$CIRCLE_PULL_REQUEST.json" +else + echo "Can only run benchmark aggregation on master or on a PR. Ensure CIRCLE_BRANCH or CIRCLE_PULL_REQUEST are set." 
+  exit 1
+fi
 
-aws s3 cp "s3://${BUCKET_NAME}/commits/${COMMIT_HASH}/" $LOG_FOLDER --exclude '*' --include 'bench*.jsonl' --recursive
+# Download benchmark log files from S3 LOG_SOURCE_FOLDER into local LOG_FOLDER
+mkdir -p $LOG_FOLDER
+aws s3 cp "s3://${BUCKET_NAME}/${LOG_SOURCE_FOLDER}/" $LOG_FOLDER --exclude '*' --include 'bench*.jsonl' --recursive
+
+# Only generate the aggregated benchmark if we've managed to retrieve all the needed log files
+# If these runs were skipped due to no changes in their rebuild-patterns, then there's no need
+# to recompute the aggregated benchmark. Note that if one benchmark did run but others didn't,
+# this skips the whole aggregation. For now, that's fine because all benchmark files have the
+# same rebuild pattern rules. But if that changes, then we'd need to go up in the commit history
+# to find the latest log files for the unchanged benchmarks.
+EXPECTED_BENCHMARK_COUNT=$(find yarn-project/end-to-end/src -type f -name "bench*.test.ts" | wc -l)
+DOWNLOADED_BENCHMARK_COUNT=$(find $LOG_FOLDER -type f -name "*.jsonl" | wc -l)
+if [ "$DOWNLOADED_BENCHMARK_COUNT" -lt "$EXPECTED_BENCHMARK_COUNT" ]; then
+  echo Found $DOWNLOADED_BENCHMARK_COUNT out of $EXPECTED_BENCHMARK_COUNT benchmark log files. Exiting.
+ exit 0 +fi +# Generate the aggregated benchmark file node scripts/ci/aggregate_e2e_benchmark.js echo "generated: $BENCHMARK_FILE_JSON" -aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/commits/${COMMIT_HASH}/${BENCHMARK_FILE_JSON}" +# Upload it to master or pulls +aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/${BENCHMARK_TARGET_FILE}" -if [ "${CIRCLE_BRANCH:-}" = "master" ]; then - aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/benchmarks-v1/${COMMIT_HASH}.json" - aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/benchmarks-v1/master.json" -else +# If on master, also update the "latest" benchmark file +if [ -n "${BENCHMARK_LATEST_FILE:-}" ]; then + aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/${BENCHMARK_LATEST_FILE}" +fi + +# If on a pull request, comment on it +if [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then node scripts/ci/comment_e2e_benchmark.js - echo "commented on pr" + echo "commented on pr $CIRCLE_PULL_REQUEST" fi diff --git a/scripts/ci/store_test_benchmark_logs b/scripts/ci/store_test_benchmark_logs index 403d3ed9360..3554aa746d2 100755 --- a/scripts/ci/store_test_benchmark_logs +++ b/scripts/ci/store_test_benchmark_logs @@ -1,4 +1,7 @@ #!/bin/bash +# Script for storing barretenberg benchmark results. +# Uploads results to the AztecProtocol/benchmark-archive repository. + [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -e diff --git a/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh b/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh index 1fe7a66c1f2..4ce06915a03 100755 --- a/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh +++ b/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh @@ -1,7 +1,9 @@ #!/bin/bash + # Uploads to S3 the contents of the log file mounted on the end-to-end container, # which contains log entries with an associated event and metrics for it. 
-# Logs are uploaded to aztec-ci-artifacts/commits/$COMMIT/$JOB.jsonl +# Logs are uploaded to aztec-ci-artifacts/logs-v1/master/$COMMIT/$JOB.jsonl +# or to aztec-ci-artifacts/logs-v1/pulls/$PRNUMBER/$JOB.jsonl if on a PR [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu @@ -15,4 +17,15 @@ if [ ! -d "$LOG_FOLDER" ] || [ -z "$(ls -A "$LOG_FOLDER")" ]; then exit 0 fi -aws s3 cp $LOG_FOLDER "s3://${BUCKET_NAME}/commits/${COMMIT_HASH}/" --include "*.jsonl" --recursive +# Duplicated in scripts/ci/assemble_e2e_benchmark.sh +if [ "${CIRCLE_BRANCH:-}" = "master" ]; then + TARGET_FOLDER="logs-v1/master/$COMMIT_HASH/" +elif [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then + TARGET_FOLDER="logs-v1/pulls/$CIRCLE_PULL_REQUEST" +fi + +if [ -n "${TARGET_FOLDER:-}"]; then + aws s3 cp $LOG_FOLDER "s3://${BUCKET_NAME}/${TARGET_FOLDER}" --include "*.jsonl" --recursive +else + echo Skipping upload since no target folder was defined +fi \ No newline at end of file From 95f4ea4a4a37706ca8cf13a0f2e1c5db3fc2a895 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Mon, 2 Oct 2023 11:53:49 -0300 Subject: [PATCH 15/22] Trigger run --- .../end-to-end/src/benchmarks/bench_publish_rollup.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts index 7f1136cfaf2..27f43081f02 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts @@ -20,7 +20,7 @@ describe('benchmarks/publish_rollup', () => { token = await TokenContract.deploy(context.wallet).send().deployed(); [owner, recipient] = context.accounts.map(a => a.address); await token.methods._initialize(owner).send().wait(); - await token.methods.mint_public(owner, 10000n).send().wait(); + await token.methods.mint_public(owner, 100000n).send().wait(); await 
context.aztecNode?.getSequencer().stop(); }, 60_000); From 2571b839770b7f9404991ecd7ee0f2a8e7e9ba3c Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Mon, 2 Oct 2023 12:07:00 -0300 Subject: [PATCH 16/22] Fix script and trigger rerun --- yarn-project/end-to-end/scripts/upload_logs_to_s3.sh | 2 +- .../end-to-end/src/benchmarks/bench_publish_rollup.test.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh b/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh index 4ce06915a03..d5f0e6f5da6 100755 --- a/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh +++ b/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh @@ -24,7 +24,7 @@ elif [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then TARGET_FOLDER="logs-v1/pulls/$CIRCLE_PULL_REQUEST" fi -if [ -n "${TARGET_FOLDER:-}"]; then +if [ -n "${TARGET_FOLDER:-}" ]; then aws s3 cp $LOG_FOLDER "s3://${BUCKET_NAME}/${TARGET_FOLDER}" --include "*.jsonl" --recursive else echo Skipping upload since no target folder was defined diff --git a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts index 27f43081f02..1a925ed7c5b 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts @@ -20,7 +20,7 @@ describe('benchmarks/publish_rollup', () => { token = await TokenContract.deploy(context.wallet).send().deployed(); [owner, recipient] = context.accounts.map(a => a.address); await token.methods._initialize(owner).send().wait(); - await token.methods.mint_public(owner, 100000n).send().wait(); + await token.methods.mint_public(owner, 20000n).send().wait(); await context.aztecNode?.getSequencer().stop(); }, 60_000); From 9ab1fde8c810d328dcb93bb68b07a63a34f8ba89 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Mon, 2 Oct 2023 12:14:39 -0300 Subject: [PATCH 17/22] Fix logging when commenting 
failed --- scripts/ci/assemble_e2e_benchmark.sh | 3 +-- scripts/ci/comment_e2e_benchmark.js | 3 ++- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/ci/assemble_e2e_benchmark.sh b/scripts/ci/assemble_e2e_benchmark.sh index 9949a0591b5..1ada2baadc2 100755 --- a/scripts/ci/assemble_e2e_benchmark.sh +++ b/scripts/ci/assemble_e2e_benchmark.sh @@ -55,8 +55,7 @@ fi # If on a pull request, comment on it if [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then - node scripts/ci/comment_e2e_benchmark.js - echo "commented on pr $CIRCLE_PULL_REQUEST" + (node scripts/ci/comment_e2e_benchmark.js && echo "commented on pr $CIRCLE_PULL_REQUEST") || echo "failed commenting on pr" fi diff --git a/scripts/ci/comment_e2e_benchmark.js b/scripts/ci/comment_e2e_benchmark.js index a6dbecdccf6..16f6541a048 100644 --- a/scripts/ci/comment_e2e_benchmark.js +++ b/scripts/ci/comment_e2e_benchmark.js @@ -133,7 +133,8 @@ async function main() { const existingComment = await getExistingComment(); await upsertComment(existingComment?.id); } catch (err) { - console.error(`Error during execution. 
Failing silently.`, err); + console.error(`error while commenting on pull request:`, err); + process.exit(1); } } From a400f4750f146db8bd1e33ad87637555305f1a83 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Mon, 2 Oct 2023 14:29:42 -0300 Subject: [PATCH 18/22] Upload to pr number, not full url --- scripts/ci/assemble_e2e_benchmark.sh | 4 ++-- yarn-project/end-to-end/scripts/upload_logs_to_s3.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/ci/assemble_e2e_benchmark.sh b/scripts/ci/assemble_e2e_benchmark.sh index 1ada2baadc2..53173ea2b2e 100755 --- a/scripts/ci/assemble_e2e_benchmark.sh +++ b/scripts/ci/assemble_e2e_benchmark.sh @@ -17,8 +17,8 @@ if [ "${CIRCLE_BRANCH:-}" = "master" ]; then BENCHMARK_TARGET_FILE="benchmarks-v1/master/$COMMIT_HASH.json" BENCHMARK_LATEST_FILE="benchmarks-v1/latest.json" elif [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then - LOG_SOURCE_FOLDER="logs-v1/pulls/$CIRCLE_PULL_REQUEST" - BENCHMARK_TARGET_FILE="benchmarks-v1/pulls/$CIRCLE_PULL_REQUEST.json" + LOG_SOURCE_FOLDER="logs-v1/pulls/${CIRCLE_PULL_REQUEST##*/}" + BENCHMARK_TARGET_FILE="benchmarks-v1/pulls/${CIRCLE_PULL_REQUEST##*/}.json" else echo "Can only run benchmark aggregation on master or on a PR. Ensure CIRCLE_BRANCH or CIRCLE_PULL_REQUEST are set." 
exit 1 diff --git a/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh b/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh index d5f0e6f5da6..eb81df8bfac 100755 --- a/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh +++ b/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh @@ -21,7 +21,7 @@ fi if [ "${CIRCLE_BRANCH:-}" = "master" ]; then TARGET_FOLDER="logs-v1/master/$COMMIT_HASH/" elif [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then - TARGET_FOLDER="logs-v1/pulls/$CIRCLE_PULL_REQUEST" + TARGET_FOLDER="logs-v1/pulls/${CIRCLE_PULL_REQUEST##*/}" fi if [ -n "${TARGET_FOLDER:-}" ]; then From c7cdec265a075db539f8aab4645fa29c8bac46dd Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Mon, 2 Oct 2023 14:30:03 -0300 Subject: [PATCH 19/22] Trigger run --- .../end-to-end/src/benchmarks/bench_publish_rollup.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts index 1a925ed7c5b..7f1136cfaf2 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts @@ -20,7 +20,7 @@ describe('benchmarks/publish_rollup', () => { token = await TokenContract.deploy(context.wallet).send().deployed(); [owner, recipient] = context.accounts.map(a => a.address); await token.methods._initialize(owner).send().wait(); - await token.methods.mint_public(owner, 20000n).send().wait(); + await token.methods.mint_public(owner, 10000n).send().wait(); await context.aztecNode?.getSequencer().stop(); }, 60_000); From d1531a53c170bf6944a871beb991fe6e2ee62497 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Mon, 2 Oct 2023 15:04:45 -0300 Subject: [PATCH 20/22] Add rollup size 128 for testing --- .../end-to-end/src/benchmarks/bench_publish_rollup.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts index 7f1136cfaf2..77510788c7a 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts @@ -7,7 +7,7 @@ import times from 'lodash.times'; import { setup } from '../fixtures/utils.js'; -const ROLLUP_SIZES = process.env.ROLLUP_SIZES ? process.env.ROLLUP_SIZES.split(',').map(Number) : [8, 32]; +const ROLLUP_SIZES = process.env.ROLLUP_SIZES ? process.env.ROLLUP_SIZES.split(',').map(Number) : [8, 32, 128]; describe('benchmarks/publish_rollup', () => { let context: Awaited>; From b495c29bd2159a8783bd01eb2ddd1db09506ce68 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Tue, 3 Oct 2023 09:18:32 -0300 Subject: [PATCH 21/22] Fix build issue after rebase --- .../end-to-end/src/benchmarks/bench_publish_rollup.test.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts index 77510788c7a..5b781b0541f 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts @@ -17,9 +17,8 @@ describe('benchmarks/publish_rollup', () => { beforeEach(async () => { context = await setup(2, { maxTxsPerBlock: 1024 }); - token = await TokenContract.deploy(context.wallet).send().deployed(); + token = await TokenContract.deploy(context.wallet, owner).send().deployed(); [owner, recipient] = context.accounts.map(a => a.address); - await token.methods._initialize(owner).send().wait(); await token.methods.mint_public(owner, 10000n).send().wait(); await context.aztecNode?.getSequencer().stop(); }, 60_000); From 5b5f10c9075214e79c33d1855f64bbb04cc5b817 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Tue, 3 Oct 2023 
09:38:33 -0300 Subject: [PATCH 22/22] Fix initialisation in test --- .../end-to-end/src/benchmarks/bench_publish_rollup.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts index 5b781b0541f..8c42bafb577 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts @@ -17,8 +17,8 @@ describe('benchmarks/publish_rollup', () => { beforeEach(async () => { context = await setup(2, { maxTxsPerBlock: 1024 }); - token = await TokenContract.deploy(context.wallet, owner).send().deployed(); [owner, recipient] = context.accounts.map(a => a.address); + token = await TokenContract.deploy(context.wallet, owner).send().deployed(); await token.methods.mint_public(owner, 10000n).send().wait(); await context.aztecNode?.getSequencer().stop(); }, 60_000);