From f54c3cb5be291696951dd093f399f3f15e1fd007 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 4 Jul 2024 14:55:24 -0300 Subject: [PATCH] feat: Remove proof from L1 Rollup process Batch proof for multiple blocks will be uploaded separately. Related to #7346 --- l1-contracts/src/core/Rollup.sol | 40 +------------------ l1-contracts/src/core/interfaces/IRollup.sol | 7 +--- l1-contracts/test/Rollup.t.sol | 10 ++--- yarn-project/accounts/package.local.json | 9 +---- .../archiver/src/archiver/archiver.test.ts | 4 +- .../archiver/src/archiver/eth_log_handlers.ts | 2 +- .../composed/integration_l1_publisher.test.ts | 22 ++-------- .../integration_proof_verification.test.ts | 6 +-- .../package.local.json | 9 +---- .../protocol-contracts/package.local.json | 9 +---- .../src/publisher/l1-publisher.test.ts | 18 ++++----- .../src/publisher/l1-publisher.ts | 11 +---- .../src/publisher/viem-tx-sender.ts | 7 +--- .../src/sequencer/sequencer.ts | 8 ++-- 14 files changed, 37 insertions(+), 125 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 4ef4bc84302..8dd3e22055f 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -101,14 +101,8 @@ contract Rollup is IRollup { * @notice Process an incoming L2 block and progress the state * @param _header - The L2 block header * @param _archive - A root of the archive tree after the L2 block is applied - * @param _proof - The proof of correct execution */ - function process( - bytes calldata _header, - bytes32 _archive, - bytes calldata _aggregationObject, - bytes calldata _proof - ) external override(IRollup) { + function process(bytes calldata _header, bytes32 _archive) external override(IRollup) { // Decode and validate header HeaderLib.Header memory header = HeaderLib.decode(_header); HeaderLib.validate(header, VERSION, lastBlockTs, archive); @@ -124,38 +118,6 @@ contract Rollup is IRollup { revert Errors.Rollup__InvalidSequencer(msg.sender); } - bytes32[] memory publicInputs = - new bytes32[](3 + Constants.HEADER_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH); - // the archive tree root - publicInputs[0] = _archive; - // this is the _next_ available leaf in the archive tree - // normally this should be equal to the block number (since leaves are 0-indexed and blocks 1-indexed) - // but in yarn-project/merkle-tree/src/new_tree.ts we prefill the tree so that block N is in leaf N - publicInputs[1] = bytes32(header.globalVariables.blockNumber + 1); - - publicInputs[2] = vkTreeRoot; - - bytes32[] memory headerFields = HeaderLib.toFields(header); - for (uint256 i = 0; i < headerFields.length; i++) { - publicInputs[i + 3] = headerFields[i]; - } - - // the block proof is recursive, which means it comes with an aggregation object - // this snippet copies it into the public inputs needed for verification - // it also guards against empty _aggregationObject used with mocked proofs - uint256 aggregationLength = _aggregationObject.length / 32; - for (uint256 i = 0; i < Constants.AGGREGATION_OBJECT_LENGTH && i < aggregationLength; i++) { - bytes32 part; - assembly { - part := calldataload(add(_aggregationObject.offset, mul(i, 32))) - } - publicInputs[i + 3 + Constants.HEADER_LENGTH] = part; - } - - if (!verifier.verify(_proof, publicInputs)) { - revert Errors.Rollup__InvalidProof(); - } - archive = _archive; lastBlockTs = block.timestamp; diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index a53fb47bcb9..410a0017714 100644 --- 
a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -5,12 +5,7 @@ pragma solidity >=0.8.18; interface IRollup { event L2BlockProcessed(uint256 indexed blockNumber); - function process( - bytes calldata _header, - bytes32 _archive, - bytes calldata _aggregationObject, - bytes memory _proof - ) external; + function process(bytes calldata _header, bytes32 _archive) external; function setVerifier(address _verifier) external; } diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index 50f7d7f66d7..a9af9338165 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -85,7 +85,7 @@ contract RollupTest is DecoderBase { availabilityOracle.publish(body); vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidChainId.selector, 0x420, 31337)); - rollup.process(header, archive, bytes(""), bytes("")); + rollup.process(header, archive); } function testRevertInvalidVersion() public { @@ -101,7 +101,7 @@ contract RollupTest is DecoderBase { availabilityOracle.publish(body); vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidVersion.selector, 0x420, 1)); - rollup.process(header, archive, bytes(""), bytes("")); + rollup.process(header, archive); } function testRevertTimestampInFuture() public { @@ -118,7 +118,7 @@ contract RollupTest is DecoderBase { availabilityOracle.publish(body); vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampInFuture.selector)); - rollup.process(header, archive, bytes(""), bytes("")); + rollup.process(header, archive); } function testRevertTimestampTooOld() public { @@ -133,7 +133,7 @@ contract RollupTest is DecoderBase { availabilityOracle.publish(body); vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampTooOld.selector)); - rollup.process(header, archive, bytes(""), bytes("")); + rollup.process(header, archive); } function _testBlock(string memory name) public { @@ -153,7 +153,7 @@ contract RollupTest is DecoderBase { uint256 toConsume = inbox.toConsume(); vm.record(); - rollup.process(header, archive, bytes(""), bytes("")); + rollup.process(header, archive); assertEq(inbox.toConsume(), toConsume + 1, "Message subtree not consumed"); diff --git a/yarn-project/accounts/package.local.json b/yarn-project/accounts/package.local.json index 6e3a34a9358..d442f6c8af9 100644 --- a/yarn-project/accounts/package.local.json +++ b/yarn-project/accounts/package.local.json @@ -7,10 +7,5 @@ "build:ts": "tsc -b", "clean": "rm -rf ./dest .tsbuildinfo ./artifacts" }, - "files": [ - "dest", - "src", - "artifacts", - "!*.test.*" - ] -} \ No newline at end of file + "files": ["dest", "src", "artifacts", "!*.test.*"] +} diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index 1040f308fae..ea7cbba1883 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -248,12 +248,10 @@ function makeMessageSentEvent(l1BlockNum: bigint, l2BlockNumber: bigint, index: function makeRollupTx(l2Block: L2Block) { const header = toHex(l2Block.header.toBuffer()); const archive = toHex(l2Block.archive.root.toBuffer()); - const aggregationObject = `0x`; - const proof = `0x`; const input = encodeFunctionData({ abi: RollupAbi, functionName: 'process', - args: [header, archive, aggregationObject, proof], + args: [header, archive], }); return { input } as Transaction; } diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts 
b/yarn-project/archiver/src/archiver/eth_log_handlers.ts index 5fe3fb4d08b..00d0b6d4bef 100644 --- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts +++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts @@ -91,7 +91,7 @@ async function getBlockMetadataFromRollupTx( if (functionName !== 'process') { throw new Error(`Unexpected method called ${functionName}`); } - const [headerHex, archiveRootHex] = args! as readonly [Hex, Hex, Hex, Hex]; + const [headerHex, archiveRootHex] = args! as readonly [Hex, Hex]; const header = Header.fromBuffer(Buffer.from(hexToBytes(headerHex))); diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index c797483e372..9c0ee2dfaa4 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -27,9 +27,7 @@ import { MAX_NULLIFIERS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, - type Proof, PublicDataUpdateRequest, - makeEmptyProof, } from '@aztec/circuits.js'; import { fr, makeProof } from '@aztec/circuits.js/testing'; import { type L1ContractAddresses, createEthereumChain } from '@aztec/ethereum'; @@ -89,7 +87,6 @@ describe('L1Publisher integration', () => { let outbox: GetContractReturnType>; let publisher: L1Publisher; - let l2Proof: Proof; let builder: TxProver; let builderDb: MerkleTrees; @@ -147,7 +144,6 @@ describe('L1Publisher integration', () => { const worldStateSynchronizer = new ServerWorldStateSynchronizer(tmpStore, builderDb, blockSource, worldStateConfig); await worldStateSynchronizer.start(); builder = await TxProver.new(config, worldStateSynchronizer, new NoopTelemetryClient()); - l2Proof = makeEmptyProof(); publisher = getL1Publisher({ rpcUrl: config.rpcUrl, @@ -411,7 +407,7 @@ describe('L1Publisher integration', () => { writeJson(`mixed_block_${i}`, block, l1ToL2Content, recipientAddress, deployerAccount.address); - await publisher.processL2Block(block, [], l2Proof); + await publisher.processL2Block(block); const logs = await publicClient.getLogs({ address: rollupAddress, @@ -431,12 +427,7 @@ describe('L1Publisher integration', () => { const expectedData = encodeFunctionData({ abi: RollupAbi, functionName: 'process', - args: [ - `0x${block.header.toBuffer().toString('hex')}`, - `0x${block.archive.root.toBuffer().toString('hex')}`, - `0x`, // empty aggregation object - `0x${l2Proof.withoutPublicInputs().toString('hex')}`, - ], + args: [`0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`], }); expect(ethTx.input).toEqual(expectedData); @@ -501,7 +492,7 @@ describe('L1Publisher integration', () => { writeJson(`empty_block_${i}`, block, [], AztecAddress.ZERO, deployerAccount.address); - await publisher.processL2Block(block, [], l2Proof); + await publisher.processL2Block(block); const logs = await publicClient.getLogs({ address: rollupAddress, @@ -521,12 +512,7 @@ describe('L1Publisher integration', () => { const expectedData = encodeFunctionData({ abi: RollupAbi, functionName: 'process', - args: [ - `0x${block.header.toBuffer().toString('hex')}`, - `0x${block.archive.root.toBuffer().toString('hex')}`, - `0x`, // empty aggregation object - `0x${l2Proof.withoutPublicInputs().toString('hex')}`, - ], + args: [`0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`], }); expect(ethTx.input).toEqual(expectedData); } 
diff --git a/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts b/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts index 85fe77ace57..54a88297fe1 100644 --- a/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts @@ -3,7 +3,7 @@ import { BBCircuitVerifier } from '@aztec/bb-prover'; import { AGGREGATION_OBJECT_LENGTH, Fr, HEADER_LENGTH, Proof } from '@aztec/circuits.js'; import { type L1ContractAddresses } from '@aztec/ethereum'; import { type Logger } from '@aztec/foundation/log'; -import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; +import { BufferReader } from '@aztec/foundation/serialize'; import { AvailabilityOracleAbi, RollupAbi } from '@aztec/l1-artifacts'; import { type Anvil } from '@viem/anvil'; @@ -157,7 +157,7 @@ describe('proof_verification', () => { }); }); - describe('Rollup', () => { + describe.skip('Rollup', () => { let availabilityContract: GetContractReturnType; let rollupContract: GetContractReturnType; @@ -183,8 +183,6 @@ describe('proof_verification', () => { const args = [ `0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`, - `0x${serializeToBuffer(aggregationObject).toString('hex')}`, - `0x${proof.withoutPublicInputs().toString('hex')}`, ] as const; await expect(rollupContract.write.process(args)).resolves.toBeDefined(); diff --git a/yarn-project/noir-protocol-circuits-types/package.local.json b/yarn-project/noir-protocol-circuits-types/package.local.json index e3deaa112a7..d496e249715 100644 --- a/yarn-project/noir-protocol-circuits-types/package.local.json +++ b/yarn-project/noir-protocol-circuits-types/package.local.json @@ -3,10 +3,5 @@ "build": "yarn clean && yarn generate && tsc -b", "clean": "rm -rf ./dest .tsbuildinfo src/types artifacts" }, - "files": [ - "dest", - "src", - "artifacts", - "!*.test.*" - ] -} \ No newline at end of file + "files": ["dest", "src", "artifacts", "!*.test.*"] +} diff --git a/yarn-project/protocol-contracts/package.local.json b/yarn-project/protocol-contracts/package.local.json index 6e3a34a9358..d442f6c8af9 100644 --- a/yarn-project/protocol-contracts/package.local.json +++ b/yarn-project/protocol-contracts/package.local.json @@ -7,10 +7,5 @@ "build:ts": "tsc -b", "clean": "rm -rf ./dest .tsbuildinfo ./artifacts" }, - "files": [ - "dest", - "src", - "artifacts", - "!*.test.*" - ] -} \ No newline at end of file + "files": ["dest", "src", "artifacts", "!*.test.*"] +} diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 2e301e40e3d..747b7caa569 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -56,7 +56,7 @@ describe('L1Publisher', () => { }); it('publishes l2 block to l1', async () => { - const result = await publisher.processL2Block(l2Block, [], makeEmptyProof()); + const result = await publisher.processL2Block(l2Block); expect(result).toEqual(true); expect(txSender.sendProcessTx).toHaveBeenCalledWith({ @@ -71,7 +71,7 @@ describe('L1Publisher', () => { it('does not publish if last archive root is different to expected', async () => { txSender.getCurrentArchive.mockResolvedValueOnce(L2Block.random(43).archive.root.toBuffer()); - const result = await publisher.processL2Block(l2Block, [], makeEmptyProof()); + 
const result = await publisher.processL2Block(l2Block); expect(result).toBe(false); expect(txSender.sendPublishTx).not.toHaveBeenCalled(); expect(txSender.sendProcessTx).not.toHaveBeenCalled(); @@ -80,7 +80,7 @@ describe('L1Publisher', () => { it('does not retry if sending a publish tx fails', async () => { txSender.sendPublishTx.mockReset().mockRejectedValueOnce(new Error()).mockResolvedValueOnce(publishTxHash); - const result = await publisher.processL2Block(l2Block, [], makeEmptyProof()); + const result = await publisher.processL2Block(l2Block); expect(result).toEqual(false); expect(txSender.sendPublishTx).toHaveBeenCalledTimes(1); @@ -90,7 +90,7 @@ describe('L1Publisher', () => { it('does not retry if sending a process tx fails', async () => { txSender.sendProcessTx.mockReset().mockRejectedValueOnce(new Error()).mockResolvedValueOnce(processTxHash); - const result = await publisher.processL2Block(l2Block, [], makeEmptyProof()); + const result = await publisher.processL2Block(l2Block); expect(result).toEqual(false); expect(txSender.sendPublishTx).toHaveBeenCalledTimes(1); @@ -105,7 +105,7 @@ describe('L1Publisher', () => { .mockRejectedValueOnce(new Error()) .mockResolvedValueOnce(processTxReceipt); - const result = await publisher.processL2Block(l2Block, [], makeEmptyProof()); + const result = await publisher.processL2Block(l2Block); expect(result).toEqual(true); expect(txSender.getTransactionReceipt).toHaveBeenCalledTimes(4); @@ -114,7 +114,7 @@ describe('L1Publisher', () => { it('returns false if publish tx reverts', async () => { txSender.getTransactionReceipt.mockReset().mockResolvedValueOnce({ ...publishTxReceipt, status: false }); - const result = await publisher.processL2Block(l2Block, [], makeEmptyProof()); + const result = await publisher.processL2Block(l2Block); expect(result).toEqual(false); }); @@ -125,7 +125,7 @@ describe('L1Publisher', () => { .mockResolvedValueOnce(publishTxReceipt) .mockResolvedValueOnce({ ...publishTxReceipt, status: false }); - const result = await publisher.processL2Block(l2Block, [], makeEmptyProof()); + const result = await publisher.processL2Block(l2Block); expect(result).toEqual(false); }); @@ -133,7 +133,7 @@ describe('L1Publisher', () => { it('returns false if sending publish tx is interrupted', async () => { txSender.sendPublishTx.mockReset().mockImplementationOnce(() => sleep(10, publishTxHash)); - const resultPromise = publisher.processL2Block(l2Block, [], makeEmptyProof()); + const resultPromise = publisher.processL2Block(l2Block); publisher.interrupt(); const result = await resultPromise; @@ -144,7 +144,7 @@ describe('L1Publisher', () => { it('returns false if sending process tx is interrupted', async () => { txSender.sendProcessTx.mockReset().mockImplementationOnce(() => sleep(10, processTxHash)); - const resultPromise = publisher.processL2Block(l2Block, [], makeEmptyProof()); + const resultPromise = publisher.processL2Block(l2Block); publisher.interrupt(); const result = await resultPromise; diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 9cc82d3b6e2..6e0f4444c5a 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -1,8 +1,7 @@ import { type L2Block } from '@aztec/circuit-types'; import { type L1PublishStats } from '@aztec/circuit-types/stats'; -import { type EthAddress, type Fr, type Proof } from '@aztec/circuits.js'; +import { type EthAddress } from 
'@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; -import { serializeToBuffer } from '@aztec/foundation/serialize'; import { InterruptibleSleep } from '@aztec/foundation/sleep'; import pick from 'lodash.pick'; @@ -97,10 +96,6 @@ export type L1ProcessArgs = { archive: Buffer; /** L2 block body. */ body: Buffer; - /** Aggregation object needed to verify the proof */ - aggregationObject: Buffer; - /** Root rollup proof of the L2 block. */ - proof: Buffer; }; /** @@ -132,7 +127,7 @@ export class L1Publisher implements L2BlockReceiver { * @param block - L2 block to publish. * @returns True once the tx has been confirmed and is successful, false on revert or interrupt, blocks otherwise. */ - public async processL2Block(block: L2Block, aggregationObject: Fr[], proof: Proof): Promise { + public async processL2Block(block: L2Block): Promise { // TODO(#4148) Remove this block number check, it's here because we don't currently have proper genesis state on the contract const lastArchive = block.header.lastArchive.root.toBuffer(); if (block.number != 1 && !(await this.checkLastArchiveHash(lastArchive))) { @@ -180,8 +175,6 @@ export class L1Publisher implements L2BlockReceiver { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), body: encodedBody, - aggregationObject: serializeToBuffer(aggregationObject), - proof: proof.withoutPublicInputs(), }; // Process block diff --git a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts index c630f043638..2bd1d9cb275 100644 --- a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts +++ b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts @@ -157,12 +157,7 @@ export class ViemTxSender implements L1PublisherTxSender { * @returns The hash of the mined tx. */ async sendProcessTx(encodedData: ProcessTxArgs): Promise { - const args = [ - `0x${encodedData.header.toString('hex')}`, - `0x${encodedData.archive.toString('hex')}`, - `0x${encodedData.aggregationObject.toString('hex')}`, - `0x${encodedData.proof.toString('hex')}`, - ] as const; + const args = [`0x${encodedData.header.toString('hex')}`, `0x${encodedData.archive.toString('hex')}`] as const; const gas = await this.rollupContract.estimateGas.process(args, { account: this.account, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index fb20cb3507f..3c730e7fd6a 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -294,7 +294,7 @@ export class Sequencer { await assertBlockHeight(); // Block is proven, now finalise and publish! 
- const { block, aggregationObject, proof } = await this.prover.finaliseBlock(); + const { block } = await this.prover.finaliseBlock(); await assertBlockHeight(); @@ -306,7 +306,7 @@ export class Sequencer { ...block.getStats(), } satisfies L2BlockBuiltStats); - await this.publishL2Block(block, aggregationObject, proof); + await this.publishL2Block(block); this.log.info(`Submitted rollup block ${block.number} with ${processedTxs.length} transactions`); } @@ -317,10 +317,10 @@ export class Sequencer { @trackSpan('Sequencer.publishL2Block', block => ({ [Attributes.BLOCK_NUMBER]: block.number, })) - protected async publishL2Block(block: L2Block, aggregationObject: Fr[], proof: Proof) { + protected async publishL2Block(block: L2Block) { // Publishes new block to the network and awaits the tx to be mined this.state = SequencerState.PUBLISHING_BLOCK; - const publishedL2Block = await this.publisher.processL2Block(block, aggregationObject, proof); + const publishedL2Block = await this.publisher.processL2Block(block); if (publishedL2Block) { this.lastPublishedBlock = block.number; } else {
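
A minimal sketch of the caller-side shape after this change, assuming viem's `encodeFunctionData` and the `RollupAbi` export already used in this diff; `encodeProcessCall` and its arguments are illustrative placeholders, not code from the patch. It shows the calldata the publisher builds once the aggregation object and proof parameters are dropped from `Rollup.process`:

    import { encodeFunctionData, type Hex } from 'viem';
    import { RollupAbi } from '@aztec/l1-artifacts';

    // Build calldata for the slimmed-down process(header, archive) entrypoint.
    // Only the serialized block header and the new archive root go on-chain;
    // the aggregation object and proof arguments no longer exist.
    function encodeProcessCall(header: Buffer, archiveRoot: Buffer): Hex {
      return encodeFunctionData({
        abi: RollupAbi,
        functionName: 'process',
        args: [`0x${header.toString('hex')}`, `0x${archiveRoot.toString('hex')}`],
      });
    }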
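
The archiver-side counterpart, mirroring the updated `getBlockMetadataFromRollupTx` in this diff: with the two-argument `process`, the decoded tuple narrows from four Hex fields to two. A sketch under the same assumptions (viem, `RollupAbi`); `decodeProcessCall` is a hypothetical helper, not part of the patch:

    import { decodeFunctionData, type Hex } from 'viem';
    import { RollupAbi } from '@aztec/l1-artifacts';

    // Recover the header and archive root from process() calldata.
    function decodeProcessCall(input: Hex): { headerHex: Hex; archiveRootHex: Hex } {
      const { functionName, args } = decodeFunctionData({ abi: RollupAbi, data: input });
      if (functionName !== 'process') {
        throw new Error(`Unexpected method called ${functionName}`);
      }
      const [headerHex, archiveRootHex] = args! as readonly [Hex, Hex];
      return { headerHex, archiveRootHex };
    }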