feat: Remove proof from L1 Rollup process
A batch proof covering multiple blocks will be uploaded separately.

Related to #7346
spalladino committed Jul 8, 2024
1 parent 9f9ded2 commit f54c3cb
Showing 14 changed files with 37 additions and 125 deletions.
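To make the interface change concrete, the sketch below shows how the process calldata is encoded after this commit, using viem's encodeFunctionData and the generated RollupAbi from @aztec/l1-artifacts (the same pattern used by the archiver and publisher tests further down). The header and archive values are placeholders rather than real block data.

import { RollupAbi } from '@aztec/l1-artifacts';
import { encodeFunctionData, type Hex } from 'viem';

// Placeholder inputs; in the real code they come from block.header.toBuffer()
// and block.archive.root.toBuffer(), hex encoded.
const header: Hex = `0x${'00'.repeat(608)}`;
const archive: Hex = `0x${'11'.repeat(32)}`;

// Before this commit the call was process(header, archive, aggregationObject, proof).
// Now only the header and the new archive root are submitted; the batch proof
// covering multiple blocks is uploaded separately.
const calldata = encodeFunctionData({
  abi: RollupAbi,
  functionName: 'process',
  args: [header, archive],
});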
40 changes: 1 addition & 39 deletions l1-contracts/src/core/Rollup.sol
@@ -101,14 +101,8 @@ contract Rollup is IRollup {
* @notice Process an incoming L2 block and progress the state
* @param _header - The L2 block header
* @param _archive - A root of the archive tree after the L2 block is applied
* @param _proof - The proof of correct execution
*/
function process(
bytes calldata _header,
bytes32 _archive,
bytes calldata _aggregationObject,
bytes calldata _proof
) external override(IRollup) {
function process(bytes calldata _header, bytes32 _archive) external override(IRollup) {
// Decode and validate header
HeaderLib.Header memory header = HeaderLib.decode(_header);
HeaderLib.validate(header, VERSION, lastBlockTs, archive);
@@ -124,38 +118,6 @@ contract Rollup is IRollup {
revert Errors.Rollup__InvalidSequencer(msg.sender);
}

bytes32[] memory publicInputs =
new bytes32[](3 + Constants.HEADER_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH);
// the archive tree root
publicInputs[0] = _archive;
// this is the _next_ available leaf in the archive tree
// normally this should be equal to the block number (since leaves are 0-indexed and blocks 1-indexed)
// but in yarn-project/merkle-tree/src/new_tree.ts we prefill the tree so that block N is in leaf N
publicInputs[1] = bytes32(header.globalVariables.blockNumber + 1);

publicInputs[2] = vkTreeRoot;

bytes32[] memory headerFields = HeaderLib.toFields(header);
for (uint256 i = 0; i < headerFields.length; i++) {
publicInputs[i + 3] = headerFields[i];
}

// the block proof is recursive, which means it comes with an aggregation object
// this snippet copies it into the public inputs needed for verification
// it also guards against empty _aggregationObject used with mocked proofs
uint256 aggregationLength = _aggregationObject.length / 32;
for (uint256 i = 0; i < Constants.AGGREGATION_OBJECT_LENGTH && i < aggregationLength; i++) {
bytes32 part;
assembly {
part := calldataload(add(_aggregationObject.offset, mul(i, 32)))
}
publicInputs[i + 3 + Constants.HEADER_LENGTH] = part;
}

if (!verifier.verify(_proof, publicInputs)) {
revert Errors.Rollup__InvalidProof();
}

archive = _archive;
lastBlockTs = block.timestamp;

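For reference, the public-inputs ordering enforced by the deleted verification can be written out in TypeScript as below. This is only an illustration of the removed layout (archive root, next archive leaf index, vk tree root, header fields, aggregation object), assuming the numeric HEADER_LENGTH and AGGREGATION_OBJECT_LENGTH constants exported by @aztec/circuits.js; it is not the API of the upcoming separate batch-proof submission, and the helper name is hypothetical.

import { AGGREGATION_OBJECT_LENGTH, HEADER_LENGTH } from '@aztec/circuits.js';

type FieldHex = `0x${string}`; // a 32-byte field element, hex encoded

// Mirrors the ordering the contract used to build before calling the verifier:
// [archive root, blockNumber + 1 (next archive leaf), vk tree root,
//  HEADER_LENGTH header fields, up to AGGREGATION_OBJECT_LENGTH aggregation fields].
function buildRemovedPublicInputs(
  archive: FieldHex,
  blockNumber: bigint,
  vkTreeRoot: FieldHex,
  headerFields: FieldHex[],
  aggregationObject: FieldHex[],
): FieldHex[] {
  const publicInputs: FieldHex[] = new Array(3 + HEADER_LENGTH + AGGREGATION_OBJECT_LENGTH).fill(
    `0x${'00'.repeat(32)}`,
  );
  publicInputs[0] = archive;
  publicInputs[1] = `0x${(blockNumber + 1n).toString(16).padStart(64, '0')}`;
  publicInputs[2] = vkTreeRoot;
  headerFields.forEach((field, i) => (publicInputs[i + 3] = field));
  aggregationObject
    .slice(0, AGGREGATION_OBJECT_LENGTH)
    .forEach((part, i) => (publicInputs[i + 3 + HEADER_LENGTH] = part));
  return publicInputs;
}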
7 changes: 1 addition & 6 deletions l1-contracts/src/core/interfaces/IRollup.sol
@@ -5,12 +5,7 @@ pragma solidity >=0.8.18;
interface IRollup {
event L2BlockProcessed(uint256 indexed blockNumber);

function process(
bytes calldata _header,
bytes32 _archive,
bytes calldata _aggregationObject,
bytes memory _proof
) external;
function process(bytes calldata _header, bytes32 _archive) external;

function setVerifier(address _verifier) external;
}
10 changes: 5 additions & 5 deletions l1-contracts/test/Rollup.t.sol
@@ -85,7 +85,7 @@ contract RollupTest is DecoderBase {
availabilityOracle.publish(body);

vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidChainId.selector, 0x420, 31337));
rollup.process(header, archive, bytes(""), bytes(""));
rollup.process(header, archive);
}

function testRevertInvalidVersion() public {
@@ -101,7 +101,7 @@ contract RollupTest is DecoderBase {
availabilityOracle.publish(body);

vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidVersion.selector, 0x420, 1));
rollup.process(header, archive, bytes(""), bytes(""));
rollup.process(header, archive);
}

function testRevertTimestampInFuture() public {
@@ -118,7 +118,7 @@ contract RollupTest is DecoderBase {
availabilityOracle.publish(body);

vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampInFuture.selector));
rollup.process(header, archive, bytes(""), bytes(""));
rollup.process(header, archive);
}

function testRevertTimestampTooOld() public {
@@ -133,7 +133,7 @@ contract RollupTest is DecoderBase {
availabilityOracle.publish(body);

vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampTooOld.selector));
rollup.process(header, archive, bytes(""), bytes(""));
rollup.process(header, archive);
}

function _testBlock(string memory name) public {
@@ -153,7 +153,7 @@ contract RollupTest is DecoderBase {
uint256 toConsume = inbox.toConsume();

vm.record();
rollup.process(header, archive, bytes(""), bytes(""));
rollup.process(header, archive);

assertEq(inbox.toConsume(), toConsume + 1, "Message subtree not consumed");

9 changes: 2 additions & 7 deletions yarn-project/accounts/package.local.json
@@ -7,10 +7,5 @@
"build:ts": "tsc -b",
"clean": "rm -rf ./dest .tsbuildinfo ./artifacts"
},
"files": [
"dest",
"src",
"artifacts",
"!*.test.*"
]
}
"files": ["dest", "src", "artifacts", "!*.test.*"]
}
4 changes: 1 addition & 3 deletions yarn-project/archiver/src/archiver/archiver.test.ts
@@ -248,12 +248,10 @@ function makeMessageSentEvent(l1BlockNum: bigint, l2BlockNumber: bigint, index:
function makeRollupTx(l2Block: L2Block) {
const header = toHex(l2Block.header.toBuffer());
const archive = toHex(l2Block.archive.root.toBuffer());
const aggregationObject = `0x`;
const proof = `0x`;
const input = encodeFunctionData({
abi: RollupAbi,
functionName: 'process',
args: [header, archive, aggregationObject, proof],
args: [header, archive],
});
return { input } as Transaction<bigint, number>;
}
2 changes: 1 addition & 1 deletion yarn-project/archiver/src/archiver/eth_log_handlers.ts
@@ -91,7 +91,7 @@ async function getBlockMetadataFromRollupTx(
if (functionName !== 'process') {
throw new Error(`Unexpected method called ${functionName}`);
}
const [headerHex, archiveRootHex] = args! as readonly [Hex, Hex, Hex, Hex];
const [headerHex, archiveRootHex] = args! as readonly [Hex, Hex];

const header = Header.fromBuffer(Buffer.from(hexToBytes(headerHex)));

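On the archiver side, getBlockMetadataFromRollupTx now expects exactly this two-argument shape when it decodes the transaction input. A minimal sketch of that decoding step, assuming viem's decodeFunctionData and the RollupAbi; the surrounding block-retrieval logic is omitted and the helper name is hypothetical.

import { RollupAbi } from '@aztec/l1-artifacts';
import { decodeFunctionData, type Hex } from 'viem';

// txInput is the calldata of the L1 transaction that called Rollup.process.
function extractHeaderAndArchive(txInput: Hex): { headerHex: Hex; archiveRootHex: Hex } {
  const { functionName, args } = decodeFunctionData({ abi: RollupAbi, data: txInput });
  if (functionName !== 'process') {
    throw new Error(`Unexpected method called ${functionName}`);
  }
  // Only two arguments remain: the serialized header and the new archive root.
  const [headerHex, archiveRootHex] = args as readonly [Hex, Hex];
  return { headerHex, archiveRootHex };
}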
@@ -27,9 +27,7 @@ import {
MAX_NULLIFIERS_PER_TX,
MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
type Proof,
PublicDataUpdateRequest,
makeEmptyProof,
} from '@aztec/circuits.js';
import { fr, makeProof } from '@aztec/circuits.js/testing';
import { type L1ContractAddresses, createEthereumChain } from '@aztec/ethereum';
@@ -89,7 +87,6 @@ describe('L1Publisher integration', () => {
let outbox: GetContractReturnType<typeof OutboxAbi, PublicClient<HttpTransport, Chain>>;

let publisher: L1Publisher;
let l2Proof: Proof;

let builder: TxProver;
let builderDb: MerkleTrees;
@@ -147,7 +144,6 @@ describe('L1Publisher integration', () => {
const worldStateSynchronizer = new ServerWorldStateSynchronizer(tmpStore, builderDb, blockSource, worldStateConfig);
await worldStateSynchronizer.start();
builder = await TxProver.new(config, worldStateSynchronizer, new NoopTelemetryClient());
l2Proof = makeEmptyProof();

publisher = getL1Publisher({
rpcUrl: config.rpcUrl,
@@ -411,7 +407,7 @@ describe('L1Publisher integration', () => {

writeJson(`mixed_block_${i}`, block, l1ToL2Content, recipientAddress, deployerAccount.address);

await publisher.processL2Block(block, [], l2Proof);
await publisher.processL2Block(block);

const logs = await publicClient.getLogs({
address: rollupAddress,
@@ -431,12 +427,7 @@ describe('L1Publisher integration', () => {
const expectedData = encodeFunctionData({
abi: RollupAbi,
functionName: 'process',
args: [
`0x${block.header.toBuffer().toString('hex')}`,
`0x${block.archive.root.toBuffer().toString('hex')}`,
`0x`, // empty aggregation object
`0x${l2Proof.withoutPublicInputs().toString('hex')}`,
],
args: [`0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`],
});
expect(ethTx.input).toEqual(expectedData);

@@ -501,7 +492,7 @@ describe('L1Publisher integration', () => {

writeJson(`empty_block_${i}`, block, [], AztecAddress.ZERO, deployerAccount.address);

await publisher.processL2Block(block, [], l2Proof);
await publisher.processL2Block(block);

const logs = await publicClient.getLogs({
address: rollupAddress,
@@ -521,12 +512,7 @@ describe('L1Publisher integration', () => {
const expectedData = encodeFunctionData({
abi: RollupAbi,
functionName: 'process',
args: [
`0x${block.header.toBuffer().toString('hex')}`,
`0x${block.archive.root.toBuffer().toString('hex')}`,
`0x`, // empty aggregation object
`0x${l2Proof.withoutPublicInputs().toString('hex')}`,
],
args: [`0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`],
});
expect(ethTx.input).toEqual(expectedData);
}
@@ -3,7 +3,7 @@ import { BBCircuitVerifier } from '@aztec/bb-prover';
import { AGGREGATION_OBJECT_LENGTH, Fr, HEADER_LENGTH, Proof } from '@aztec/circuits.js';
import { type L1ContractAddresses } from '@aztec/ethereum';
import { type Logger } from '@aztec/foundation/log';
import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
import { BufferReader } from '@aztec/foundation/serialize';
import { AvailabilityOracleAbi, RollupAbi } from '@aztec/l1-artifacts';

import { type Anvil } from '@viem/anvil';
@@ -157,7 +157,7 @@ describe('proof_verification', () => {
});
});

describe('Rollup', () => {
describe.skip('Rollup', () => {
let availabilityContract: GetContractReturnType<typeof AvailabilityOracleAbi, typeof walletClient>;
let rollupContract: GetContractReturnType<typeof RollupAbi, typeof walletClient>;

@@ -183,8 +183,6 @@ describe('proof_verification', () => {
const args = [
`0x${block.header.toBuffer().toString('hex')}`,
`0x${block.archive.root.toBuffer().toString('hex')}`,
`0x${serializeToBuffer(aggregationObject).toString('hex')}`,
`0x${proof.withoutPublicInputs().toString('hex')}`,
] as const;

await expect(rollupContract.write.process(args)).resolves.toBeDefined();
9 changes: 2 additions & 7 deletions yarn-project/noir-protocol-circuits-types/package.local.json
@@ -3,10 +3,5 @@
"build": "yarn clean && yarn generate && tsc -b",
"clean": "rm -rf ./dest .tsbuildinfo src/types artifacts"
},
"files": [
"dest",
"src",
"artifacts",
"!*.test.*"
]
}
"files": ["dest", "src", "artifacts", "!*.test.*"]
}
9 changes: 2 additions & 7 deletions yarn-project/protocol-contracts/package.local.json
@@ -7,10 +7,5 @@
"build:ts": "tsc -b",
"clean": "rm -rf ./dest .tsbuildinfo ./artifacts"
},
"files": [
"dest",
"src",
"artifacts",
"!*.test.*"
]
}
"files": ["dest", "src", "artifacts", "!*.test.*"]
}
18 changes: 9 additions & 9 deletions yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts
@@ -56,7 +56,7 @@ describe('L1Publisher', () => {
});

it('publishes l2 block to l1', async () => {
const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(true);
expect(txSender.sendProcessTx).toHaveBeenCalledWith({
@@ -71,7 +71,7 @@

it('does not publish if last archive root is different to expected', async () => {
txSender.getCurrentArchive.mockResolvedValueOnce(L2Block.random(43).archive.root.toBuffer());
const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);
expect(result).toBe(false);
expect(txSender.sendPublishTx).not.toHaveBeenCalled();
expect(txSender.sendProcessTx).not.toHaveBeenCalled();
@@ -80,7 +80,7 @@
it('does not retry if sending a publish tx fails', async () => {
txSender.sendPublishTx.mockReset().mockRejectedValueOnce(new Error()).mockResolvedValueOnce(publishTxHash);

const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(false);
expect(txSender.sendPublishTx).toHaveBeenCalledTimes(1);
@@ -90,7 +90,7 @@
it('does not retry if sending a process tx fails', async () => {
txSender.sendProcessTx.mockReset().mockRejectedValueOnce(new Error()).mockResolvedValueOnce(processTxHash);

const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(false);
expect(txSender.sendPublishTx).toHaveBeenCalledTimes(1);
@@ -105,7 +105,7 @@
.mockRejectedValueOnce(new Error())
.mockResolvedValueOnce(processTxReceipt);

const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(true);
expect(txSender.getTransactionReceipt).toHaveBeenCalledTimes(4);
@@ -114,7 +114,7 @@
it('returns false if publish tx reverts', async () => {
txSender.getTransactionReceipt.mockReset().mockResolvedValueOnce({ ...publishTxReceipt, status: false });

const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(false);
});
@@ -125,15 +125,15 @@
.mockResolvedValueOnce(publishTxReceipt)
.mockResolvedValueOnce({ ...publishTxReceipt, status: false });

const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(false);
});

it('returns false if sending publish tx is interrupted', async () => {
txSender.sendPublishTx.mockReset().mockImplementationOnce(() => sleep(10, publishTxHash));

const resultPromise = publisher.processL2Block(l2Block, [], makeEmptyProof());
const resultPromise = publisher.processL2Block(l2Block);
publisher.interrupt();
const result = await resultPromise;

@@ -144,7 +144,7 @@
it('returns false if sending process tx is interrupted', async () => {
txSender.sendProcessTx.mockReset().mockImplementationOnce(() => sleep(10, processTxHash));

const resultPromise = publisher.processL2Block(l2Block, [], makeEmptyProof());
const resultPromise = publisher.processL2Block(l2Block);
publisher.interrupt();
const result = await resultPromise;

11 changes: 2 additions & 9 deletions yarn-project/sequencer-client/src/publisher/l1-publisher.ts
@@ -1,8 +1,7 @@
import { type L2Block } from '@aztec/circuit-types';
import { type L1PublishStats } from '@aztec/circuit-types/stats';
import { type EthAddress, type Fr, type Proof } from '@aztec/circuits.js';
import { type EthAddress } from '@aztec/circuits.js';
import { createDebugLogger } from '@aztec/foundation/log';
import { serializeToBuffer } from '@aztec/foundation/serialize';
import { InterruptibleSleep } from '@aztec/foundation/sleep';

import pick from 'lodash.pick';
@@ -97,10 +96,6 @@ export type L1ProcessArgs = {
archive: Buffer;
/** L2 block body. */
body: Buffer;
/** Aggregation object needed to verify the proof */
aggregationObject: Buffer;
/** Root rollup proof of the L2 block. */
proof: Buffer;
};

/**
@@ -132,7 +127,7 @@ export class L1Publisher implements L2BlockReceiver {
* @param block - L2 block to publish.
* @returns True once the tx has been confirmed and is successful, false on revert or interrupt, blocks otherwise.
*/
public async processL2Block(block: L2Block, aggregationObject: Fr[], proof: Proof): Promise<boolean> {
public async processL2Block(block: L2Block): Promise<boolean> {
// TODO(#4148) Remove this block number check, it's here because we don't currently have proper genesis state on the contract
const lastArchive = block.header.lastArchive.root.toBuffer();
if (block.number != 1 && !(await this.checkLastArchiveHash(lastArchive))) {
@@ -180,8 +175,6 @@ export class L1Publisher implements L2BlockReceiver {
header: block.header.toBuffer(),
archive: block.archive.root.toBuffer(),
body: encodedBody,
aggregationObject: serializeToBuffer(aggregationObject),
proof: proof.withoutPublicInputs(),
};

// Process block
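Taken together, the publisher's flow after this commit is: publish the block body to the availability oracle, then call process with just the header and archive root. A rough sketch of that sequence with viem; the account, chain, addresses, and helper name are placeholders, and the real L1Publisher delegates these calls to its transaction sender.

import { AvailabilityOracleAbi, RollupAbi } from '@aztec/l1-artifacts';
import { createWalletClient, getContract, http, type Hex } from 'viem';
import { privateKeyToAccount } from 'viem/accounts';
import { foundry } from 'viem/chains';

// Placeholder client and addresses; the real values come from the sequencer
// configuration (rpcUrl, publisher key, deployed l1Contracts).
const walletClient = createWalletClient({
  account: privateKeyToAccount(`0x${'01'.repeat(32)}`),
  chain: foundry,
  transport: http('http://127.0.0.1:8545'),
});

const availabilityOracle = getContract({
  address: `0x${'22'.repeat(20)}`,
  abi: AvailabilityOracleAbi,
  client: walletClient,
});
const rollup = getContract({
  address: `0x${'33'.repeat(20)}`,
  abi: RollupAbi,
  client: walletClient,
});

// Make the block body available first (sendPublishTx), then progress the rollup
// state with just the header and the new archive root (sendProcessTx). No
// aggregation object or proof is sent here any more.
export async function publishThenProcess(encoded: { body: Hex; header: Hex; archive: Hex }) {
  await availabilityOracle.write.publish([encoded.body]);
  await rollup.write.process([encoded.header, encoded.archive]);
}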