Skip to content

Commit

Permalink
feat: Remove proof from L1 Rollup process
Browse files Browse the repository at this point in the history
A batch proof covering multiple blocks will be uploaded separately.

Related to #7346
  • Loading branch information
spalladino committed Jul 4, 2024
1 parent 79e5856 commit b064eea
Show file tree
Hide file tree
Showing 15 changed files with 38 additions and 125 deletions.
39 changes: 1 addition & 38 deletions l1-contracts/src/core/Rollup.sol
Original file line number Diff line number Diff line change
Expand Up @@ -89,14 +89,8 @@ contract Rollup is IRollup {
* @notice Process an incoming L2 block and progress the state
* @param _header - The L2 block header
* @param _archive - A root of the archive tree after the L2 block is applied
* @param _proof - The proof of correct execution
*/
function process(
bytes calldata _header,
bytes32 _archive,
bytes calldata _aggregationObject,
bytes calldata _proof
) external override(IRollup) {
function process(bytes calldata _header, bytes32 _archive) external override(IRollup) {
// Decode and validate header
HeaderLib.Header memory header = HeaderLib.decode(_header);
HeaderLib.validate(header, VERSION, lastBlockTs, archive);
Expand All @@ -111,37 +105,6 @@ contract Rollup is IRollup {
if (sequencer != address(0x0) && sequencer != msg.sender) {
revert Errors.Rollup__InvalidSequencer(msg.sender);
}

bytes32[] memory publicInputs =
new bytes32[](2 + Constants.HEADER_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH);
// the archive tree root
publicInputs[0] = _archive;
// this is the _next_ available leaf in the archive tree
// normally this should be equal to the block number (since leaves are 0-indexed and blocks 1-indexed)
// but in yarn-project/merkle-tree/src/new_tree.ts we prefill the tree so that block N is in leaf N
publicInputs[1] = bytes32(header.globalVariables.blockNumber + 1);

bytes32[] memory headerFields = HeaderLib.toFields(header);
for (uint256 i = 0; i < headerFields.length; i++) {
publicInputs[i + 2] = headerFields[i];
}

// the block proof is recursive, which means it comes with an aggregation object
// this snippet copies it into the public inputs needed for verification
// it also guards against empty _aggregationObject used with mocked proofs
uint256 aggregationLength = _aggregationObject.length / 32;
for (uint256 i = 0; i < Constants.AGGREGATION_OBJECT_LENGTH && i < aggregationLength; i++) {
bytes32 part;
assembly {
part := calldataload(add(_aggregationObject.offset, mul(i, 32)))
}
publicInputs[i + 2 + Constants.HEADER_LENGTH] = part;
}

if (!verifier.verify(_proof, publicInputs)) {
revert Errors.Rollup__InvalidProof();
}

archive = _archive;
lastBlockTs = block.timestamp;

Expand Down
7 changes: 1 addition & 6 deletions l1-contracts/src/core/interfaces/IRollup.sol
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,7 @@ pragma solidity >=0.8.18;
interface IRollup {
event L2BlockProcessed(uint256 indexed blockNumber);

function process(
bytes calldata _header,
bytes32 _archive,
bytes calldata _aggregationObject,
bytes memory _proof
) external;
function process(bytes calldata _header, bytes32 _archive) external;

function setVerifier(address _verifier) external;
}
10 changes: 5 additions & 5 deletions l1-contracts/test/Rollup.t.sol
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ contract RollupTest is DecoderBase {
availabilityOracle.publish(body);

vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidChainId.selector, 0x420, 31337));
rollup.process(header, archive, bytes(""), bytes(""));
rollup.process(header, archive);
}

function testRevertInvalidVersion() public {
Expand All @@ -101,7 +101,7 @@ contract RollupTest is DecoderBase {
availabilityOracle.publish(body);

vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidVersion.selector, 0x420, 1));
rollup.process(header, archive, bytes(""), bytes(""));
rollup.process(header, archive);
}

function testRevertTimestampInFuture() public {
Expand All @@ -118,7 +118,7 @@ contract RollupTest is DecoderBase {
availabilityOracle.publish(body);

vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampInFuture.selector));
rollup.process(header, archive, bytes(""), bytes(""));
rollup.process(header, archive);
}

function testRevertTimestampTooOld() public {
Expand All @@ -133,7 +133,7 @@ contract RollupTest is DecoderBase {
availabilityOracle.publish(body);

vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampTooOld.selector));
rollup.process(header, archive, bytes(""), bytes(""));
rollup.process(header, archive);
}

function _testBlock(string memory name) public {
Expand All @@ -153,7 +153,7 @@ contract RollupTest is DecoderBase {
uint256 toConsume = inbox.toConsume();

vm.record();
rollup.process(header, archive, bytes(""), bytes(""));
rollup.process(header, archive);

assertEq(inbox.toConsume(), toConsume + 1, "Message subtree not consumed");

Expand Down
9 changes: 2 additions & 7 deletions yarn-project/accounts/package.local.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,5 @@
"build:ts": "tsc -b",
"clean": "rm -rf ./dest .tsbuildinfo ./artifacts"
},
"files": [
"dest",
"src",
"artifacts",
"!*.test.*"
]
}
"files": ["dest", "src", "artifacts", "!*.test.*"]
}
4 changes: 1 addition & 3 deletions yarn-project/archiver/src/archiver/archiver.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -248,12 +248,10 @@ function makeMessageSentEvent(l1BlockNum: bigint, l2BlockNumber: bigint, index:
function makeRollupTx(l2Block: L2Block) {
const header = toHex(l2Block.header.toBuffer());
const archive = toHex(l2Block.archive.root.toBuffer());
const aggregationObject = `0x`;
const proof = `0x`;
const input = encodeFunctionData({
abi: RollupAbi,
functionName: 'process',
args: [header, archive, aggregationObject, proof],
args: [header, archive],
});
return { input } as Transaction<bigint, number>;
}
Expand Down
2 changes: 1 addition & 1 deletion yarn-project/archiver/src/archiver/eth_log_handlers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ async function getBlockMetadataFromRollupTx(
if (functionName !== 'process') {
throw new Error(`Unexpected method called ${functionName}`);
}
const [headerHex, archiveRootHex] = args! as readonly [Hex, Hex, Hex, Hex];
const [headerHex, archiveRootHex] = args! as readonly [Hex, Hex];

const header = Header.fromBuffer(Buffer.from(hexToBytes(headerHex)));

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,8 @@ import {
MAX_NULLIFIERS_PER_TX,
MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
type Proof,
PublicDataUpdateRequest,
getMockVerificationKeys,
makeEmptyProof,
} from '@aztec/circuits.js';
import { fr, makeProof } from '@aztec/circuits.js/testing';
import { type L1ContractAddresses, createEthereumChain } from '@aztec/ethereum';
Expand Down Expand Up @@ -89,7 +87,6 @@ describe('L1Publisher integration', () => {
let outbox: GetContractReturnType<typeof OutboxAbi, PublicClient<HttpTransport, Chain>>;

let publisher: L1Publisher;
let l2Proof: Proof;

let builder: TxProver;
let builderDb: MerkleTrees;
Expand Down Expand Up @@ -147,7 +144,6 @@ describe('L1Publisher integration', () => {
const worldStateSynchronizer = new ServerWorldStateSynchronizer(tmpStore, builderDb, blockSource, worldStateConfig);
await worldStateSynchronizer.start();
builder = await TxProver.new(config, getMockVerificationKeys(), worldStateSynchronizer, new NoopTelemetryClient());
l2Proof = makeEmptyProof();

publisher = getL1Publisher({
rpcUrl: config.rpcUrl,
Expand Down Expand Up @@ -405,7 +401,7 @@ describe('L1Publisher integration', () => {

writeJson(`mixed_block_${i}`, block, l1ToL2Content, recipientAddress, deployerAccount.address);

await publisher.processL2Block(block, [], l2Proof);
await publisher.processL2Block(block);

const logs = await publicClient.getLogs({
address: rollupAddress,
Expand All @@ -425,12 +421,7 @@ describe('L1Publisher integration', () => {
const expectedData = encodeFunctionData({
abi: RollupAbi,
functionName: 'process',
args: [
`0x${block.header.toBuffer().toString('hex')}`,
`0x${block.archive.root.toBuffer().toString('hex')}`,
`0x`, // empty aggregation object
`0x${l2Proof.withoutPublicInputs().toString('hex')}`,
],
args: [`0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`],
});
expect(ethTx.input).toEqual(expectedData);

Expand Down Expand Up @@ -495,7 +486,7 @@ describe('L1Publisher integration', () => {

writeJson(`empty_block_${i}`, block, [], AztecAddress.ZERO, deployerAccount.address);

await publisher.processL2Block(block, [], l2Proof);
await publisher.processL2Block(block);

const logs = await publicClient.getLogs({
address: rollupAddress,
Expand All @@ -515,12 +506,7 @@ describe('L1Publisher integration', () => {
const expectedData = encodeFunctionData({
abi: RollupAbi,
functionName: 'process',
args: [
`0x${block.header.toBuffer().toString('hex')}`,
`0x${block.archive.root.toBuffer().toString('hex')}`,
`0x`, // empty aggregation object
`0x${l2Proof.withoutPublicInputs().toString('hex')}`,
],
args: [`0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`],
});
expect(ethTx.input).toEqual(expectedData);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { BBCircuitVerifier } from '@aztec/bb-prover';
import { AGGREGATION_OBJECT_LENGTH, Fr, HEADER_LENGTH, Proof } from '@aztec/circuits.js';
import { type L1ContractAddresses } from '@aztec/ethereum';
import { type Logger } from '@aztec/foundation/log';
import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
import { BufferReader } from '@aztec/foundation/serialize';
import { AvailabilityOracleAbi, RollupAbi } from '@aztec/l1-artifacts';

import { type Anvil } from '@viem/anvil';
Expand Down Expand Up @@ -157,7 +157,7 @@ describe('proof_verification', () => {
});
});

describe('Rollup', () => {
describe.skip('Rollup', () => {
let availabilityContract: GetContractReturnType<typeof AvailabilityOracleAbi, typeof walletClient>;
let rollupContract: GetContractReturnType<typeof RollupAbi, typeof walletClient>;

Expand All @@ -183,8 +183,6 @@ describe('proof_verification', () => {
const args = [
`0x${block.header.toBuffer().toString('hex')}`,
`0x${block.archive.root.toBuffer().toString('hex')}`,
`0x${serializeToBuffer(aggregationObject).toString('hex')}`,
`0x${proof.withoutPublicInputs().toString('hex')}`,
] as const;

await expect(rollupContract.write.process(args)).resolves.toBeDefined();
Expand Down
2 changes: 1 addition & 1 deletion yarn-project/noir-contracts.js/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -74,4 +74,4 @@
"engines": {
"node": ">=18"
}
}
}
9 changes: 2 additions & 7 deletions yarn-project/noir-protocol-circuits-types/package.local.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,5 @@
"build": "yarn clean && yarn generate && tsc -b",
"clean": "rm -rf ./dest .tsbuildinfo src/types artifacts"
},
"files": [
"dest",
"src",
"artifacts",
"!*.test.*"
]
}
"files": ["dest", "src", "artifacts", "!*.test.*"]
}
9 changes: 2 additions & 7 deletions yarn-project/protocol-contracts/package.local.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,5 @@
"build:ts": "tsc -b",
"clean": "rm -rf ./dest .tsbuildinfo ./artifacts"
},
"files": [
"dest",
"src",
"artifacts",
"!*.test.*"
]
}
"files": ["dest", "src", "artifacts", "!*.test.*"]
}
18 changes: 9 additions & 9 deletions yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ describe('L1Publisher', () => {
});

it('publishes l2 block to l1', async () => {
const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(true);
expect(txSender.sendProcessTx).toHaveBeenCalledWith({ header, archive, body, aggregationObject, proof });
Expand All @@ -65,7 +65,7 @@ describe('L1Publisher', () => {

it('does not publish if last archive root is different to expected', async () => {
txSender.getCurrentArchive.mockResolvedValueOnce(L2Block.random(43).archive.root.toBuffer());
const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);
expect(result).toBe(false);
expect(txSender.sendPublishTx).not.toHaveBeenCalled();
expect(txSender.sendProcessTx).not.toHaveBeenCalled();
Expand All @@ -74,7 +74,7 @@ describe('L1Publisher', () => {
it('does not retry if sending a publish tx fails', async () => {
txSender.sendPublishTx.mockReset().mockRejectedValueOnce(new Error()).mockResolvedValueOnce(publishTxHash);

const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(false);
expect(txSender.sendPublishTx).toHaveBeenCalledTimes(1);
Expand All @@ -84,7 +84,7 @@ describe('L1Publisher', () => {
it('does not retry if sending a process tx fails', async () => {
txSender.sendProcessTx.mockReset().mockRejectedValueOnce(new Error()).mockResolvedValueOnce(processTxHash);

const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(false);
expect(txSender.sendPublishTx).toHaveBeenCalledTimes(1);
Expand All @@ -99,7 +99,7 @@ describe('L1Publisher', () => {
.mockRejectedValueOnce(new Error())
.mockResolvedValueOnce(processTxReceipt);

const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(true);
expect(txSender.getTransactionReceipt).toHaveBeenCalledTimes(4);
Expand All @@ -108,7 +108,7 @@ describe('L1Publisher', () => {
it('returns false if publish tx reverts', async () => {
txSender.getTransactionReceipt.mockReset().mockResolvedValueOnce({ ...publishTxReceipt, status: false });

const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(false);
});
Expand All @@ -119,15 +119,15 @@ describe('L1Publisher', () => {
.mockResolvedValueOnce(publishTxReceipt)
.mockResolvedValueOnce({ ...publishTxReceipt, status: false });

const result = await publisher.processL2Block(l2Block, [], makeEmptyProof());
const result = await publisher.processL2Block(l2Block);

expect(result).toEqual(false);
});

it('returns false if sending publish tx is interrupted', async () => {
txSender.sendPublishTx.mockReset().mockImplementationOnce(() => sleep(10, publishTxHash));

const resultPromise = publisher.processL2Block(l2Block, [], makeEmptyProof());
const resultPromise = publisher.processL2Block(l2Block);
publisher.interrupt();
const result = await resultPromise;

Expand All @@ -138,7 +138,7 @@ describe('L1Publisher', () => {
it('returns false if sending process tx is interrupted', async () => {
txSender.sendProcessTx.mockReset().mockImplementationOnce(() => sleep(10, processTxHash));

const resultPromise = publisher.processL2Block(l2Block, [], makeEmptyProof());
const resultPromise = publisher.processL2Block(l2Block);
publisher.interrupt();
const result = await resultPromise;

Expand Down
Loading

0 comments on commit b064eea

Please sign in to comment.