feat: Add proverId to root rollup public inputs (#7639)
Adds a field element as a prover identifier to the public inputs of the
root rollup. It is passed directly from the root rollup inputs and is set
via a PROVER_ID env var. The prover id is emitted as part of the
L2ProofVerified event on L1.

Eventually we may want to change the prover id into an L1 or L2 address,
but for now it's an arbitrary value.

Fixes #7670
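
As a rough sketch of the PROVER_ID wiring described above (not part of this commit), a prover node could derive the Fr value for ProverConfig.proverId from the environment as follows; the helper name and fallback behavior are assumptions:

import { Fr } from '@aztec/circuits.js';

// Hypothetical helper: turn the PROVER_ID env var into the field element that flows
// through RootRollupInputs and is ultimately emitted in the L2ProofVerified event.
// Accepts either a 0x-prefixed field element or a short utf-8 label (at most 31 bytes
// so it fits in a field), mirroring the encoding used in e2e_prover_node.test.ts below.
function getProverIdFromEnv(): Fr | undefined {
  const id = process.env.PROVER_ID;
  if (!id) {
    return undefined; // leave ProverConfig.proverId unset; provers then report zero
  }
  return id.startsWith('0x') ? Fr.fromString(id) : Fr.fromString(Buffer.from(id, 'utf-8').toString('hex'));
}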
spalladino authored Jul 30, 2024
1 parent 729b36f commit 0120462
Showing 24 changed files with 142 additions and 32 deletions.
9 changes: 6 additions & 3 deletions l1-contracts/src/core/Rollup.sol
@@ -127,13 +127,14 @@ contract Rollup is Leonidas, IRollup {
function submitProof(
bytes calldata _header,
bytes32 _archive,
bytes32 _proverId,
bytes calldata _aggregationObject,
bytes calldata _proof
) external override(IRollup) {
HeaderLib.Header memory header = HeaderLib.decode(_header);

bytes32[] memory publicInputs =
new bytes32[](3 + Constants.HEADER_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH);
new bytes32[](4 + Constants.HEADER_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH);
// the archive tree root
publicInputs[0] = _archive;
// this is the _next_ available leaf in the archive tree
@@ -148,6 +149,8 @@ contract Rollup is Leonidas, IRollup {
publicInputs[i + 3] = headerFields[i];
}

publicInputs[headerFields.length + 3] = _proverId;

// the block proof is recursive, which means it comes with an aggregation object
// this snippet copies it into the public inputs needed for verification
// it also guards against empty _aggregationObject used with mocked proofs
@@ -157,14 +160,14 @@ contract Rollup is Leonidas, IRollup {
assembly {
part := calldataload(add(_aggregationObject.offset, mul(i, 32)))
}
publicInputs[i + 3 + Constants.HEADER_LENGTH] = part;
publicInputs[i + 4 + Constants.HEADER_LENGTH] = part;
}

if (!verifier.verify(_proof, publicInputs)) {
revert Errors.Rollup__InvalidProof();
}

emit L2ProofVerified(header.globalVariables.blockNumber);
emit L2ProofVerified(header.globalVariables.blockNumber, _proverId);
}

function _computePublicInputHash(bytes calldata _header, bytes32 _archive)
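
The call-site change this implies (a sketch, not part of the diff): submitProof now takes the prover id as its third argument, between the archive root and the aggregation object. Assuming a viem wallet client with an account and chain configured, and the block, proof, proverId and aggregationObject values from the proof_verification test further down:

import { RollupAbi } from '@aztec/l1-artifacts';
import { serializeToBuffer } from '@aztec/foundation/serialize';

// Sketch: submit a block proof with the new _proverId argument.
await walletClient.writeContract({
  address: rollupAddress.toString(),
  abi: RollupAbi,
  functionName: 'submitProof',
  args: [
    `0x${block.header.toBuffer().toString('hex')}`,
    `0x${block.archive.root.toBuffer().toString('hex')}`,
    `0x${proverId.toBuffer().toString('hex')}`, // new: prover identifier as bytes32
    `0x${serializeToBuffer(aggregationObject).toString('hex')}`,
    `0x${proof.withoutPublicInputs().toString('hex')}`,
  ],
});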
3 changes: 2 additions & 1 deletion l1-contracts/src/core/interfaces/IRollup.sol
@@ -4,13 +4,14 @@ pragma solidity >=0.8.18;

interface IRollup {
event L2BlockProcessed(uint256 indexed blockNumber);
event L2ProofVerified(uint256 indexed blockNumber);
event L2ProofVerified(uint256 indexed blockNumber, bytes32 indexed proverId);

function process(bytes calldata _header, bytes32 _archive) external;

function submitProof(
bytes calldata _header,
bytes32 _archive,
bytes32 _proverId,
bytes calldata _aggregationObject,
bytes calldata _proof
) external;
@@ -34,6 +34,8 @@ struct RootRollupInputs {
// inputs required to add the block hash
start_archive_snapshot : AppendOnlyTreeSnapshot,
new_archive_sibling_path : [Field; ARCHIVE_HEIGHT],

prover_id: Field,
}

impl RootRollupInputs {
@@ -106,7 +108,7 @@ impl RootRollupInputs {
0
);

RootRollupPublicInputs { archive, header, vk_tree_root }
RootRollupPublicInputs { archive, header, vk_tree_root, prover_id: self.prover_id }
}
}

@@ -120,6 +122,7 @@ impl Empty for RootRollupInputs {
start_l1_to_l2_message_tree_snapshot : AppendOnlyTreeSnapshot::zero(),
start_archive_snapshot : AppendOnlyTreeSnapshot::zero(),
new_archive_sibling_path : [0; ARCHIVE_HEIGHT],
prover_id: 0
}
}
}
@@ -9,4 +9,7 @@ struct RootRollupPublicInputs {

// New block header
header: Header,

// Identifier of the prover
prover_id: Field,
}
7 changes: 5 additions & 2 deletions yarn-project/archiver/src/archiver/archiver.test.ts
@@ -36,10 +36,12 @@ describe('Archiver', () => {

let publicClient: MockProxy<PublicClient<HttpTransport, Chain>>;
let archiverStore: ArchiverDataStore;
let proverId: Fr;

beforeEach(() => {
publicClient = mock<PublicClient<HttpTransport, Chain>>();
archiverStore = new MemoryArchiverStore(1000);
proverId = Fr.random();
});

it('can start, sync and stop and handle l1 to l2 messages and logs', async () => {
@@ -67,7 +69,7 @@ describe('Archiver', () => {
messageSent: [makeMessageSentEvent(98n, 1n, 0n), makeMessageSentEvent(99n, 1n, 1n)],
txPublished: [makeTxsPublishedEvent(101n, blocks[0].body.getTxsEffectsHash())],
l2BlockProcessed: [makeL2BlockProcessedEvent(101n, 1n)],
proofVerified: [makeProofVerifiedEvent(102n, 1n)],
proofVerified: [makeProofVerifiedEvent(102n, 1n, proverId)],
});

mockGetLogs({
@@ -271,11 +273,12 @@ function makeMessageSentEvent(l1BlockNum: bigint, l2BlockNumber: bigint, index:
} as Log<bigint, number, false, undefined, true, typeof InboxAbi, 'MessageSent'>;
}

function makeProofVerifiedEvent(l1BlockNum: bigint, l2BlockNumber: bigint) {
function makeProofVerifiedEvent(l1BlockNum: bigint, l2BlockNumber: bigint, proverId: Fr) {
return {
blockNumber: l1BlockNum,
args: {
blockNumber: l2BlockNumber,
proverId: proverId.toString(),
},
} as Log<bigint, number, false, undefined, true, typeof RollupAbi, 'L2ProofVerified'>;
}
14 changes: 4 additions & 10 deletions yarn-project/archiver/src/archiver/archiver.ts
@@ -28,7 +28,6 @@ import { type EthAddress } from '@aztec/foundation/eth-address';
import { Fr } from '@aztec/foundation/fields';
import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log';
import { RunningPromise } from '@aztec/foundation/running-promise';
import { RollupAbi } from '@aztec/l1-artifacts';
import { ClassRegistererAddress } from '@aztec/protocol-contracts/class-registerer';
import { type TelemetryClient } from '@aztec/telemetry-client';
import {
@@ -41,7 +40,7 @@ import {
} from '@aztec/types/contracts';

import groupBy from 'lodash.groupby';
import { type Chain, type HttpTransport, type PublicClient, createPublicClient, getAbiItem, http } from 'viem';
import { type Chain, type HttpTransport, type PublicClient, createPublicClient, http } from 'viem';

import { type ArchiverDataStore } from './archiver_store.js';
import { type ArchiverConfig } from './config.js';
@@ -50,6 +49,7 @@ import {
retrieveBlockBodiesFromAvailabilityOracle,
retrieveBlockMetadataFromRollup,
retrieveL1ToL2Messages,
retrieveL2ProofVerifiedEvents,
} from './data_retrieval.js';
import { ArchiverInstrumentation } from './instrumentation.js';

@@ -307,20 +307,14 @@ export class Archiver implements ArchiveSource {
}

private async updateLastProvenL2Block(fromBlock: bigint, toBlock: bigint) {
const logs = await this.publicClient.getLogs({
address: this.rollupAddress.toString(),
fromBlock,
toBlock: toBlock + 1n, // toBlock is exclusive
strict: true,
event: getAbiItem({ abi: RollupAbi, name: 'L2ProofVerified' }),
});
const logs = await retrieveL2ProofVerifiedEvents(this.publicClient, this.rollupAddress, fromBlock, toBlock);

const lastLog = logs[logs.length - 1];
if (!lastLog) {
return;
}

const provenBlockNumber = lastLog.args.blockNumber;
const provenBlockNumber = lastLog.l2BlockNumber;
if (!provenBlockNumber) {
throw new Error(`Missing argument blockNumber from L2ProofVerified event`);
}
27 changes: 25 additions & 2 deletions yarn-project/archiver/src/archiver/data_retrieval.ts
@@ -1,8 +1,9 @@
import { type Body, type InboxLeaf } from '@aztec/circuit-types';
import { type AppendOnlyTreeSnapshot, type Header } from '@aztec/circuits.js';
import { type AppendOnlyTreeSnapshot, Fr, type Header } from '@aztec/circuits.js';
import { type EthAddress } from '@aztec/foundation/eth-address';
import { RollupAbi } from '@aztec/l1-artifacts';

import { type PublicClient } from 'viem';
import { type PublicClient, getAbiItem } from 'viem';

import {
getL2BlockProcessedLogs,
@@ -143,3 +144,25 @@ export async function retrieveL1ToL2Messages(
} while (blockUntilSynced && searchStartBlock <= searchEndBlock);
return { lastProcessedL1BlockNumber: searchStartBlock - 1n, retrievedData: retrievedL1ToL2Messages };
}

/** Retrieves L2ProofVerified events from the rollup contract. */
export async function retrieveL2ProofVerifiedEvents(
publicClient: PublicClient,
rollupAddress: EthAddress,
searchStartBlock: bigint,
searchEndBlock?: bigint,
): Promise<{ l1BlockNumber: bigint; l2BlockNumber: bigint; proverId: Fr }[]> {
const logs = await publicClient.getLogs({
address: rollupAddress.toString(),
fromBlock: searchStartBlock,
toBlock: searchEndBlock ? searchEndBlock + 1n : undefined,
strict: true,
event: getAbiItem({ abi: RollupAbi, name: 'L2ProofVerified' }),
});

return logs.map(log => ({
l1BlockNumber: log.blockNumber,
l2BlockNumber: log.args.blockNumber,
proverId: Fr.fromString(log.args.proverId),
}));
}
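
For reference, a minimal usage sketch of the new helper (assuming a viem PublicClient and the deployed rollup's EthAddress are already at hand; the helper is re-exported from the archiver package in the next file):

import { retrieveL2ProofVerifiedEvents } from '@aztec/archiver';

// Sketch: list every proven L2 block since L1 block 1, with the prover that proved it.
const events = await retrieveL2ProofVerifiedEvents(publicClient, rollupAddress, 1n);
for (const { l1BlockNumber, l2BlockNumber, proverId } of events) {
  console.log(`L2 block ${l2BlockNumber} proven by ${proverId.toString()} at L1 block ${l1BlockNumber}`);
}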
3 changes: 3 additions & 0 deletions yarn-project/archiver/src/index.ts
@@ -12,6 +12,9 @@ export * from './archiver/index.js';
export * from './rpc/index.js';
export * from './factory.js';

// We are not storing the info from these events in the archiver for now (and we don't really need to), so we expose this query directly
export { retrieveL2ProofVerifiedEvents } from './archiver/data_retrieval.js';

const log = createDebugLogger('aztec:archiver');

/**
3 changes: 3 additions & 0 deletions yarn-project/circuit-types/src/interfaces/block-prover.ts
@@ -64,4 +64,7 @@ export interface BlockProver {
* Will pad the block to it's complete size with empty transactions and prove all the way to the root rollup.
*/
setBlockCompleted(): Promise<void>;

/** Returns an identifier for the prover or zero if not set. */
getProverId(): Fr;
}
3 changes: 3 additions & 0 deletions yarn-project/circuit-types/src/interfaces/prover-client.ts
@@ -1,4 +1,5 @@
import { type TxHash } from '@aztec/circuit-types';
import { type Fr } from '@aztec/circuits.js';

import { type BlockProver } from './block-prover.js';
import { type ProvingJobSource } from './proving-job.js';
@@ -21,6 +22,8 @@ export type ProverConfig = {
proverJobTimeoutMs: number;
/** The interval to check job health status */
proverJobPollIntervalMs: number;
/** Identifier of the prover */
proverId?: Fr;
};

/**
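
A minimal sketch of how a prover implementation could satisfy the new getProverId contract from the configured value (the class is illustrative and the import paths are assumptions; the interface only requires zero when no id is set):

import { Fr } from '@aztec/circuits.js';
import { type ProverConfig } from '@aztec/circuit-types';

// Sketch: surface the configured prover id, defaulting to zero per the
// BlockProver.getProverId docs above.
class ExampleBlockProver {
  constructor(private readonly config: ProverConfig) {}

  getProverId(): Fr {
    return this.config.proverId ?? Fr.ZERO;
  }
}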
11 changes: 9 additions & 2 deletions yarn-project/circuits.js/src/structs/rollup/root_rollup.ts
@@ -48,6 +48,8 @@ export class RootRollupInputs {
* Sibling path of the new block tree root.
*/
public newArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
/** Identifier of the prover for this root rollup. */
public proverId: Fr,
) {}

/**
@@ -89,6 +91,7 @@ export class RootRollupInputs {
fields.startL1ToL2MessageTreeSnapshot,
fields.startArchiveSnapshot,
fields.newArchiveSiblingPath,
fields.proverId,
] as const;
}

@@ -107,6 +110,7 @@ export class RootRollupInputs {
reader.readObject(AppendOnlyTreeSnapshot),
reader.readObject(AppendOnlyTreeSnapshot),
reader.readArray(ARCHIVE_HEIGHT, Fr),
reader.readObject(Fr),
);
}

@@ -133,10 +137,12 @@ export class RootRollupPublicInputs {
public vkTreeRoot: Fr,
/** A header of an L2 block. */
public header: Header,
/** Identifier of the prover who generated this proof. */
public proverId: Fr,
) {}

static getFields(fields: FieldsOf<RootRollupPublicInputs>) {
return [fields.archive, fields.vkTreeRoot, fields.header] as const;
return [fields.archive, fields.vkTreeRoot, fields.header, fields.proverId] as const;
}

toBuffer() {
@@ -160,8 +166,9 @@ export class RootRollupPublicInputs {
const reader = BufferReader.asReader(buffer);
return new RootRollupPublicInputs(
reader.readObject(AppendOnlyTreeSnapshot),
Fr.fromBuffer(reader),
reader.readObject(Fr),
reader.readObject(Header),
reader.readObject(Fr),
);
}

2 changes: 2 additions & 0 deletions yarn-project/circuits.js/src/tests/factories.ts
@@ -932,6 +932,7 @@ export function makeRootRollupInputs(seed = 0, globalVariables?: GlobalVariables
makeAppendOnlyTreeSnapshot(seed + 0x2200),
makeAppendOnlyTreeSnapshot(seed + 0x2200),
makeTuple(ARCHIVE_HEIGHT, fr, 0x2400),
fr(0x2500),
);
}

@@ -983,6 +984,7 @@ export function makeRootRollupPublicInputs(
archive: makeAppendOnlyTreeSnapshot(seed + 0x100),
header: makeHeader(seed + 0x200, blockNumber),
vkTreeRoot: fr(seed + 0x300),
proverId: fr(seed + 0x400),
});
}

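
A quick round-trip sketch tying the serializer change above to the factory just shown (the '@aztec/circuits.js/testing' export path is an assumption):

import { RootRollupPublicInputs } from '@aztec/circuits.js';
import { makeRootRollupPublicInputs } from '@aztec/circuits.js/testing';

// Sketch: the public inputs now serialize as (archive, vkTreeRoot, header, proverId),
// so the new field should survive a buffer round-trip.
const original = makeRootRollupPublicInputs(1);
const restored = RootRollupPublicInputs.fromBuffer(original.toBuffer());
// expected: restored.proverId.equals(original.proverId) === true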
@@ -33,6 +33,7 @@ import { getLogger, setupL1Contracts, startAnvil } from '../fixtures/utils.js';
*/
describe('proof_verification', () => {
let proof: Proof;
let proverId: Fr;
let block: L2Block;
let aggregationObject: Fr[];
let anvil: Anvil | undefined;
@@ -121,6 +122,7 @@ describe('proof_verification', () => {

block = L2Block.fromString(blockResult.block);
proof = Proof.fromString(blockResult.proof);
proverId = Fr.ZERO;
aggregationObject = blockResult.aggregationObject.map((x: string) => Fr.fromString(x));
});

@@ -183,6 +185,7 @@ describe('proof_verification', () => {
const args = [
`0x${block.header.toBuffer().toString('hex')}`,
`0x${block.archive.root.toBuffer().toString('hex')}`,
`0x${proverId.toBuffer().toString('hex')}`,
`0x${serializeToBuffer(aggregationObject).toString('hex')}`,
`0x${proof.withoutPublicInputs().toString('hex')}`,
] as const;
15 changes: 11 additions & 4 deletions yarn-project/end-to-end/src/e2e_prover_node.test.ts
@@ -1,5 +1,5 @@
import { getSchnorrAccount } from '@aztec/accounts/schnorr';
import { type Archiver } from '@aztec/archiver';
import { type Archiver, retrieveL2ProofVerifiedEvents } from '@aztec/archiver';
import {
type AccountWalletWithSecretKey,
type AztecAddress,
@@ -29,7 +29,7 @@ import {
// Tests simple block building with a sequencer that does not upload proofs to L1,
// and then follows with a prover node run (with real proofs disabled, but
// still simulating all circuits via a prover-client), in order to test
// the coordination between the sequencer and the prover node.
// the coordination through L1 between the sequencer and the prover node.
describe('e2e_prover_node', () => {
let ctx: SubsystemsContext;
let wallet: AccountWalletWithSecretKey;
@@ -133,15 +133,22 @@ describe('e2e_prover_node', () => {

// Kick off a prover node
await sleep(1000);
logger.info('Creating prover node');
const proverId = Fr.fromString(Buffer.from('awesome-prover', 'utf-8').toString('hex'));
logger.info(`Creating prover node ${proverId.toString()}`);
// HACK: We have to use the existing archiver to fetch L2 data, since anvil's chain dump/load used by the
// snapshot manager does not include events nor txs, so a new archiver would not "see" old blocks.
const proverConfig = { ...ctx.aztecNodeConfig, txProviderNodeUrl: undefined, dataDirectory: undefined };
const proverConfig = { ...ctx.aztecNodeConfig, txProviderNodeUrl: undefined, dataDirectory: undefined, proverId };
const archiver = ctx.aztecNode.getBlockSource() as Archiver;
const proverNode = await createProverNode(proverConfig, { aztecNodeTxProvider: ctx.aztecNode, archiver });

// Prove the first two blocks
await prove(proverNode, firstBlock);
await prove(proverNode, firstBlock + 1);

// Check that the prover id made it to the emitted event
const { publicClient, l1ContractAddresses } = ctx.deployL1ContractsValues;
const logs = await retrieveL2ProofVerifiedEvents(publicClient, l1ContractAddresses.rollupAddress, 1n);
expect(logs[0].l2BlockNumber).toEqual(BigInt(firstBlock));
expect(logs[0].proverId.toString()).toEqual(proverId.toString());
});
});
@@ -2005,6 +2005,7 @@ export function mapRootRollupInputsToNoir(rootRollupInputs: RootRollupInputs): R
),
start_archive_snapshot: mapAppendOnlyTreeSnapshotToNoir(rootRollupInputs.startArchiveSnapshot),
new_archive_sibling_path: mapTuple(rootRollupInputs.newArchiveSiblingPath, mapFieldToNoir),
prover_id: mapFieldToNoir(rootRollupInputs.proverId),
};
}

@@ -2045,6 +2046,7 @@ export function mapRootRollupPublicInputsFromNoir(
mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.archive),
mapFieldFromNoir(rootRollupPublicInputs.vk_tree_root),
mapHeaderFromNoir(rootRollupPublicInputs.header),
mapFieldFromNoir(rootRollupPublicInputs.prover_id),
);
}
