From 9db8e447971f3bf0d0dad2e61d15c4af6ab1d2bd Mon Sep 17 00:00:00 2001
From: Alex Gherghisan
Date: Sat, 16 Mar 2024 19:14:14 +0000
Subject: [PATCH] refactor: extract tx validation to separate class

---
 yarn-project/circuit-types/src/tx/tx.ts       |   7 +-
 .../end-to-end/src/e2e_block_building.test.ts | 106 +++++++++------
 .../src/sequencer/sequencer.test.ts           |   2 +-
 .../src/sequencer/sequencer.ts                | 110 +++------------
 .../src/sequencer/tx_validator.test.ts        |  77 +++++++++++
 .../src/sequencer/tx_validator.ts             | 128 ++++++++++++++++++
 6 files changed, 292 insertions(+), 138 deletions(-)
 create mode 100644 yarn-project/sequencer-client/src/sequencer/tx_validator.test.ts
 create mode 100644 yarn-project/sequencer-client/src/sequencer/tx_validator.ts

diff --git a/yarn-project/circuit-types/src/tx/tx.ts b/yarn-project/circuit-types/src/tx/tx.ts
index 1b7aeb0768e..4f37eddc2db 100644
--- a/yarn-project/circuit-types/src/tx/tx.ts
+++ b/yarn-project/circuit-types/src/tx/tx.ts
@@ -176,7 +176,6 @@ export class Tx {
    * @returns - The hash.
    */
   static getHash(tx: Tx | HasHash): TxHash {
-    const hasHash = (tx: Tx | HasHash): tx is HasHash => (tx as HasHash).hash !== undefined;
     return hasHash(tx) ? tx.hash : tx.getTxHash();
   }

@@ -186,7 +185,7 @@ export class Tx {
    * @returns The corresponding array of hashes.
    */
   static getHashes(txs: (Tx | HasHash)[]): TxHash[] {
-    return txs.map(tx => Tx.getHash(tx));
+    return txs.map(Tx.getHash);
   }

   /**
@@ -208,3 +207,7 @@ export class Tx {

 /** Utility type for an entity that has a hash property for a txhash */
 type HasHash = { /** The tx hash */ hash: TxHash };
+
+function hasHash(tx: Tx | HasHash): tx is HasHash {
+  return (tx as HasHash).hash !== undefined;
+}
diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts
index 33fb95fafec..d20554df81b 100644
--- a/yarn-project/end-to-end/src/e2e_block_building.test.ts
+++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts
@@ -9,6 +9,7 @@ import {
   PXE,
   SentTx,
   TxReceipt,
+  TxStatus,
   Wallet,
 } from '@aztec/aztec.js';
 import { times } from '@aztec/foundation/collection';
@@ -110,8 +111,7 @@ describe('e2e_block_building', () => {
     }, 60_000);
   });

-  // Regressions for https://github.com/AztecProtocol/aztec-packages/issues/2502
-  describe('double-spends on the same block', () => {
+  describe('double-spends', () => {
     let contract: TestContract;
     let teardown: () => Promise<void>;

@@ -123,48 +123,66 @@ describe('e2e_block_building', () => {

     afterAll(() => teardown());

-    it('drops tx with private nullifier already emitted on the same block', async () => {
-      const nullifier = Fr.random();
-      const calls = times(2, () => contract.methods.emit_nullifier(nullifier));
-      for (const call of calls) {
-        await call.simulate();
-      }
-      const [tx1, tx2] = calls.map(call => call.send());
-      await expectXorTx(tx1, tx2);
-    }, 30_000);
-
-    it('drops tx with public nullifier already emitted on the same block', async () => {
-      const secret = Fr.random();
-      const calls = times(2, () => contract.methods.create_nullifier_public(140n, secret));
-      for (const call of calls) {
-        await call.simulate();
-      }
-      const [tx1, tx2] = calls.map(call => call.send());
-      await expectXorTx(tx1, tx2);
-    }, 30_000);
-
-    it('drops tx with two equal nullifiers', async () => {
-      const nullifier = Fr.random();
-      const calls = times(2, () => contract.methods.emit_nullifier(nullifier).request());
-      await expect(new BatchCall(owner, calls).send().wait()).rejects.toThrow(/dropped/);
-    }, 30_000);
-
-    it('drops tx with private nullifier already emitted from public on the same block', async () => {
-      const secret = Fr.random();
-      // See yarn-project/simulator/src/public/index.test.ts 'Should be able to create a nullifier from the public context'
-      const emittedPublicNullifier = pedersenHash([new Fr(140), secret].map(a => a.toBuffer()));
-
-      const calls = [
-        contract.methods.create_nullifier_public(140n, secret),
-        contract.methods.emit_nullifier(emittedPublicNullifier),
-      ];
-
-      for (const call of calls) {
-        await call.simulate();
-      }
-      const [tx1, tx2] = calls.map(call => call.send());
-      await expectXorTx(tx1, tx2);
-    }, 30_000);
+    // Regressions for https://github.com/AztecProtocol/aztec-packages/issues/2502
+    describe('in the same block', () => {
+      it('drops tx with private nullifier already emitted on the same block', async () => {
+        const nullifier = Fr.random();
+        const calls = times(2, () => contract.methods.emit_nullifier(nullifier));
+        for (const call of calls) {
+          await call.simulate();
+        }
+        const [tx1, tx2] = calls.map(call => call.send());
+        await expectXorTx(tx1, tx2);
+      }, 30_000);
+
+      it('drops tx with public nullifier already emitted on the same block', async () => {
+        const secret = Fr.random();
+        const calls = times(2, () => contract.methods.create_nullifier_public(140n, secret));
+        for (const call of calls) {
+          await call.simulate();
+        }
+        const [tx1, tx2] = calls.map(call => call.send());
+        await expectXorTx(tx1, tx2);
+      }, 30_000);
+
+      it('drops tx with two equal nullifiers', async () => {
+        const nullifier = Fr.random();
+        const calls = times(2, () => contract.methods.emit_nullifier(nullifier).request());
+        await expect(new BatchCall(owner, calls).send().wait()).rejects.toThrow(/dropped/);
+      }, 30_000);
+
+      it('drops tx with private nullifier already emitted from public on the same block', async () => {
+        const secret = Fr.random();
+        // See yarn-project/simulator/src/public/index.test.ts 'Should be able to create a nullifier from the public context'
+        const emittedPublicNullifier = pedersenHash([new Fr(140), secret].map(a => a.toBuffer()));
+
+        const calls = [
+          contract.methods.create_nullifier_public(140n, secret),
+          contract.methods.emit_nullifier(emittedPublicNullifier),
+        ];
+
+        for (const call of calls) {
+          await call.simulate();
+        }
+        const [tx1, tx2] = calls.map(call => call.send());
+        await expectXorTx(tx1, tx2);
+      }, 30_000);
+    });
+
+    describe('across blocks', () => {
+      it('drops a tx that tries to spend a nullifier already emitted on a previous block', async () => {
+        const secret = Fr.random();
+        const emittedPublicNullifier = pedersenHash([new Fr(140), secret].map(a => a.toBuffer()));
+
+        await expect(contract.methods.create_nullifier_public(140n, secret).send().wait()).resolves.toEqual(
+          expect.objectContaining({
+            status: TxStatus.MINED,
+          }),
+        );
+
+        await expect(contract.methods.emit_nullifier(emittedPublicNullifier).send().wait()).rejects.toThrow(/dropped/);
+      });
+    });
   });
 });

diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts
index 8e151a0fc6a..0b6e9e496ce 100644
--- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts
+++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts
@@ -132,7 +132,7 @@ describe('sequencer', () => {
     );

     // We make a nullifier from tx1 a part of the nullifier tree, so it gets rejected as double spend
-    const doubleSpendNullifier = doubleSpendTx.data.end.newNullifiers[0].toBuffer();
+    const doubleSpendNullifier = doubleSpendTx.data.end.newNullifiers[0].value.toBuffer();
     merkleTreeOps.findLeafIndex.mockImplementation((treeId: MerkleTreeId, value: Buffer) => {
       return Promise.resolve(
         treeId === MerkleTreeId.NULLIFIER_TREE && value.equals(doubleSpendNullifier) ? 1n : undefined,
diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts
index 5e58ba6fd83..9050ee1a983 100644
--- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts
+++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts
@@ -16,6 +16,7 @@ import { ceilPowerOfTwo } from '../utils.js';
 import { SequencerConfig } from './config.js';
 import { ProcessedTx } from './processed_tx.js';
 import { PublicProcessorFactory } from './public_processor.js';
+import { TxValidator } from './tx_validator.js';

 /**
  * Sequencer client
@@ -171,8 +172,18 @@ export class Sequencer {
       );

       // Filter out invalid txs
+      const trees = this.worldState.getLatest();
+      const txValidator = new TxValidator(
+        {
+          getNullifierIndex(nullifier: Fr): Promise<bigint | undefined> {
+            return trees.findLeafIndex(MerkleTreeId.NULLIFIER_TREE, nullifier.toBuffer());
+          },
+        },
+        newGlobalVariables,
+      );
+
       // TODO: It should be responsibility of the P2P layer to validate txs before passing them on here
-      const validTxs = await this.takeValidTxs(pendingTxs, newGlobalVariables);
+      const validTxs = await this.takeValidTxs(pendingTxs, txValidator);
       if (validTxs.length < this.minTxsPerBLock) {
         return;
       }
@@ -193,7 +204,7 @@ export class Sequencer {
       // public functions emitting nullifiers would pass earlier check but fail here.
       // Note that we're checking all nullifiers generated in the private execution twice,
       // we could store the ones already checked and skip them here as an optimization.
-      const processedValidTxs = await this.takeValidTxs(processedTxs, newGlobalVariables);
+      const processedValidTxs = await this.takeValidTxs(processedTxs, txValidator);

       if (processedValidTxs.length === 0) {
         this.log('No txs processed correctly to build block. Exiting');
@@ -251,47 +262,14 @@ export class Sequencer {
     }
   }

-  protected async takeValidTxs<T extends Tx | ProcessedTx>(txs: T[], globalVariables: GlobalVariables): Promise<T[]> {
-    const validTxs: T[] = [];
-    const txsToDelete = [];
-    const thisBlockNullifiers: Set<bigint> = new Set();
-
-    // Process txs until we get to maxTxsPerBlock, rejecting double spends in the process
-    for (const tx of txs) {
-      if (tx.data.constants.txContext.chainId.value !== globalVariables.chainId.value) {
-        this.log(
-          `Deleting tx for incorrect chain ${tx.data.constants.txContext.chainId.toString()}, tx hash ${Tx.getHash(
-            tx,
-          )}`,
-        );
-        txsToDelete.push(tx);
-        continue;
-      }
-      if (await this.isTxDoubleSpend(tx)) {
-        this.log(`Deleting double spend tx ${Tx.getHash(tx)}`);
-        txsToDelete.push(tx);
-        continue;
-      } else if (this.isTxDoubleSpendSameBlock(tx, thisBlockNullifiers)) {
-        // We don't drop these txs from the p2p pool immediately since they become valid
-        // again if the current block fails to be published for some reason.
-        this.log(`Skipping tx with double-spend for this same block ${Tx.getHash(tx)}`);
-        continue;
-      }
-
-      tx.data.end.newNullifiers.forEach(n => thisBlockNullifiers.add(n.value.toBigInt()));
-      tx.data.endNonRevertibleData.newNullifiers.forEach(n => thisBlockNullifiers.add(n.value.toBigInt()));
-      validTxs.push(tx);
-      if (validTxs.length >= this.maxTxsPerBlock) {
-        break;
-      }
+  protected async takeValidTxs<T extends Tx | ProcessedTx>(txs: T[], validator: TxValidator): Promise<T[]> {
+    const [valid, invalid] = await validator.validateTxs(txs);
+    if (invalid.length > 0) {
+      this.log(`Dropping invalid txs from the p2p pool ${Tx.getHashes(invalid).join(', ')}`);
+      await this.p2pClient.deleteTxs(Tx.getHashes(invalid));
     }
-    // Make sure we remove these from the tx pool so we do not consider it again
-    if (txsToDelete.length > 0) {
-      await this.p2pClient.deleteTxs(Tx.getHashes([...txsToDelete]));
-    }
-
-    return validTxs;
+    return valid.slice(0, this.maxTxsPerBlock);
   }

   /**
@@ -334,56 +312,6 @@ export class Sequencer {
     return block;
   }

-  /**
-   * Returns true if one of the tx nullifiers exist on the block being built.
-   * @param tx - The tx to test.
-   * @param thisBlockNullifiers - The nullifiers added so far.
-   */
-  protected isTxDoubleSpendSameBlock(tx: Tx | ProcessedTx, thisBlockNullifiers: Set<bigint>): boolean {
-    // We only consider non-empty nullifiers
-    const newNullifiers = [
-      ...tx.data.endNonRevertibleData.newNullifiers.filter(n => !n.isEmpty()),
-      ...tx.data.end.newNullifiers.filter(n => !n.isEmpty()),
-    ];
-
-    for (const nullifier of newNullifiers) {
-      if (thisBlockNullifiers.has(nullifier.value.toBigInt())) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  /**
-   * Returns true if one of the transaction nullifiers exist.
-   * Nullifiers prevent double spends in a private context.
-   * @param tx - The transaction.
-   * @returns Whether this is a problematic double spend that the L1 contract would reject.
-   */
-  protected async isTxDoubleSpend(tx: Tx | ProcessedTx): Promise<boolean> {
-    // We only consider non-empty nullifiers
-    const newNullifiers = [
-      ...tx.data.endNonRevertibleData.newNullifiers.filter(n => !n.isEmpty()),
-      ...tx.data.end.newNullifiers.filter(n => !n.isEmpty()),
-    ];
-
-    // Ditch this tx if it has a repeated nullifiers
-    const uniqNullifiers = new Set(newNullifiers.map(n => n.value.toBigInt()));
-    if (uniqNullifiers.size !== newNullifiers.length) {
-      return true;
-    }
-
-    for (const nullifier of newNullifiers) {
-      // TODO(AD): this is an exhaustive search currently
-      const db = this.worldState.getLatest();
-      const indexInDb = await db.findLeafIndex(MerkleTreeId.NULLIFIER_TREE, nullifier.toBuffer());
-      if (indexInDb !== undefined) {
-        return true;
-      }
-    }
-    return false;
-  }
-
   get coinbase(): EthAddress {
     return this._coinbase;
   }
diff --git a/yarn-project/sequencer-client/src/sequencer/tx_validator.test.ts b/yarn-project/sequencer-client/src/sequencer/tx_validator.test.ts
new file mode 100644
index 00000000000..f3ecf36691e
--- /dev/null
+++ b/yarn-project/sequencer-client/src/sequencer/tx_validator.test.ts
@@ -0,0 +1,77 @@
+import { mockTx as baseMockTx } from '@aztec/circuit-types';
+import { Fr, GlobalVariables } from '@aztec/circuits.js';
+import { makeGlobalVariables } from '@aztec/circuits.js/testing';
+
+import { MockProxy, mock, mockFn } from 'jest-mock-extended';
+
+import { NullifierSource, TxValidator } from './tx_validator.js';
+
+describe('TxValidator', () => {
+  let validator: TxValidator;
+  let globalVariables: GlobalVariables;
+  let nullifierSource: MockProxy<NullifierSource>;
+
+  beforeEach(() => {
+    nullifierSource = mock<NullifierSource>({
+      getNullifierIndex: mockFn().mockImplementation(() => {
+        return Promise.resolve(undefined);
+      }),
+    });
+    globalVariables = makeGlobalVariables();
+    validator = new TxValidator(nullifierSource, globalVariables);
+  });
+
+  describe('inspects tx metadata', () => {
+    it('allows only transactions for the right chain', async () => {
+      const goodTx = mockTx();
+      const badTx = mockTx();
+      badTx.data.constants.txContext.chainId = Fr.random();
+
+      await expect(validator.validateTxs([goodTx, badTx])).resolves.toEqual([[goodTx], [badTx]]);
+    });
+  });
+
+  describe('inspects tx nullifiers', () => {
+    it('rejects duplicates in non revertible data', async () => {
+      const badTx = mockTx();
+      badTx.data.endNonRevertibleData.newNullifiers[1] = badTx.data.endNonRevertibleData.newNullifiers[0];
+      await expect(validator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]);
+    });
+
+    it('rejects duplicates in revertible data', async () => {
+      const badTx = mockTx();
+      badTx.data.end.newNullifiers[1] = badTx.data.end.newNullifiers[0];
+      await expect(validator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]);
+    });
+
+    it('rejects duplicates across phases', async () => {
+      const badTx = mockTx();
+      badTx.data.end.newNullifiers[0] = badTx.data.endNonRevertibleData.newNullifiers[0];
+      await expect(validator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]);
+    });
+
+    it('rejects duplicates across txs', async () => {
+      const firstTx = mockTx();
+      const secondTx = mockTx();
+      secondTx.data.end.newNullifiers[0] = firstTx.data.end.newNullifiers[0];
+      await expect(validator.validateTxs([firstTx, secondTx])).resolves.toEqual([[firstTx], [secondTx]]);
+    });
+
+    it('rejects duplicates against history', async () => {
+      const badTx = mockTx();
+      nullifierSource.getNullifierIndex.mockReturnValueOnce(Promise.resolve(1n));
+      await expect(validator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]);
+    });
+  });
+
+  // get unique txs that are also stable across test runs
+  let txSeed = 1;
+  /** Creates a mock tx for the current chain */
+  function mockTx() {
+    const tx = baseMockTx(txSeed++, false);
+    tx.data.constants.txContext.chainId = globalVariables.chainId;
+    tx.data.constants.txContext.version = globalVariables.version;
+
+    return tx;
+  }
+});
diff --git a/yarn-project/sequencer-client/src/sequencer/tx_validator.ts b/yarn-project/sequencer-client/src/sequencer/tx_validator.ts
new file mode 100644
index 00000000000..a11bfea27f5
--- /dev/null
+++ b/yarn-project/sequencer-client/src/sequencer/tx_validator.ts
@@ -0,0 +1,128 @@
+import { Tx } from '@aztec/circuit-types';
+import { Fr, GlobalVariables } from '@aztec/circuits.js';
+import { Logger, createDebugLogger } from '@aztec/foundation/log';
+
+import { ProcessedTx } from './processed_tx.js';
+
+export interface NullifierSource {
+  getNullifierIndex: (nullifier: Fr) => Promise<bigint | undefined>;
+}
+
+// Prefer symbols over booleans so the intention is explicit at the call site,
+// whereas the meaning of a raw true/false is tied to the function name
+// (e.g. isDoubleSpend vs isValidChain assign opposite meanings to `true`).
+const VALID_TX = Symbol('valid_tx');
+const INVALID_TX = Symbol('invalid_tx');
+
+type TxValidationStatus = typeof VALID_TX | typeof INVALID_TX;
+
+export class TxValidator {
+  #log: Logger;
+  #globalVariables: GlobalVariables;
+  #nullifierSource: NullifierSource;
+
+  constructor(
+    nullifierSource: NullifierSource,
+    globalVariables: GlobalVariables,
+    log = createDebugLogger('aztec:sequencer:tx_validator'),
+  ) {
+    this.#nullifierSource = nullifierSource;
+    this.#globalVariables = globalVariables;
+
+    this.#log = log;
+  }
+
+  /**
+   * Validates a list of transactions.
+   * @param txs - The transactions to validate.
+   * @returns A tuple of valid and invalid transactions.
+   */
+  public async validateTxs<T extends Tx | ProcessedTx>(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[]]> {
+    const validTxs: T[] = [];
+    const invalidTxs: T[] = [];
+    const thisBlockNullifiers = new Set<bigint>();
+
+    for (const tx of txs) {
+      if (this.#validateMetadata(tx) === INVALID_TX) {
+        invalidTxs.push(tx);
+        continue;
+      }
+
+      if ((await this.#validateNullifiers(tx, thisBlockNullifiers)) === INVALID_TX) {
+        invalidTxs.push(tx);
+        continue;
+      }
+
+      validTxs.push(tx);
+    }
+
+    return [validTxs, invalidTxs];
+  }
+
+  /**
+   * It rejects transactions with the wrong chain id.
+   * @param tx - The transaction.
+   * @returns Whether the transaction is valid.
+   */
+  #validateMetadata(tx: Tx | ProcessedTx): TxValidationStatus {
+    if (!tx.data.constants.txContext.chainId.equals(this.#globalVariables.chainId)) {
+      this.#log.warn(
+        `Rejecting tx ${Tx.getHash(
+          tx,
+        )} because of incorrect chain ${tx.data.constants.txContext.chainId.toString()} != ${this.#globalVariables.chainId.toString()}`,
+      );
+      return INVALID_TX;
+    }
+
+    return VALID_TX;
+  }
+
+  /**
+   * It looks for duplicate nullifiers:
+   * - in the same transaction
+   * - in the same block
+   * - in the nullifier tree
+   *
+   * Nullifiers prevent double spends in a private context.
+   *
+   * @param tx - The transaction.
+   * @returns VALID_TX if the tx emits no duplicate nullifiers, INVALID_TX otherwise.
+   */
+  async #validateNullifiers(tx: Tx | ProcessedTx, thisBlockNullifiers: Set<bigint>): Promise<TxValidationStatus> {
+    const newNullifiers = TxValidator.#extractNullifiers(tx);
+
+    // Ditch this tx if it has repeated nullifiers
+    const uniqueNullifiers = new Set(newNullifiers);
+    if (uniqueNullifiers.size !== newNullifiers.length) {
+      this.#log.warn(`Rejecting tx for emitting duplicate nullifiers, tx hash ${Tx.getHash(tx)}`);
+      return INVALID_TX;
+    }
+
+    for (const nullifier of newNullifiers) {
+      if (thisBlockNullifiers.has(nullifier)) {
+        this.#log.warn(`Rejecting tx for repeating a nullifier in the same block, tx hash ${Tx.getHash(tx)}`);
+        return INVALID_TX;
+      }
+
+      thisBlockNullifiers.add(nullifier);
+    }
+
+    const nullifierIndexes = await Promise.all(
+      newNullifiers.map(n => this.#nullifierSource.getNullifierIndex(new Fr(n))),
+    );
+
+    const hasDuplicates = nullifierIndexes.some(index => index !== undefined);
+    if (hasDuplicates) {
+      this.#log.warn(`Rejecting tx for repeating nullifiers from the past, tx hash ${Tx.getHash(tx)}`);
+      return INVALID_TX;
+    }
+
+    return VALID_TX;
+  }
+
+  static #extractNullifiers(tx: Tx | ProcessedTx): bigint[] {
+    return [...tx.data.endNonRevertibleData.newNullifiers, ...tx.data.end.newNullifiers]
+      .filter(x => !x.isEmpty())
+      .map(x => x.value.toBigInt());
+  }
+}