feat(avm): keccak as blackbox function (#5722)
fcarreiro authored Apr 12, 2024
1 parent 3e44a58 commit 6ea677a
Showing 11 changed files with 117 additions and 94 deletions.
31 changes: 26 additions & 5 deletions avm-transpiler/src/transpile.rs
@@ -292,7 +292,7 @@ fn handle_foreign_call(
"avmOpcodeNullifierExists" => handle_nullifier_exists(avm_instrs, destinations, inputs),
"avmOpcodeL1ToL2MsgExists" => handle_l1_to_l2_msg_exists(avm_instrs, destinations, inputs),
"avmOpcodeSendL2ToL1Msg" => handle_send_l2_to_l1_msg(avm_instrs, destinations, inputs),
"avmOpcodeKeccak256" | "avmOpcodeSha256" => {
"avmOpcodeSha256" => {
handle_2_field_hash_instruction(avm_instrs, function, destinations, inputs)
}
"avmOpcodeGetContractInstance" => {
@@ -649,7 +649,6 @@ fn handle_send_l2_to_l1_msg(
/// Two-field hash instructions are instructions whose outputs are larger than a field element
///
/// This includes:
/// - keccak
/// - sha256
///
/// In the future the output of these may expand / contract depending on what is most efficient for the circuit
@@ -664,7 +663,7 @@
let message_offset_maybe = inputs[0];
let (message_offset, message_size) = match message_offset_maybe {
ValueOrArray::HeapArray(HeapArray { pointer, size }) => (pointer.0, size),
_ => panic!("Keccak | Sha256 address inputs destination should be a single value"),
_ => panic!("Sha256 address inputs destination should be a single value"),
};

assert!(destinations.len() == 1);
@@ -674,11 +673,10 @@
assert!(size == 2);
pointer.0
}
_ => panic!("Keccak | Poseidon address destination should be a single value"),
_ => panic!("Poseidon address destination should be a single value"),
};

let opcode = match function {
"avmOpcodeKeccak256" => AvmOpcode::KECCAK,
"avmOpcodeSha256" => AvmOpcode::SHA256,
_ => panic!(
"Transpiler doesn't know how to process ForeignCall function {:?}",
@@ -902,6 +900,29 @@ fn handle_black_box_function(avm_instrs: &mut Vec<AvmInstruction>, operation: &B
..Default::default()
});
}
BlackBoxOp::Keccak256 { message, output } => {
let message_offset = message.pointer.0;
let message_size_offset = message.size.0;
let dest_offset = output.pointer.0;
assert_eq!(output.size, 32, "Keccak256 output size must be 32!");

avm_instrs.push(AvmInstruction {
opcode: AvmOpcode::KECCAK,
indirect: Some(ZEROTH_OPERAND_INDIRECT | FIRST_OPERAND_INDIRECT),
operands: vec![
AvmOperand::U32 {
value: dest_offset as u32,
},
AvmOperand::U32 {
value: message_offset as u32,
},
AvmOperand::U32 {
value: message_size_offset as u32,
},
],
..Default::default()
});
}
_ => panic!("Transpiler doesn't know how to process {:?}", operation),
}
}
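For intuition, here is a minimal TypeScript sketch of what the emitted KECCAK instruction does at runtime, matching the behaviour exercised by the simulator tests below: the message length is read from messageSizeOffset, that many bytes are read starting at messageOffset, and the 32-byte digest is written starting at dstOffset. The flat number[] memory model and the executeKeccak name are illustrative, not the simulator's actual API.

import { Keccak } from 'sha3';

// Illustrative model only: a flat array of byte values stands in for AVM memory.
function executeKeccak(memory: number[], dstOffset: number, messageOffset: number, messageSizeOffset: number): void {
  const size = memory[messageSizeOffset]; // message length is itself stored in memory
  const input = Buffer.from(memory.slice(messageOffset, messageOffset + size));
  const digest = new Keccak(256).update(input).digest(); // 32-byte keccak-256 digest
  for (let i = 0; i < 32; i++) {
    memory[dstOffset + i] = digest[i]; // one output byte per memory cell
  }
}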
3 changes: 0 additions & 3 deletions noir-projects/aztec-nr/aztec/src/avm/hash.nr
@@ -1,5 +1,2 @@
#[oracle(avmOpcodeKeccak256)]
pub fn keccak256<N>(input: [Field; N]) -> [Field; 2] {}

#[oracle(avmOpcodeSha256)]
pub fn sha256<N>(input: [Field; N]) -> [Field; 2] {}
@@ -32,7 +32,7 @@ contract AvmTest {
use dep::compressed_string::CompressedString;

// avm lib
use dep::aztec::avm::hash::{keccak256, sha256};
use dep::aztec::avm::hash::sha256;

#[aztec(storage)]
struct Storage {
@@ -145,8 +145,8 @@ contract AvmTest {
* Hashing functions
************************************************************************/
#[aztec(public-vm)]
fn keccak_hash(data: [Field; 10]) -> pub [Field; 2] {
keccak256(data)
fn keccak_hash(data: [u8; 10]) -> pub [u8; 32] {
dep::std::hash::keccak256(data, data.len() as u32)
}

#[aztec(public-vm)]
4 changes: 2 additions & 2 deletions yarn-project/aztec.js/src/utils/cheat_codes.ts
@@ -1,7 +1,7 @@
import { type Note, type PXE } from '@aztec/circuit-types';
import { type AztecAddress, type EthAddress, Fr } from '@aztec/circuits.js';
import { toBigIntBE, toHex } from '@aztec/foundation/bigint-buffer';
import { keccak, pedersenHash } from '@aztec/foundation/crypto';
import { keccak256, pedersenHash } from '@aztec/foundation/crypto';
import { createDebugLogger } from '@aztec/foundation/log';

import fs from 'fs';
@@ -167,7 +167,7 @@ export class EthCheatCodes {
public keccak256(baseSlot: bigint, key: bigint): bigint {
// abi encode (removing the 0x) - concat key and baseSlot (both padded to 32 bytes)
const abiEncoded = toHex(key, true).substring(2) + toHex(baseSlot, true).substring(2);
return toBigIntBE(keccak(Buffer.from(abiEncoded, 'hex')));
return toBigIntBE(keccak256(Buffer.from(abiEncoded, 'hex')));
}
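As a worked (illustrative) usage of the slot computation above: the storage slot of a Solidity mapping entry declared at base slot 2 with key 1 can be derived the same way. The values are made up, and the 32-byte padding behaviour of toHex(x, true) is assumed from the comment above.

import { toBigIntBE, toHex } from '@aztec/foundation/bigint-buffer';
import { keccak256 } from '@aztec/foundation/crypto';

// Illustrative values: mapping declared at base slot 2, key 1.
const baseSlot = 2n;
const key = 1n;
// keccak256(abi.encode(key, baseSlot)), with both values padded to 32 bytes.
const abiEncoded = toHex(key, true).substring(2) + toHex(baseSlot, true).substring(2);
const slot = toBigIntBE(keccak256(Buffer.from(abiEncoded, 'hex')));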

/**
4 changes: 2 additions & 2 deletions yarn-project/foundation/src/abi/event_selector.ts
@@ -1,5 +1,5 @@
import { fromHex, toBigIntBE } from '../bigint-buffer/index.js';
import { keccak, randomBytes } from '../crypto/index.js';
import { keccak256, randomBytes } from '../crypto/index.js';
import { type Fr } from '../fields/fields.js';
import { BufferReader } from '../serialize/buffer_reader.js';
import { Selector } from './selector.js';
@@ -44,7 +44,7 @@ export class EventSelector extends Selector {
if (/\s/.test(signature)) {
throw new Error('Signature cannot contain whitespace');
}
return EventSelector.fromBuffer(keccak(Buffer.from(signature)).subarray(0, Selector.SIZE));
return EventSelector.fromBuffer(keccak256(Buffer.from(signature)).subarray(0, Selector.SIZE));
}

/**
4 changes: 2 additions & 2 deletions yarn-project/foundation/src/abi/function_selector.ts
@@ -1,5 +1,5 @@
import { fromHex, toBigIntBE } from '../bigint-buffer/index.js';
import { keccak, randomBytes } from '../crypto/index.js';
import { keccak256, randomBytes } from '../crypto/index.js';
import { type Fr } from '../fields/fields.js';
import { BufferReader } from '../serialize/buffer_reader.js';
import { FieldReader } from '../serialize/field_reader.js';
@@ -72,7 +72,7 @@ export class FunctionSelector extends Selector {
if (/\s/.test(signature)) {
throw new Error('Signature cannot contain whitespace');
}
return FunctionSelector.fromBuffer(keccak(Buffer.from(signature)).subarray(0, Selector.SIZE));
return FunctionSelector.fromBuffer(keccak256(Buffer.from(signature)).subarray(0, Selector.SIZE));
}

/**
2 changes: 1 addition & 1 deletion yarn-project/foundation/src/crypto/keccak/index.ts
@@ -6,7 +6,7 @@ import { Keccak } from 'sha3';
* @param input - The input buffer to be hashed.
* @returns The computed Keccak-256 hash as a Buffer.
*/
export function keccak(input: Buffer) {
export function keccak256(input: Buffer) {
const hash = new Keccak(256);
return hash.update(input).digest();
}
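A small usage sketch of the renamed helper, mirroring how the updated selector code above consumes it; the signature string is made up, and Selector.SIZE is assumed to be 4 bytes.

import { keccak256 } from '@aztec/foundation/crypto';

// First 4 bytes of the keccak-256 digest of the signature, as
// FunctionSelector.fromSignature / EventSelector.fromSignature now compute it.
const selectorBytes = keccak256(Buffer.from('transfer(Field,Field)')).subarray(0, 4);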
2 changes: 1 addition & 1 deletion yarn-project/pxe/src/kernel_prover/kernel_prover.ts
@@ -214,7 +214,7 @@ export class KernelProver {
await this.oracle.getContractClassIdPreimage(contractClassId);

// TODO(#262): Use real acir hash
// const acirHash = keccak(Buffer.from(bytecode, 'hex'));
// const acirHash = keccak256(Buffer.from(bytecode, 'hex'));
const acirHash = Fr.fromBuffer(Buffer.alloc(32, 0));

// TODO
60 changes: 33 additions & 27 deletions yarn-project/simulator/src/avm/avm_simulator.test.ts
@@ -2,7 +2,7 @@ import { UnencryptedL2Log } from '@aztec/circuit-types';
import { computeVarArgsHash } from '@aztec/circuits.js/hash';
import { EventSelector, FunctionSelector } from '@aztec/foundation/abi';
import { AztecAddress } from '@aztec/foundation/aztec-address';
import { keccak, pedersenHash, poseidon2Hash, sha256 } from '@aztec/foundation/crypto';
import { keccak256, pedersenHash, poseidon2Hash, sha256 } from '@aztec/foundation/crypto';
import { EthAddress } from '@aztec/foundation/eth-address';
import { Fr } from '@aztec/foundation/fields';
import { type Fieldable } from '@aztec/foundation/serialize';
@@ -13,7 +13,7 @@ import { strict as assert } from 'assert';

import { isAvmBytecode } from '../public/transitional_adaptors.js';
import { AvmMachineState } from './avm_machine_state.js';
import { TypeTag } from './avm_memory_types.js';
import { Field, type MemoryValue, TypeTag, Uint8 } from './avm_memory_types.js';
import { AvmSimulator } from './avm_simulator.js';
import {
adjustCalldataIndex,
@@ -122,37 +122,36 @@ describe('AVM simulator: transpiled Noir contracts', () => {
});

describe.each([
['sha256_hash', sha256],
['keccak_hash', keccak],
])('Hashes with 2 fields returned in noir contracts', (name: string, hashFunction: (data: Buffer) => Buffer) => {
it(`Should execute contract function that performs ${name} hash`, async () => {
const calldata = [...Array(10)].map(_ => Fr.random());
const hash = hashFunction(Buffer.concat(calldata.map(f => f.toBuffer())));
[
'sha256_hash',
/*input=*/ randomFields(10),
/*output=*/ (fields: Field[]) => {
const resBuffer = sha256(Buffer.concat(fields.map(f => f.toBuffer())));
return [new Fr(resBuffer.subarray(0, 16)), new Fr(resBuffer.subarray(16, 32))];
},
],
[
'keccak_hash',
/*input=*/ randomBytes(10),
/*output=*/ (bytes: Uint8[]) => [...keccak256(Buffer.concat(bytes.map(b => b.toBuffer())))].map(b => new Fr(b)),
],
['poseidon2_hash', /*input=*/ randomFields(10), /*output=*/ (fields: Fieldable[]) => [poseidon2Hash(fields)]],
['pedersen_hash', /*input=*/ randomFields(10), /*output=*/ (fields: Fieldable[]) => [pedersenHash(fields)]],
[
'pedersen_hash_with_index',
/*input=*/ randomFields(10),
/*output=*/ (fields: Fieldable[]) => [pedersenHash(fields, /*index=*/ 20)],
],
])('Hashes in noir contracts', (name: string, input: MemoryValue[], output: (msg: any[]) => Fr[]) => {
it(`Should execute contract function that performs ${name}`, async () => {
const calldata = input.map(e => e.toFr());

const context = initContext({ env: initExecutionEnvironment({ calldata }) });
const bytecode = getAvmTestContractBytecode(name);
const results = await new AvmSimulator(context).executeBytecode(bytecode);

expect(results.reverted).toBe(false);
expect(results.output).toEqual([new Fr(hash.subarray(0, 16)), new Fr(hash.subarray(16, 32))]);
});
});

describe.each([
['poseidon2_hash', poseidon2Hash],
['pedersen_hash', pedersenHash],
['pedersen_hash_with_index', (m: Fieldable[]) => pedersenHash(m, 20)],
])('Hashes with field returned in noir contracts', (name: string, hashFunction: (data: Fieldable[]) => Fr) => {
it(`Should execute contract function that performs ${name} hash`, async () => {
const calldata = [...Array(10)].map(_ => Fr.random());
const hash = hashFunction(calldata);

const context = initContext({ env: initExecutionEnvironment({ calldata }) });
const bytecode = getAvmTestContractBytecode(name);
const results = await new AvmSimulator(context).executeBytecode(bytecode);

expect(results.reverted).toBe(false);
expect(results.output).toEqual([new Fr(hash)]);
expect(results.output).toEqual(output(input));
});
});

Expand Down Expand Up @@ -924,3 +923,10 @@ function getAvmNestedCallsTestContractBytecode(functionName: string): Buffer {
);
return artifact.bytecode;
}

function randomBytes(length: number): Uint8[] {
return [...Array(length)].map(_ => new Uint8(Math.floor(Math.random() * 255)));
}
function randomFields(length: number): Field[] {
return [...Array(length)].map(_ => new Field(Fr.random()));
}
62 changes: 33 additions & 29 deletions yarn-project/simulator/src/avm/opcodes/hashing.test.ts
@@ -1,7 +1,7 @@
import { keccak, pedersenHash, sha256 } from '@aztec/foundation/crypto';
import { keccak256, pedersenHash, sha256 } from '@aztec/foundation/crypto';

import { type AvmContext } from '../avm_context.js';
import { Field, Uint32 } from '../avm_memory_types.js';
import { Field, Uint8, Uint32 } from '../avm_memory_types.js';
import { initContext } from '../fixtures/index.js';
import { Addressing, AddressingMode } from './addressing_mode.js';
import { Keccak, Pedersen, Poseidon2, Sha256 } from './hashing.js';
@@ -74,61 +74,65 @@ describe('Hashing Opcodes', () => {
1, // indirect
...Buffer.from('12345678', 'hex'), // dstOffset
...Buffer.from('23456789', 'hex'), // messageOffset
...Buffer.from('3456789a', 'hex'), // hashSize
...Buffer.from('3456789a', 'hex'), // messageSizeOffset
]);
const inst = new Keccak(
/*indirect=*/ 1,
/*dstOffset=*/ 0x12345678,
/*messageOffset=*/ 0x23456789,
/*hashSize=*/ 0x3456789a,
/*messageSizeOffset=*/ 0x3456789a,
);

expect(Keccak.deserialize(buf)).toEqual(inst);
expect(inst.serialize()).toEqual(buf);
});

it('Should hash correctly - direct', async () => {
const args = [new Field(1n), new Field(2n), new Field(3n)];
const args = [...Array(10)].map(_ => new Uint8(Math.floor(Math.random() * 255)));
const indirect = 0;
const messageOffset = 0;
const messageSizeOffset = 15;
const dstOffset = 20;
context.machineState.memory.set(messageSizeOffset, new Uint32(args.length));
context.machineState.memory.setSlice(messageOffset, args);

const dstOffset = 3;
await new Keccak(indirect, dstOffset, messageOffset, messageSizeOffset).execute(context);

const inputBuffer = Buffer.concat(args.map(field => field.toBuffer()));
const expectedHash = keccak(inputBuffer);
await new Keccak(indirect, dstOffset, messageOffset, args.length).execute(context);

const result = context.machineState.memory.getSliceAs<Field>(dstOffset, 2);
const combined = Buffer.concat([result[0].toBuffer().subarray(16, 32), result[1].toBuffer().subarray(16, 32)]);

expect(combined).toEqual(expectedHash);
const resultBuffer = Buffer.concat(
context.machineState.memory.getSliceAs<Uint8>(dstOffset, 32).map(byte => byte.toBuffer()),
);
const inputBuffer = Buffer.concat(args.map(byte => byte.toBuffer()));
const expectedHash = keccak256(inputBuffer);
expect(resultBuffer).toEqual(expectedHash);
});

it('Should hash correctly - indirect', async () => {
const args = [new Field(1n), new Field(2n), new Field(3n)];
const args = [...Array(10)].map(_ => new Uint8(Math.floor(Math.random() * 255)));
const indirect = new Addressing([
/*dstOffset=*/ AddressingMode.INDIRECT,
/*messageOffset*/ AddressingMode.INDIRECT,
/*messageSizeOffset*/ AddressingMode.INDIRECT,
]).toWire();
const messageOffset = 0;
const argsLocation = 4;

const messageOffsetReal = 10;
const messageSizeOffset = 1;
const messageSizeOffsetReal = 100;
const dstOffset = 2;
const readLocation = 6;
const dstOffsetReal = 30;
context.machineState.memory.set(messageOffset, new Uint32(messageOffsetReal));
context.machineState.memory.set(dstOffset, new Uint32(dstOffsetReal));
context.machineState.memory.set(messageSizeOffset, new Uint32(messageSizeOffsetReal));
context.machineState.memory.set(messageSizeOffsetReal, new Uint32(args.length));
context.machineState.memory.setSlice(messageOffsetReal, args);

context.machineState.memory.set(messageOffset, new Uint32(argsLocation));
context.machineState.memory.set(dstOffset, new Uint32(readLocation));
context.machineState.memory.setSlice(argsLocation, args);
await new Keccak(indirect, dstOffset, messageOffset, messageSizeOffset).execute(context);

const inputBuffer = Buffer.concat(args.map(field => field.toBuffer()));
const expectedHash = keccak(inputBuffer);
await new Keccak(indirect, dstOffset, messageOffset, args.length).execute(context);

const result = context.machineState.memory.getSliceAs<Field>(readLocation, 2);
const combined = Buffer.concat([result[0].toBuffer().subarray(16, 32), result[1].toBuffer().subarray(16, 32)]);

expect(combined).toEqual(expectedHash);
const resultBuffer = Buffer.concat(
context.machineState.memory.getSliceAs<Uint8>(dstOffsetReal, 32).map(byte => byte.toBuffer()),
);
const inputBuffer = Buffer.concat(args.map(byte => byte.toBuffer()));
const expectedHash = keccak256(inputBuffer);
expect(resultBuffer).toEqual(expectedHash);
});
});

