Skip to content

Commit

Permalink
fix: Avoid huge compilation times in base rollup (#9113)
Browse files Browse the repository at this point in the history
This temporarily disables the checking of the decomposition against P, which,
together with the new sha256 Noir implementation, causes a blowup in
compilation time and RAM usage. This is fine because we are going to
replace this with Poseidon for blobs anyway.
  • Loading branch information
sirasistant authored Oct 9, 2024
1 parent 666fc38 commit 6eb43b6
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -312,7 +312,8 @@ pub fn compute_tx_effects_hash(

let mut hash_input_flattened = [0; TX_EFFECTS_HASH_INPUT_FIELDS * 32];
for offset in 0..TX_EFFECTS_HASH_INPUT_FIELDS {
let input_as_bytes: [u8; 32] = tx_effects_hash_input[offset].to_be_bytes();
// TODO: This is not checking that the decomposition is smaller than P
let input_as_bytes: [u8; 32] = tx_effects_hash_input[offset].to_be_radix(256);
for byte_index in 0..32 {
hash_input_flattened[offset * 32 + byte_index] = input_as_bytes[byte_index];
}
Expand Down
6 changes: 4 additions & 2 deletions noir-projects/noir-protocol-circuits/crates/types/src/hash.nr
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,8 @@ pub fn compute_tx_logs_hash(logs: [LogHash; MAX_ENCRYPTED_LOGS_PER_TX]) -> Field
// Convert each field element into a byte array and append the bytes to `hash_input_flattened`
let mut hash_input_flattened = [0; MAX_ENCRYPTED_LOGS_PER_TX * 32];
for offset in 0..MAX_ENCRYPTED_LOGS_PER_TX {
let input_as_bytes: [u8; 32] = logs[offset].value.to_be_bytes();
// TODO: This is not checking that the decomposition is smaller than P
let input_as_bytes: [u8; 32] = logs[offset].value.to_be_radix(256);
for byte_index in 0..32 {
hash_input_flattened[offset * 32 + byte_index] = input_as_bytes[byte_index];
}
Expand All @@ -233,7 +234,8 @@ pub fn compute_tx_note_logs_hash(logs: [LogHash; MAX_NOTE_ENCRYPTED_LOGS_PER_TX]
// Convert each field element into a byte array and append the bytes to `hash_input_flattened`
let mut hash_input_flattened = [0; MAX_NOTE_ENCRYPTED_LOGS_PER_TX * 32];
for offset in 0..MAX_NOTE_ENCRYPTED_LOGS_PER_TX {
let input_as_bytes: [u8; 32] = logs[offset].value.to_be_bytes();
// TODO: This is not checking that the decomposition is smaller than P
let input_as_bytes: [u8; 32] = logs[offset].value.to_be_radix(256);
for byte_index in 0..32 {
hash_input_flattened[offset * 32 + byte_index] = input_as_bytes[byte_index];
}
Expand Down

0 comments on commit 6eb43b6

Please sign in to comment.