
Commit 5becb99

chore: refactor get_tx_effects_hash_input_helper (#11213)
This PR does some of the refactoring mentioned in #11037. I've removed some of the fixed-length for-loops and avoided unnecessary byte decompositions.
1 parent 7e628cc commit 5becb99
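
To illustrate what "avoided unnecessary byte decompositions" means in practice, here is a minimal Noir sketch (not part of the commit; it simply mirrors the new encode_blob_prefix from the diff below, with explanatory comments added): the one-byte type tag and the two-byte length are packed into a single field element by weighting with powers of 256, instead of building a byte array and calling field_from_bytes.

// Sketch of the packing pattern this refactor uses (mirrors the new encode_blob_prefix).
// Target layout, big-endian bytes: [input_type, 0, len_hi, len_lo]
pub fn encode_blob_prefix(input_type: u8, array_len: u32) -> Field {
    let array_len = array_len as Field;
    // Constrain the length to 16 bits so it cannot spill into the type byte.
    array_len.assert_max_bit_size::<16>();
    // input_type sits three bytes above the length, i.e. a weight of 256^3.
    (input_type as Field) * (256 * 256 * 256) + array_len
}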

File tree

1 file changed (+73 −97 lines)

  • noir-projects/noir-protocol-circuits/crates/rollup-lib/src


noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr  (+73 −97)
@@ -4,7 +4,10 @@ use crate::abis::{
 };
 use super::abis::tx_effect::TxEffect;
 use dep::types::{
-    abis::{log_hash::ScopedLogHash, public_data_write::PublicDataWrite, sponge_blob::SpongeBlob},
+    abis::{
+        log::Log, log_hash::ScopedLogHash, public_data_write::PublicDataWrite,
+        sponge_blob::SpongeBlob,
+    },
     constants::{
         AZTEC_MAX_EPOCH_DURATION, CONTRACT_CLASS_LOGS_PREFIX, L2_L1_MSGS_PREFIX,
         MAX_CONTRACT_CLASS_LOGS_PER_TX, MAX_L2_TO_L1_MSGS_PER_TX, MAX_NOTE_HASHES_PER_TX,
@@ -17,7 +20,7 @@ use dep::types::{
     hash::{accumulate_sha256, silo_unencrypted_log_hash},
     merkle_tree::VariableMerkleTree,
     traits::is_empty,
-    utils::{arrays::{array_concat, array_length, array_merge}, field::field_from_bytes},
+    utils::arrays::{array_length, array_merge},
 };
 use blob::blob_public_inputs::BlockBlobPublicInputs;

@@ -134,8 +137,9 @@ pub fn compute_kernel_out_hash(l2_to_l1_msgs: [Field; MAX_L2_TO_L1_MSGS_PER_TX])
  * Uses 2 bytes to encode the length even when we only need 1 to keep uniform.
  */
 pub fn encode_blob_prefix(input_type: u8, array_len: u32) -> Field {
-    let len_bytes = (array_len as Field).to_be_bytes::<2>();
-    field_from_bytes([input_type, 0, len_bytes[0], len_bytes[1]], true)
+    let array_len = array_len as Field;
+    array_len.assert_max_bit_size::<16>();
+    (input_type as Field) * (256 * 256 * 256) + array_len
 }

 // Tx effects consist of
@@ -185,7 +189,10 @@ pub(crate) fn append_tx_effects_for_blob(
 fn get_tx_effects_hash_input(
     tx_effect: TxEffect,
 ) -> ([Field; TX_EFFECTS_BLOB_HASH_INPUT_FIELDS], u32) {
-    let mut tx_effects_hash_input = unsafe { get_tx_effects_hash_input_helper(tx_effect) };
+    tx_effect.transaction_fee.assert_max_bit_size::<29 * 8>();
+    let TWO_POW_240 = 1766847064778384329583297500742918515827483896875618958121606201292619776;
+    let prefixed_tx_fee: Field =
+        (TX_FEE_PREFIX as Field) * TWO_POW_240 + (tx_effect.transaction_fee as Field);

     let note_hashes = tx_effect.note_hashes;
     let nullifiers = tx_effect.nullifiers;
@@ -201,6 +208,21 @@ fn get_tx_effects_hash_input(
         silo_unencrypted_log_hash(log)
     });

+    let mut tx_effects_hash_input = unsafe {
+        get_tx_effects_hash_input_helper(
+            tx_effect.tx_hash,
+            prefixed_tx_fee,
+            tx_effect.note_hashes,
+            tx_effect.nullifiers,
+            tx_effect.l2_to_l1_msgs,
+            public_data_update_requests,
+            private_logs,
+            unencrypted_logs,
+            contract_class_logs,
+            tx_effect.revert_code as Field,
+        )
+    };
+
     let mut offset = 0;
     let mut array_len = 0;

@@ -215,16 +237,7 @@ fn get_tx_effects_hash_input(

     // TX FEE
     // Using 29 bytes to encompass all reasonable fee lengths
-    assert_eq(
-        tx_effects_hash_input[offset],
-        field_from_bytes(
-            array_concat(
-                [TX_FEE_PREFIX, 0],
-                tx_effect.transaction_fee.to_be_bytes::<29>(),
-            ),
-            true,
-        ),
-    );
+    assert_eq(tx_effects_hash_input[offset], prefixed_tx_fee);
     offset += 1;

     // NB: The array_length function does NOT constrain we have a sorted left-packed array.
@@ -349,126 +362,99 @@ fn get_tx_effects_hash_input(
     }

     // Now we know the number of fields appended, we can assign the first value:
-    // TX_START_PREFIX | 0 | txlen[0] txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revert_code
-    // Start prefix is "tx_start".to_field() => 8 bytes
-    let prefix_bytes = TX_START_PREFIX.to_be_bytes::<8>();
-    let length_bytes = (offset as Field).to_be_bytes::<2>();
+    let expected_tx_start_field =
+        generate_tx_start_field(offset as Field, tx_effect.revert_code as Field);
     // REVERT CODE
-    assert_eq(
-        tx_effects_hash_input[0],
-        field_from_bytes(
-            array_concat(
-                prefix_bytes,
-                [
-                    0,
-                    length_bytes[0],
-                    length_bytes[1],
-                    0,
-                    REVERT_CODE_PREFIX,
-                    0,
-                    tx_effect.revert_code,
-                ],
-            ),
-            true,
-        ),
-    );
+    assert_eq(tx_effects_hash_input[0], expected_tx_start_field);

     (tx_effects_hash_input, offset)
 }

+fn generate_tx_start_field(offset: Field, revert_code: Field) -> Field {
+    // TX_START_PREFIX | 0 | 0 | 0 | 0 | REVERT_CODE_PREFIX | 0 | 0
+    let constant = (TX_START_PREFIX as Field) * (256 * 256 * 256 * 256 * 256 * 256 * 256)
+        + (REVERT_CODE_PREFIX as Field) * (256 * 256);
+
+    let tx_start_field = constant + offset * (256 * 256 * 256 * 256) + revert_code;
+
+    tx_start_field
+}
+
 unconstrained fn get_tx_effects_hash_input_helper(
-    tx_effect: TxEffect,
+    tx_hash: Field,
+    prefixed_tx_fee: Field,
+    note_hashes: [Field; MAX_NOTE_HASHES_PER_TX],
+    nullifiers: [Field; MAX_NULLIFIERS_PER_TX],
+    l2_to_l1_msgs: [Field; MAX_L2_TO_L1_MSGS_PER_TX],
+    public_data_update_requests: [PublicDataWrite; MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX],
+    private_logs: [Log<PRIVATE_LOG_SIZE_IN_FIELDS>; MAX_PRIVATE_LOGS_PER_TX],
+    unencrypted_logs: [Field; MAX_UNENCRYPTED_LOGS_PER_TX],
+    contract_class_logs: [Field; MAX_CONTRACT_CLASS_LOGS_PER_TX],
+    revert_code: Field,
 ) -> [Field; TX_EFFECTS_BLOB_HASH_INPUT_FIELDS] {
     let mut tx_effects_hash_input = [0; TX_EFFECTS_BLOB_HASH_INPUT_FIELDS];

-    let note_hashes = tx_effect.note_hashes;
-    let nullifiers = tx_effect.nullifiers;
-
     // Public writes are the concatenation of all non-empty user update requests and protocol update requests, then padded with zeroes.
     // The incoming all_public_data_update_requests may have empty update requests in the middle, so we move those to the end of the array.
-    let public_data_update_requests =
-        get_all_update_requests_for_tx_effects(tx_effect.public_data_writes);
-    let private_logs = tx_effect.private_logs;
-    let unencrypted_logs =
-        tx_effect.unencrypted_logs_hashes.map(|log: ScopedLogHash| silo_unencrypted_log_hash(log));
-    let contract_class_logs = tx_effect.contract_class_logs_hashes.map(|log: ScopedLogHash| {
-        silo_unencrypted_log_hash(log)
-    });
-
-    let mut offset = 0;
-    let mut array_len = 0;
-
-    // NB: for publishing fields of blob data we use the first element of the blob to encode:
-    // TX_START_PREFIX | 0 | txlen[0] txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revert_code
-    // Two bytes are used to encode the number of fields appended here, given by 'offset'
-    // We only know the value once the appending is complete, hence we overwrite input[0] below
-    tx_effects_hash_input[offset] = 0;
-    offset += 1;

-    tx_effects_hash_input[offset] = tx_effect.tx_hash;
-    offset += 1;
+    tx_effects_hash_input[1] = tx_hash;

     // TX FEE
     // Using 29 bytes to encompass all reasonable fee lengths
-    tx_effects_hash_input[offset] = field_from_bytes(
-        array_concat(
-            [TX_FEE_PREFIX, 0],
-            tx_effect.transaction_fee.to_be_bytes::<29>(),
-        ),
-        true,
-    );
-    offset += 1;
+    tx_effects_hash_input[2] = prefixed_tx_fee;
+
+    let mut offset = 3;

     // NB: The array_length function does NOT constrain we have a sorted left-packed array.
     // We can use it because all inputs here come from the kernels which DO constrain left-packing.
     // If that ever changes, we will have to constrain it by counting items differently.
     // NOTE HASHES
-    array_len = array_length(note_hashes);
+    let array_len = array_length(note_hashes);
     if array_len != 0 {
         let notes_prefix = encode_blob_prefix(NOTES_PREFIX, array_len);
         tx_effects_hash_input[offset] = notes_prefix;
         offset += 1;

-        for j in 0..MAX_NOTE_HASHES_PER_TX {
+        for j in 0..array_len {
             tx_effects_hash_input[offset + j] = note_hashes[j];
         }
         offset += array_len;
     }

     // NULLIFIERS
-    array_len = array_length(nullifiers);
+    let array_len = array_length(nullifiers);
     if array_len != 0 {
         let nullifiers_prefix = encode_blob_prefix(NULLIFIERS_PREFIX, array_len);
         tx_effects_hash_input[offset] = nullifiers_prefix;
         offset += 1;

-        for j in 0..MAX_NULLIFIERS_PER_TX {
+        for j in 0..array_len {
             tx_effects_hash_input[offset + j] = nullifiers[j];
         }
         offset += array_len;
     }

     // L2 TO L1 MESSAGES
-    array_len = array_length(tx_effect.l2_to_l1_msgs);
+    let array_len = array_length(l2_to_l1_msgs);
     if array_len != 0 {
         let l2_to_l1_msgs_prefix = encode_blob_prefix(L2_L1_MSGS_PREFIX, array_len);
         tx_effects_hash_input[offset] = l2_to_l1_msgs_prefix;
         offset += 1;

-        for j in 0..MAX_L2_TO_L1_MSGS_PER_TX {
-            tx_effects_hash_input[offset + j] = tx_effect.l2_to_l1_msgs[j];
+        for j in 0..array_len {
+            tx_effects_hash_input[offset + j] = l2_to_l1_msgs[j];
         }
         offset += array_len;
     }

     // PUBLIC DATA UPDATE REQUESTS
-    array_len = array_length(public_data_update_requests);
+    let array_len = array_length(public_data_update_requests);
     if array_len != 0 {
         let public_data_update_requests_prefix =
             encode_blob_prefix(PUBLIC_DATA_UPDATE_REQUESTS_PREFIX, array_len * 2);
         tx_effects_hash_input[offset] = public_data_update_requests_prefix;
         offset += 1;
-        for j in 0..MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX {
+        for j in 0..array_len {
             tx_effects_hash_input[offset + j * 2] = public_data_update_requests[j].leaf_slot;
             tx_effects_hash_input[offset + j * 2 + 1] = public_data_update_requests[j].value;
         }
@@ -477,13 +463,14 @@ unconstrained fn get_tx_effects_hash_input_helper(

     // TODO(Miranda): squash 0s in a nested loop and add len prefix?
     // PRIVATE_LOGS
-    array_len = array_length(private_logs) * PRIVATE_LOG_SIZE_IN_FIELDS;
-    if array_len != 0 {
+    let num_private_logs = array_length(private_logs);
+    if num_private_logs != 0 {
+        let array_len = num_private_logs * PRIVATE_LOG_SIZE_IN_FIELDS;
         let private_logs_prefix = encode_blob_prefix(PRIVATE_LOGS_PREFIX, array_len);
         tx_effects_hash_input[offset] = private_logs_prefix;
         offset += 1;

-        for j in 0..MAX_PRIVATE_LOGS_PER_TX {
+        for j in 0..num_private_logs {
             for k in 0..PRIVATE_LOG_SIZE_IN_FIELDS {
                 let index = offset + j * PRIVATE_LOG_SIZE_IN_FIELDS + k;
                 tx_effects_hash_input[index] = private_logs[j].fields[k];
@@ -495,44 +482,33 @@ unconstrained fn get_tx_effects_hash_input_helper(
     // TODO(#8954): When logs are refactored into fields, we will append the values here
     // Currently appending the single log hash as an interim solution
     // UNENCRYPTED LOGS
-    array_len = array_length(unencrypted_logs);
+    let array_len = array_length(unencrypted_logs);
     if array_len != 0 {
         let unencrypted_logs_prefix = encode_blob_prefix(UNENCRYPTED_LOGS_PREFIX, array_len);
         tx_effects_hash_input[offset] = unencrypted_logs_prefix;
         offset += 1;

-        for j in 0..MAX_UNENCRYPTED_LOGS_PER_TX {
+        for j in 0..array_len {
             tx_effects_hash_input[offset + j] = unencrypted_logs[j];
         }
         offset += array_len;
     }

     // CONTRACT CLASS LOGS
-    array_len = array_length(contract_class_logs);
+    let array_len = array_length(contract_class_logs);
     if array_len != 0 {
         let contract_class_logs_prefix = encode_blob_prefix(CONTRACT_CLASS_LOGS_PREFIX, array_len);
         tx_effects_hash_input[offset] = contract_class_logs_prefix;
         offset += 1;

-        for j in 0..MAX_CONTRACT_CLASS_LOGS_PER_TX {
+        for j in 0..array_len {
             tx_effects_hash_input[offset + j] = contract_class_logs[j];
         }
         offset += array_len;
     }

     // Now we know the number of fields appended, we can assign the first value:
-    // TX_START_PREFIX | 0 | txlen[0] txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revert_code
-    // Start prefix is "tx_start".to_field() => 8 bytes
-    let prefix_bytes = TX_START_PREFIX.to_be_bytes::<8>();
-    let length_bytes = (offset as Field).to_be_bytes::<2>();
-    // REVERT CODE
-    tx_effects_hash_input[0] = field_from_bytes(
-        array_concat(
-            prefix_bytes,
-            [0, length_bytes[0], length_bytes[1], 0, REVERT_CODE_PREFIX, 0, tx_effect.revert_code],
-        ),
-        true,
-    );
+    tx_effects_hash_input[0] = generate_tx_start_field(offset as Field, revert_code);

     tx_effects_hash_input
 }
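
A note on the two packing constants introduced above, plus a small sanity sketch (not part of the commit; it only assumes standard Noir test syntax): TWO_POW_240 places TX_FEE_PREFIX 30 bytes (240 bits) above the 29-byte fee, matching the old big-endian [TX_FEE_PREFIX, 0, fee_bytes] layout, and the powers of 256 in generate_tx_start_field reproduce the old 15-byte TX_START_PREFIX | 0 | txlen | 0 | REVERT_CODE_PREFIX | 0 | revert_code layout.

// Sanity sketch for the constants above (not part of this commit):
//   prefixed_tx_fee = TX_FEE_PREFIX * 256^30 + transaction_fee, and 256^30 == 2^240
//   tx_start field  = TX_START_PREFIX * 256^7 + offset * 256^4
//                     + REVERT_CODE_PREFIX * 256^2 + revert_code
#[test]
fn two_pow_240_equals_256_pow_30() {
    let two_pow_240: Field =
        1766847064778384329583297500742918515827483896875618958121606201292619776;
    let mut weight: Field = 1;
    for _i in 0..30 {
        weight = weight * 256;
    }
    assert_eq(weight, two_pow_240);
}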

0 commit comments
