@@ -28,6 +28,7 @@ using FF = AvmFlavorSettings::FF;
 using C = Column;
 using instr_fetching = bb::avm2::instr_fetching<FF>;
 using simulation::Instruction;
+using simulation::InstructionFetchingEvent;
 using simulation::Operand;
 using testing::random_bytes;
@@ -47,7 +48,7 @@ TEST(InstrFetchingConstrainingTest, Add8WithTraceGen)
         .operands = { Operand::u8(0x34), Operand::u8(0x35), Operand::u8(0x36) },
     };

-    std::vector<uint8_t> bytecode = add_8_instruction.encode();
+    std::vector<uint8_t> bytecode = add_8_instruction.serialize();

     builder.process_instruction_fetching({ { .bytecode_id = 1,
                                              .pc = 0,
@@ -77,7 +78,7 @@ TEST(InstrFetchingConstrainingTest, EcaddWithTraceGen)
                       Operand::u16(0x127f) },
     };

-    std::vector<uint8_t> bytecode = ecadd_instruction.encode();
+    std::vector<uint8_t> bytecode = ecadd_instruction.serialize();
     builder.process_instruction_fetching({ { .bytecode_id = 1,
                                              .pc = 0,
                                              .instruction = ecadd_instruction,
@@ -89,33 +90,33 @@ TEST(InstrFetchingConstrainingTest, EcaddWithTraceGen)
 }

 // Helper routine generating a vector of instruction fetching events for each
-// opcode. Note that operands of type TAG might fall outside of their valid range.
-std::vector<simulation::InstructionFetchingEvent> gen_instr_events_each_opcode()
+// opcode.
+std::vector<InstructionFetchingEvent> gen_instr_events_each_opcode()
 {
     std::vector<uint8_t> bytecode;
-    std::vector<uint32_t> pc_positions;
+    std::vector<Instruction> instructions;
     constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);
-    pc_positions.reserve(num_opcodes);
+    instructions.reserve(num_opcodes);
+    std::array<uint32_t, num_opcodes> pc_positions;

     for (size_t i = 0; i < num_opcodes; i++) {
-        pc_positions.emplace_back(static_cast<uint32_t>(bytecode.size()));
-        bytecode.emplace_back(i);
-        const auto instruction_bytes =
-            random_bytes(WIRE_INSTRUCTION_SPEC.at(static_cast<WireOpCode>(i)).size_in_bytes - 1);
+        pc_positions.at(i) = static_cast<uint32_t>(bytecode.size());
+        const auto instr = testing::random_instruction(static_cast<WireOpCode>(i));
+        instructions.emplace_back(instr);
+        const auto instruction_bytes = instr.serialize();
         bytecode.insert(bytecode.end(),
                         std::make_move_iterator(instruction_bytes.begin()),
                         std::make_move_iterator(instruction_bytes.end()));
     }

-    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
+    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
+    // Always use *bytecode_ptr from now on instead of bytecode as this one was moved.

-    std::vector<simulation::InstructionFetchingEvent> instr_events;
+    std::vector<InstructionFetchingEvent> instr_events;
     instr_events.reserve(num_opcodes);
-
     for (size_t i = 0; i < num_opcodes; i++) {
-        const auto instr = simulation::decode_instruction(bytecode, pc_positions.at(i));
-        instr_events.emplace_back(simulation::InstructionFetchingEvent{
-            .bytecode_id = 1, .pc = pc_positions.at(i), .instruction = instr, .bytecode = bytecode_ptr });
+        instr_events.emplace_back(InstructionFetchingEvent{
+            .bytecode_id = 1, .pc = pc_positions.at(i), .instruction = instructions.at(i), .bytecode = bytecode_ptr });
     }
     return instr_events;
 }
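
For reference, a minimal usage sketch of the helper above (not part of the diff), assuming the BytecodeTraceBuilder and TestTraceContainer APIs used in the hunks below:

    BytecodeTraceBuilder bytecode_builder;
    TestTraceContainer trace;
    // One event per wire opcode, all sharing the same bytecode via shared_ptr.
    const auto instr_fetch_events = gen_instr_events_each_opcode();
    bytecode_builder.process_instruction_fetching(instr_fetch_events, trace);
    check_relation<instr_fetching>(trace);
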
@@ -149,20 +150,20 @@ TEST(InstrFetchingConstrainingTest, NegativeWrongOperand)
         instr_fetching::SR_OP6_BYTES_DECOMPOSITION, instr_fetching::SR_OP7_BYTES_DECOMPOSITION,
     };

-    const std::vector<C> operand_cols = {
+    constexpr std::array<C, 8> operand_cols = {
         C::instr_fetching_indirect, C::instr_fetching_op1, C::instr_fetching_op2, C::instr_fetching_op3,
         C::instr_fetching_op4, C::instr_fetching_op5, C::instr_fetching_op6, C::instr_fetching_op7,
     };

     for (const auto& opcode : opcodes) {
         TestTraceContainer trace;
         const auto instr = testing::random_instruction(opcode);
-        builder.process_instruction_fetching({ simulation::InstructionFetchingEvent{
-                                                 .bytecode_id = 1,
-                                                 .pc = 0,
-                                                 .instruction = instr,
-                                                 .bytecode = std::make_shared<std::vector<uint8_t>>(instr.encode()) } },
-                                             trace);
+        builder.process_instruction_fetching(
+            { { .bytecode_id = 1,
+                .pc = 0,
+                .instruction = instr,
+                .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } },
+            trace);
         check_relation<instr_fetching>(trace);

         EXPECT_EQ(trace.get_num_rows(), 1);
@@ -210,7 +211,7 @@ TEST(InstrFetchingConstrainingTest, BcDecompositionInteractions)

     const auto instr_fetch_events = gen_instr_events_each_opcode();
     bytecode_builder.process_instruction_fetching(instr_fetch_events, trace);
-    bytecode_builder.process_decomposition({ simulation::BytecodeDecompositionEvent{
+    bytecode_builder.process_decomposition({ {
                                                .bytecode_id = instr_fetch_events.at(0).bytecode_id,
                                                .bytecode = instr_fetch_events.at(0).bytecode,
                                            } },
@@ -226,6 +227,7 @@ TEST(InstrFetchingConstrainingTest, BcDecompositionInteractions)
 TEST(InstrFetchingConstrainingTest, NegativeWrongWireInstructionSpecInteractions)
 {
     using wire_instr_spec_lookup = lookup_instr_fetching_wire_instruction_info_relation<FF>;
+    using tracegen::LookupIntoIndexedByClk;

     BytecodeTraceBuilder bytecode_builder;
     PrecomputedTraceBuilder precomputed_builder;
@@ -238,19 +240,20 @@ TEST(InstrFetchingConstrainingTest, NegativeWrongWireInstructionSpecInteractions
         TestTraceContainer trace;
         const auto instr = testing::random_instruction(opcode);
         bytecode_builder.process_instruction_fetching(
-            { simulation::InstructionFetchingEvent{ .bytecode_id = 1,
-                                                    .pc = 0,
-                                                    .instruction = instr,
-                                                    .bytecode =
-                                                        std::make_shared<std::vector<uint8_t>>(instr.encode()) } },
+            { { .bytecode_id = 1,
+                .pc = 0,
+                .instruction = instr,
+                .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } },
             trace);
         precomputed_builder.process_wire_instruction_spec(trace);
         precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

-        tracegen::LookupIntoIndexedByClk<wire_instr_spec_lookup::Settings>().process(trace);
+        LookupIntoIndexedByClk<wire_instr_spec_lookup::Settings>().process(trace);
+
+        ASSERT_EQ(trace.get(C::lookup_instr_fetching_wire_instruction_info_counts, static_cast<uint32_t>(opcode)), 1);
         check_interaction<wire_instr_spec_lookup>(trace);

-        const std::vector<C> mutated_cols = {
+        constexpr std::array<C, 20> mutated_cols = {
             C::instr_fetching_exec_opcode, C::instr_fetching_instr_size_in_bytes, C::instr_fetching_sel_op_dc_0,
             C::instr_fetching_sel_op_dc_1, C::instr_fetching_sel_op_dc_2, C::instr_fetching_sel_op_dc_3,
             C::instr_fetching_sel_op_dc_4, C::instr_fetching_sel_op_dc_5, C::instr_fetching_sel_op_dc_6,
@@ -265,6 +268,11 @@ TEST(InstrFetchingConstrainingTest, NegativeWrongWireInstructionSpecInteractions
             auto mutated_trace = trace;
             const FF mutated_value = trace.get(col, 0) + 1; // Mutate to value + 1
             mutated_trace.set(col, 0, mutated_value);
+
+            // We do not need to re-run LookupIntoIndexedByClk<wire_instr_spec_lookup::Settings>().process(trace);
+            // because we never mutate the indexing column for this lookup (clk) and for this lookup
+            // find_in_dst only uses column C::instr_fetching_bd0 mapped to (clk). So, the counts are still valid.
+
             EXPECT_THROW_WITH_MESSAGE(check_interaction<wire_instr_spec_lookup>(mutated_trace),
                                       "Relation.*WIRE_INSTRUCTION_INFO.* ACCUMULATION.* is non-zero");
         }
@@ -275,6 +283,7 @@ TEST(InstrFetchingConstrainingTest, NegativeWrongWireInstructionSpecInteractions
 TEST(InstrFetchingConstrainingTest, NegativeWrongBcDecompositionInteractions)
 {
     using bc_decomposition_lookup = lookup_instr_fetching_bytes_from_bc_dec_relation<FF>;
+    using tracegen::LookupIntoDynamicTableSequential;

     TestTraceContainer trace;
     BytecodeTraceBuilder bytecode_builder;
@@ -286,24 +295,25 @@ TEST(InstrFetchingConstrainingTest, NegativeWrongBcDecompositionInteractions)
     for (const auto& opcode : opcodes) {
         TestTraceContainer trace;
         const auto instr = testing::random_instruction(opcode);
-        auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(instr.encode());
-        bytecode_builder.process_instruction_fetching({ simulation::InstructionFetchingEvent{
+        auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(instr.serialize());
+        bytecode_builder.process_instruction_fetching({ {
                                                          .bytecode_id = 1,
                                                          .pc = 0,
                                                          .instruction = instr,
                                                          .bytecode = bytecode_ptr,
                                                      } },
                                                      trace);
-        bytecode_builder.process_decomposition({ simulation::BytecodeDecompositionEvent{
+        bytecode_builder.process_decomposition({ {
                                                   .bytecode_id = 1,
                                                   .bytecode = bytecode_ptr,
                                               } },
                                               trace);

-        tracegen::LookupIntoDynamicTableSequential<bc_decomposition_lookup::Settings>().process(trace);
-        check_interaction<bc_decomposition_lookup>(trace);
+        auto valid_trace = trace; // Keep original trace before lookup processing
+        LookupIntoDynamicTableSequential<bc_decomposition_lookup::Settings>().process(valid_trace);
+        check_interaction<bc_decomposition_lookup>(valid_trace);

-        const std::vector<C> mutated_cols = {
+        constexpr std::array<C, 39> mutated_cols = {
             C::instr_fetching_pc, C::instr_fetching_bytecode_id, C::instr_fetching_bd0, C::instr_fetching_bd1,
             C::instr_fetching_bd2, C::instr_fetching_bd3, C::instr_fetching_bd4, C::instr_fetching_bd5,
             C::instr_fetching_bd6, C::instr_fetching_bd7, C::instr_fetching_bd8, C::instr_fetching_bd9,
@@ -321,6 +331,13 @@ TEST(InstrFetchingConstrainingTest, NegativeWrongBcDecompositionInteractions)
             auto mutated_trace = trace;
             const FF mutated_value = trace.get(col, 0) + 1; // Mutate to value + 1
             mutated_trace.set(col, 0, mutated_value);
+
+            // This sets the length of the inverse polynomial via SetDummyInverses, so we still need to call this even
+            // though we know it will fail.
+            EXPECT_THROW_WITH_MESSAGE(
+                LookupIntoDynamicTableSequential<bc_decomposition_lookup::Settings>().process(mutated_trace),
+                "Failed.*BYTES_FROM_BC_DEC. Could not find tuple in destination.");
+
             EXPECT_THROW_WITH_MESSAGE(check_interaction<bc_decomposition_lookup>(mutated_trace),
                                       "Relation.*BYTES_FROM_BC_DEC.* ACCUMULATION.* is non-zero");
         }