use std::sync::Arc;

use derive_more::derive::From;
use openvm_circuit::{
    arch::{
        AirInventory, AirInventoryError, ChipInventory, ChipInventoryError, ExecutionBridge,
        ExecutorInventoryBuilder, ExecutorInventoryError, RowMajorMatrixArena, VmCircuitExtension,
        VmExecutionExtension, VmProverExtension,
    },
    system::{memory::SharedMemoryHelper, SystemPort},
};
use openvm_circuit_derive::{AnyEnum, Executor, MeteredExecutor, PreflightExecutor};
use openvm_circuit_primitives::{
    bitwise_op_lookup::{
        BitwiseOperationLookupAir, BitwiseOperationLookupBus, BitwiseOperationLookupChip,
        SharedBitwiseOperationLookupChip,
    },
    range_tuple::{
        RangeTupleCheckerAir, RangeTupleCheckerBus, RangeTupleCheckerChip,
        SharedRangeTupleCheckerChip,
    },
};
use openvm_instructions::{program::DEFAULT_PC_STEP, LocalOpcode, PhantomDiscriminant};
use openvm_rv32im_transpiler::{
    BaseAluOpcode, BranchEqualOpcode, BranchLessThanOpcode, DivRemOpcode, LessThanOpcode,
    MulHOpcode, MulOpcode, Rv32AuipcOpcode, Rv32HintStoreOpcode, Rv32JalLuiOpcode, Rv32JalrOpcode,
    Rv32LoadStoreOpcode, Rv32Phantom, ShiftOpcode,
};
use openvm_stark_backend::{
    config::{StarkGenericConfig, Val},
    engine::StarkEngine,
    p3_field::PrimeField32,
    prover::cpu::{CpuBackend, CpuDevice},
};
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;

use crate::{adapters::*, *};

cfg_if::cfg_if! {
    if #[cfg(feature = "cuda")] {
        mod cuda;
        pub use cuda::{
            Rv32ImGpuProverExt as Rv32ImProverExt,
        };
    } else {
        pub use self::{
            Rv32ImCpuProverExt as Rv32ImProverExt,
        };
    }
}

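/// Extension adding the RV32I base instructions: ALU ops, comparisons, shifts,
/// loads/stores, branches, jumps, AUIPC, and the RISC-V phantom hint instructions.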
#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize)]
pub struct Rv32I;

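/// Extension adding the RV32 IO (hint store) instructions.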
#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize)]
pub struct Rv32Io;

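/// Extension adding the RV32M multiplication/division instructions.
/// The default `range_tuple_checker_sizes` are `[2^8, 8 * 2^8] = [256, 2048]`.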
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct Rv32M {
    #[serde(default = "default_range_tuple_checker_sizes")]
    pub range_tuple_checker_sizes: [u32; 2],
}

impl Default for Rv32M {
    fn default() -> Self {
        Self {
            range_tuple_checker_sizes: default_range_tuple_checker_sizes(),
        }
    }
}

fn default_range_tuple_checker_sizes() -> [u32; 2] {
    [1 << 8, 8 * (1 << 8)]
}

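/// Executor enum covering every RV32I instruction executor.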
#[derive(Clone, From, AnyEnum, Executor, MeteredExecutor, PreflightExecutor)]
#[cfg_attr(
    feature = "aot",
    derive(
        openvm_circuit_derive::AotExecutor,
        openvm_circuit_derive::AotMeteredExecutor
    )
)]
pub enum Rv32IExecutor {
    BaseAlu(Rv32BaseAluExecutor),
    LessThan(Rv32LessThanExecutor),
    Shift(Rv32ShiftExecutor),
    LoadStore(Rv32LoadStoreExecutor),
    LoadSignExtend(Rv32LoadSignExtendExecutor),
    BranchEqual(Rv32BranchEqualExecutor),
    BranchLessThan(Rv32BranchLessThanExecutor),
    JalLui(Rv32JalLuiExecutor),
    Jalr(Rv32JalrExecutor),
    Auipc(Rv32AuipcExecutor),
}

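/// Executor enum covering the RV32M instruction executors.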
#[derive(Clone, From, AnyEnum, Executor, MeteredExecutor, PreflightExecutor)]
#[cfg_attr(
    feature = "aot",
    derive(
        openvm_circuit_derive::AotExecutor,
        openvm_circuit_derive::AotMeteredExecutor
    )
)]
pub enum Rv32MExecutor {
    Multiplication(Rv32MultiplicationExecutor),
    MultiplicationHigh(Rv32MulHExecutor),
    DivRem(Rv32DivRemExecutor),
}

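/// Executor enum covering the RV32 IO instruction executors.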
#[derive(Clone, Copy, From, AnyEnum, Executor, MeteredExecutor, PreflightExecutor)]
#[cfg_attr(
    feature = "aot",
    derive(
        openvm_circuit_derive::AotExecutor,
        openvm_circuit_derive::AotMeteredExecutor
    )
)]
pub enum Rv32IoExecutor {
    HintStore(Rv32HintStoreExecutor),
}

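// Registers the RV32I executors, keyed by their global opcodes, together with the
// RISC-V phantom sub-executors.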
impl<F: PrimeField32> VmExecutionExtension<F> for Rv32I {
    type Executor = Rv32IExecutor;

    fn extend_execution(
        &self,
        inventory: &mut ExecutorInventoryBuilder<F, Rv32IExecutor>,
    ) -> Result<(), ExecutorInventoryError> {
        let pointer_max_bits = inventory.pointer_max_bits();

        let base_alu =
            Rv32BaseAluExecutor::new(Rv32BaseAluAdapterExecutor, BaseAluOpcode::CLASS_OFFSET);
        inventory.add_executor(base_alu, BaseAluOpcode::iter().map(|x| x.global_opcode()))?;

        let lt = LessThanExecutor::new(Rv32BaseAluAdapterExecutor, LessThanOpcode::CLASS_OFFSET);
        inventory.add_executor(lt, LessThanOpcode::iter().map(|x| x.global_opcode()))?;

        let shift = ShiftExecutor::new(Rv32BaseAluAdapterExecutor, ShiftOpcode::CLASS_OFFSET);
        inventory.add_executor(shift, ShiftOpcode::iter().map(|x| x.global_opcode()))?;

        let load_store = LoadStoreExecutor::new(
            Rv32LoadStoreAdapterExecutor::new(pointer_max_bits),
            Rv32LoadStoreOpcode::CLASS_OFFSET,
        );
        inventory.add_executor(
            load_store,
            Rv32LoadStoreOpcode::iter()
                .take(Rv32LoadStoreOpcode::STOREB as usize + 1)
                .map(|x| x.global_opcode()),
        )?;

        let load_sign_extend =
            LoadSignExtendExecutor::new(Rv32LoadStoreAdapterExecutor::new(pointer_max_bits));
        inventory.add_executor(
            load_sign_extend,
            [Rv32LoadStoreOpcode::LOADB, Rv32LoadStoreOpcode::LOADH].map(|x| x.global_opcode()),
        )?;

        let beq = BranchEqualExecutor::new(
            Rv32BranchAdapterExecutor,
            BranchEqualOpcode::CLASS_OFFSET,
            DEFAULT_PC_STEP,
        );
        inventory.add_executor(beq, BranchEqualOpcode::iter().map(|x| x.global_opcode()))?;

        let blt = BranchLessThanExecutor::new(
            Rv32BranchAdapterExecutor,
            BranchLessThanOpcode::CLASS_OFFSET,
        );
        inventory.add_executor(blt, BranchLessThanOpcode::iter().map(|x| x.global_opcode()))?;

        let jal_lui = Rv32JalLuiExecutor::new(Rv32CondRdWriteAdapterExecutor::new(
            Rv32RdWriteAdapterExecutor,
        ));
        inventory.add_executor(jal_lui, Rv32JalLuiOpcode::iter().map(|x| x.global_opcode()))?;

        let jalr = Rv32JalrExecutor::new(Rv32JalrAdapterExecutor);
        inventory.add_executor(jalr, Rv32JalrOpcode::iter().map(|x| x.global_opcode()))?;

        let auipc = Rv32AuipcExecutor::new(Rv32RdWriteAdapterExecutor);
        inventory.add_executor(auipc, Rv32AuipcOpcode::iter().map(|x| x.global_opcode()))?;

        inventory.add_phantom_sub_executor(
            phantom::Rv32HintInputSubEx,
            PhantomDiscriminant(Rv32Phantom::HintInput as u16),
        )?;
        inventory.add_phantom_sub_executor(
            phantom::Rv32HintRandomSubEx,
            PhantomDiscriminant(Rv32Phantom::HintRandom as u16),
        )?;
        inventory.add_phantom_sub_executor(
            phantom::Rv32PrintStrSubEx,
            PhantomDiscriminant(Rv32Phantom::PrintStr as u16),
        )?;
        inventory.add_phantom_sub_executor(
            phantom::Rv32HintLoadByKeySubEx,
            PhantomDiscriminant(Rv32Phantom::HintLoadByKey as u16),
        )?;

        Ok(())
    }
}

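// Adds the RV32I AIRs to the circuit, reusing the shared 8-bit bitwise operation
// lookup if another extension has already registered it.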
impl<SC: StarkGenericConfig> VmCircuitExtension<SC> for Rv32I {
    fn extend_circuit(&self, inventory: &mut AirInventory<SC>) -> Result<(), AirInventoryError> {
        let SystemPort {
            execution_bus,
            program_bus,
            memory_bridge,
        } = inventory.system().port();

        let exec_bridge = ExecutionBridge::new(execution_bus, program_bus);
        let range_checker = inventory.range_checker().bus;
        let pointer_max_bits = inventory.pointer_max_bits();

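        // Reuse an existing 8-bit bitwise lookup AIR if present; otherwise allocate a
        // new bus index and add the lookup AIR here.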
        let bitwise_lu = {
            let existing_air = inventory.find_air::<BitwiseOperationLookupAir<8>>().next();
            if let Some(air) = existing_air {
                air.bus
            } else {
                let bus = BitwiseOperationLookupBus::new(inventory.new_bus_idx());
                let air = BitwiseOperationLookupAir::<8>::new(bus);
                inventory.add_air(air);
                air.bus
            }
        };

        let base_alu = Rv32BaseAluAir::new(
            Rv32BaseAluAdapterAir::new(exec_bridge, memory_bridge, bitwise_lu),
            BaseAluCoreAir::new(bitwise_lu, BaseAluOpcode::CLASS_OFFSET),
        );
        inventory.add_air(base_alu);

        let lt = Rv32LessThanAir::new(
            Rv32BaseAluAdapterAir::new(exec_bridge, memory_bridge, bitwise_lu),
            LessThanCoreAir::new(bitwise_lu, LessThanOpcode::CLASS_OFFSET),
        );
        inventory.add_air(lt);

        let shift = Rv32ShiftAir::new(
            Rv32BaseAluAdapterAir::new(exec_bridge, memory_bridge, bitwise_lu),
            ShiftCoreAir::new(bitwise_lu, range_checker, ShiftOpcode::CLASS_OFFSET),
        );
        inventory.add_air(shift);

        let load_store = Rv32LoadStoreAir::new(
            Rv32LoadStoreAdapterAir::new(
                memory_bridge,
                exec_bridge,
                range_checker,
                pointer_max_bits,
            ),
            LoadStoreCoreAir::new(Rv32LoadStoreOpcode::CLASS_OFFSET),
        );
        inventory.add_air(load_store);

        let load_sign_extend = Rv32LoadSignExtendAir::new(
            Rv32LoadStoreAdapterAir::new(
                memory_bridge,
                exec_bridge,
                range_checker,
                pointer_max_bits,
            ),
            LoadSignExtendCoreAir::new(range_checker),
        );
        inventory.add_air(load_sign_extend);

        let beq = Rv32BranchEqualAir::new(
            Rv32BranchAdapterAir::new(exec_bridge, memory_bridge),
            BranchEqualCoreAir::new(BranchEqualOpcode::CLASS_OFFSET, DEFAULT_PC_STEP),
        );
        inventory.add_air(beq);

        let blt = Rv32BranchLessThanAir::new(
            Rv32BranchAdapterAir::new(exec_bridge, memory_bridge),
            BranchLessThanCoreAir::new(bitwise_lu, BranchLessThanOpcode::CLASS_OFFSET),
        );
        inventory.add_air(blt);

        let jal_lui = Rv32JalLuiAir::new(
            Rv32CondRdWriteAdapterAir::new(Rv32RdWriteAdapterAir::new(memory_bridge, exec_bridge)),
            Rv32JalLuiCoreAir::new(bitwise_lu),
        );
        inventory.add_air(jal_lui);

        let jalr = Rv32JalrAir::new(
            Rv32JalrAdapterAir::new(memory_bridge, exec_bridge),
            Rv32JalrCoreAir::new(bitwise_lu, range_checker),
        );
        inventory.add_air(jalr);

        let auipc = Rv32AuipcAir::new(
            Rv32RdWriteAdapterAir::new(memory_bridge, exec_bridge),
            Rv32AuipcCoreAir::new(bitwise_lu),
        );
        inventory.add_air(auipc);

        Ok(())
    }
}

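/// Prover extension generating the RV32IM chips for the CPU backend. Chips are
/// added in the same order as their AIRs so that `next_air` stays in sync.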
pub struct Rv32ImCpuProverExt;
impl<E, SC, RA> VmProverExtension<E, RA, Rv32I> for Rv32ImCpuProverExt
where
    SC: StarkGenericConfig,
    E: StarkEngine<SC = SC, PB = CpuBackend<SC>, PD = CpuDevice<SC>>,
    RA: RowMajorMatrixArena<Val<SC>>,
    Val<SC>: PrimeField32,
{
    fn extend_prover(
        &self,
        _: &Rv32I,
        inventory: &mut ChipInventory<SC, RA, CpuBackend<SC>>,
    ) -> Result<(), ChipInventoryError> {
        let range_checker = inventory.range_checker()?.clone();
        let timestamp_max_bits = inventory.timestamp_max_bits();
        let pointer_max_bits = inventory.airs().pointer_max_bits();
        let mem_helper = SharedMemoryHelper::new(range_checker.clone(), timestamp_max_bits);

        let bitwise_lu = {
            let existing_chip = inventory
                .find_chip::<SharedBitwiseOperationLookupChip<8>>()
                .next();
            if let Some(chip) = existing_chip {
                chip.clone()
            } else {
                let air: &BitwiseOperationLookupAir<8> = inventory.next_air()?;
                let chip = Arc::new(BitwiseOperationLookupChip::new(air.bus));
                inventory.add_periphery_chip(chip.clone());
                chip
            }
        };

        inventory.next_air::<Rv32BaseAluAir>()?;
        let base_alu = Rv32BaseAluChip::new(
            BaseAluFiller::new(
                Rv32BaseAluAdapterFiller::new(bitwise_lu.clone()),
                bitwise_lu.clone(),
                BaseAluOpcode::CLASS_OFFSET,
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(base_alu);

        inventory.next_air::<Rv32LessThanAir>()?;
        let lt = Rv32LessThanChip::new(
            LessThanFiller::new(
                Rv32BaseAluAdapterFiller::new(bitwise_lu.clone()),
                bitwise_lu.clone(),
                LessThanOpcode::CLASS_OFFSET,
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(lt);

        inventory.next_air::<Rv32ShiftAir>()?;
        let shift = Rv32ShiftChip::new(
            ShiftFiller::new(
                Rv32BaseAluAdapterFiller::new(bitwise_lu.clone()),
                bitwise_lu.clone(),
                range_checker.clone(),
                ShiftOpcode::CLASS_OFFSET,
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(shift);

        inventory.next_air::<Rv32LoadStoreAir>()?;
        let load_store_chip = Rv32LoadStoreChip::new(
            LoadStoreFiller::new(
                Rv32LoadStoreAdapterFiller::new(pointer_max_bits, range_checker.clone()),
                Rv32LoadStoreOpcode::CLASS_OFFSET,
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(load_store_chip);

        inventory.next_air::<Rv32LoadSignExtendAir>()?;
        let load_sign_extend = Rv32LoadSignExtendChip::new(
            LoadSignExtendFiller::new(
                Rv32LoadStoreAdapterFiller::new(pointer_max_bits, range_checker.clone()),
                range_checker.clone(),
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(load_sign_extend);

        inventory.next_air::<Rv32BranchEqualAir>()?;
        let beq = Rv32BranchEqualChip::new(
            BranchEqualFiller::new(
                Rv32BranchAdapterFiller,
                BranchEqualOpcode::CLASS_OFFSET,
                DEFAULT_PC_STEP,
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(beq);

        inventory.next_air::<Rv32BranchLessThanAir>()?;
        let blt = Rv32BranchLessThanChip::new(
            BranchLessThanFiller::new(
                Rv32BranchAdapterFiller,
                bitwise_lu.clone(),
                BranchLessThanOpcode::CLASS_OFFSET,
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(blt);

        inventory.next_air::<Rv32JalLuiAir>()?;
        let jal_lui = Rv32JalLuiChip::new(
            Rv32JalLuiFiller::new(
                Rv32CondRdWriteAdapterFiller::new(Rv32RdWriteAdapterFiller),
                bitwise_lu.clone(),
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(jal_lui);

        inventory.next_air::<Rv32JalrAir>()?;
        let jalr = Rv32JalrChip::new(
            Rv32JalrFiller::new(
                Rv32JalrAdapterFiller,
                bitwise_lu.clone(),
                range_checker.clone(),
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(jalr);

        inventory.next_air::<Rv32AuipcAir>()?;
        let auipc = Rv32AuipcChip::new(
            Rv32AuipcFiller::new(Rv32RdWriteAdapterFiller, bitwise_lu.clone()),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(auipc);

        Ok(())
    }
}

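// Registers the RV32M executors (MUL, MULH, DIV/REM) with the executor inventory.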
impl<F> VmExecutionExtension<F> for Rv32M {
    type Executor = Rv32MExecutor;

    fn extend_execution(
        &self,
        inventory: &mut ExecutorInventoryBuilder<F, Rv32MExecutor>,
    ) -> Result<(), ExecutorInventoryError> {
        let mult =
            Rv32MultiplicationExecutor::new(Rv32MultAdapterExecutor, MulOpcode::CLASS_OFFSET);
        inventory.add_executor(mult, MulOpcode::iter().map(|x| x.global_opcode()))?;

        let mul_h = Rv32MulHExecutor::new(Rv32MultAdapterExecutor, MulHOpcode::CLASS_OFFSET);
        inventory.add_executor(mul_h, MulHOpcode::iter().map(|x| x.global_opcode()))?;

        let div_rem = Rv32DivRemExecutor::new(Rv32MultAdapterExecutor, DivRemOpcode::CLASS_OFFSET);
        inventory.add_executor(div_rem, DivRemOpcode::iter().map(|x| x.global_opcode()))?;

        Ok(())
    }
}

impl<SC: StarkGenericConfig> VmCircuitExtension<SC> for Rv32M {
    fn extend_circuit(&self, inventory: &mut AirInventory<SC>) -> Result<(), AirInventoryError> {
        let SystemPort {
            execution_bus,
            program_bus,
            memory_bridge,
        } = inventory.system().port();
        let exec_bridge = ExecutionBridge::new(execution_bus, program_bus);

        let bitwise_lu = {
            let existing_air = inventory.find_air::<BitwiseOperationLookupAir<8>>().next();
            if let Some(air) = existing_air {
                air.bus
            } else {
                let bus = BitwiseOperationLookupBus::new(inventory.new_bus_idx());
                let air = BitwiseOperationLookupAir::<8>::new(bus);
                inventory.add_air(air);
                air.bus
            }
        };

        let range_tuple_checker = {
            let existing_air = inventory.find_air::<RangeTupleCheckerAir<2>>().find(|c| {
                c.bus.sizes[0] >= self.range_tuple_checker_sizes[0]
                    && c.bus.sizes[1] >= self.range_tuple_checker_sizes[1]
            });
            if let Some(air) = existing_air {
                air.bus
            } else {
                let bus = RangeTupleCheckerBus::new(
                    inventory.new_bus_idx(),
                    self.range_tuple_checker_sizes,
                );
                let air = RangeTupleCheckerAir { bus };
                inventory.add_air(air);
                air.bus
            }
        };

        let mult = Rv32MultiplicationAir::new(
            Rv32MultAdapterAir::new(exec_bridge, memory_bridge),
            MultiplicationCoreAir::new(range_tuple_checker, MulOpcode::CLASS_OFFSET),
        );
        inventory.add_air(mult);

        let mul_h = Rv32MulHAir::new(
            Rv32MultAdapterAir::new(exec_bridge, memory_bridge),
            MulHCoreAir::new(bitwise_lu, range_tuple_checker),
        );
        inventory.add_air(mul_h);

        let div_rem = Rv32DivRemAir::new(
            Rv32MultAdapterAir::new(exec_bridge, memory_bridge),
            DivRemCoreAir::new(bitwise_lu, range_tuple_checker, DivRemOpcode::CLASS_OFFSET),
        );
        inventory.add_air(div_rem);

        Ok(())
    }
}

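// Builds the RV32M chips on the CPU backend, reusing (or creating) the shared
// bitwise lookup and a range tuple checker large enough for the configured sizes.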
impl<E, SC, RA> VmProverExtension<E, RA, Rv32M> for Rv32ImCpuProverExt
where
    SC: StarkGenericConfig,
    E: StarkEngine<SC = SC, PB = CpuBackend<SC>, PD = CpuDevice<SC>>,
    RA: RowMajorMatrixArena<Val<SC>>,
    Val<SC>: PrimeField32,
{
    fn extend_prover(
        &self,
        extension: &Rv32M,
        inventory: &mut ChipInventory<SC, RA, CpuBackend<SC>>,
    ) -> Result<(), ChipInventoryError> {
        let range_checker = inventory.range_checker()?.clone();
        let timestamp_max_bits = inventory.timestamp_max_bits();
        let mem_helper = SharedMemoryHelper::new(range_checker.clone(), timestamp_max_bits);

        let bitwise_lu = {
            let existing_chip = inventory
                .find_chip::<SharedBitwiseOperationLookupChip<8>>()
                .next();
            if let Some(chip) = existing_chip {
                chip.clone()
            } else {
                let air: &BitwiseOperationLookupAir<8> = inventory.next_air()?;
                let chip = Arc::new(BitwiseOperationLookupChip::new(air.bus));
                inventory.add_periphery_chip(chip.clone());
                chip
            }
        };

        let range_tuple_checker = {
            let existing_chip = inventory
                .find_chip::<SharedRangeTupleCheckerChip<2>>()
                .find(|c| {
                    c.bus().sizes[0] >= extension.range_tuple_checker_sizes[0]
                        && c.bus().sizes[1] >= extension.range_tuple_checker_sizes[1]
                });
            if let Some(chip) = existing_chip {
                chip.clone()
            } else {
                let air: &RangeTupleCheckerAir<2> = inventory.next_air()?;
                let chip = SharedRangeTupleCheckerChip::new(RangeTupleCheckerChip::new(air.bus));
                inventory.add_periphery_chip(chip.clone());
                chip
            }
        };

        inventory.next_air::<Rv32MultiplicationAir>()?;
        let mult = Rv32MultiplicationChip::new(
            MultiplicationFiller::new(
                Rv32MultAdapterFiller,
                range_tuple_checker.clone(),
                MulOpcode::CLASS_OFFSET,
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(mult);

        inventory.next_air::<Rv32MulHAir>()?;
        let mul_h = Rv32MulHChip::new(
            MulHFiller::new(
                Rv32MultAdapterFiller,
                bitwise_lu.clone(),
                range_tuple_checker.clone(),
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(mul_h);

        inventory.next_air::<Rv32DivRemAir>()?;
        let div_rem = Rv32DivRemChip::new(
            DivRemFiller::new(
                Rv32MultAdapterFiller,
                bitwise_lu.clone(),
                range_tuple_checker.clone(),
                DivRemOpcode::CLASS_OFFSET,
            ),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(div_rem);

        Ok(())
    }
}

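// Registers the RV32 IO hint-store executor with the executor inventory.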
impl<F> VmExecutionExtension<F> for Rv32Io {
    type Executor = Rv32IoExecutor;

    fn extend_execution(
        &self,
        inventory: &mut ExecutorInventoryBuilder<F, Rv32IoExecutor>,
    ) -> Result<(), ExecutorInventoryError> {
        let pointer_max_bits = inventory.pointer_max_bits();
        let hint_store =
            Rv32HintStoreExecutor::new(pointer_max_bits, Rv32HintStoreOpcode::CLASS_OFFSET);
        inventory.add_executor(
            hint_store,
            Rv32HintStoreOpcode::iter().map(|x| x.global_opcode()),
        )?;

        Ok(())
    }
}

impl<SC: StarkGenericConfig> VmCircuitExtension<SC> for Rv32Io {
    fn extend_circuit(&self, inventory: &mut AirInventory<SC>) -> Result<(), AirInventoryError> {
        let SystemPort {
            execution_bus,
            program_bus,
            memory_bridge,
        } = inventory.system().port();

        let exec_bridge = ExecutionBridge::new(execution_bus, program_bus);
        let pointer_max_bits = inventory.pointer_max_bits();

        let bitwise_lu = {
            let existing_air = inventory.find_air::<BitwiseOperationLookupAir<8>>().next();
            if let Some(air) = existing_air {
                air.bus
            } else {
                let bus = BitwiseOperationLookupBus::new(inventory.new_bus_idx());
                let air = BitwiseOperationLookupAir::<8>::new(bus);
                inventory.add_air(air);
                air.bus
            }
        };

        let hint_store = Rv32HintStoreAir::new(
            exec_bridge,
            memory_bridge,
            bitwise_lu,
            Rv32HintStoreOpcode::CLASS_OFFSET,
            pointer_max_bits,
        );
        inventory.add_air(hint_store);

        Ok(())
    }
}

impl<E, SC, RA> VmProverExtension<E, RA, Rv32Io> for Rv32ImCpuProverExt
where
    SC: StarkGenericConfig,
    E: StarkEngine<SC = SC, PB = CpuBackend<SC>, PD = CpuDevice<SC>>,
    RA: RowMajorMatrixArena<Val<SC>>,
    Val<SC>: PrimeField32,
{
    fn extend_prover(
        &self,
        _: &Rv32Io,
        inventory: &mut ChipInventory<SC, RA, CpuBackend<SC>>,
    ) -> Result<(), ChipInventoryError> {
        let range_checker = inventory.range_checker()?.clone();
        let timestamp_max_bits = inventory.timestamp_max_bits();
        let mem_helper = SharedMemoryHelper::new(range_checker.clone(), timestamp_max_bits);
        let pointer_max_bits = inventory.airs().pointer_max_bits();

        let bitwise_lu = {
            let existing_chip = inventory
                .find_chip::<SharedBitwiseOperationLookupChip<8>>()
                .next();
            if let Some(chip) = existing_chip {
                chip.clone()
            } else {
                let air: &BitwiseOperationLookupAir<8> = inventory.next_air()?;
                let chip = Arc::new(BitwiseOperationLookupChip::new(air.bus));
                inventory.add_periphery_chip(chip.clone());
                chip
            }
        };

        inventory.next_air::<Rv32HintStoreAir>()?;
        let hint_store = Rv32HintStoreChip::new(
            Rv32HintStoreFiller::new(pointer_max_bits, bitwise_lu.clone()),
            mem_helper.clone(),
        );
        inventory.add_executor_chip(hint_store);

        Ok(())
    }
}

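/// Phantom sub-executors for the RISC-V phantom instructions: hint input, hint
/// random, print string, and hint load-by-key.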
mod phantom {
    use eyre::bail;
    use openvm_circuit::{
        arch::{PhantomSubExecutor, Streams},
        system::memory::online::GuestMemory,
    };
    use openvm_instructions::PhantomDiscriminant;
    use openvm_stark_backend::p3_field::{Field, PrimeField32};
    use rand::{rngs::StdRng, Rng};

    use crate::adapters::{memory_read, read_rv32_register};

    pub struct Rv32HintInputSubEx;
    pub struct Rv32HintRandomSubEx;
    pub struct Rv32PrintStrSubEx;
    pub struct Rv32HintLoadByKeySubEx;

    impl<F: Field> PhantomSubExecutor<F> for Rv32HintInputSubEx {
        fn phantom_execute(
            &self,
            _: &GuestMemory,
            streams: &mut Streams<F>,
            _: &mut StdRng,
            _: PhantomDiscriminant,
            _: u32,
            _: u32,
            _: u16,
        ) -> eyre::Result<()> {
            let mut hint = match streams.input_stream.pop_front() {
                Some(hint) => hint,
                None => {
                    bail!("EndOfInputStream");
                }
            };
            streams.hint_stream.clear();
            streams.hint_stream.extend(
                (hint.len() as u32)
                    .to_le_bytes()
                    .iter()
                    .map(|b| F::from_canonical_u8(*b)),
            );
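            // Pad the hint with zeros to a multiple of 4 field elements (one RV32 word)
            // so it can be consumed word by word.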
            let capacity = hint.len().div_ceil(4) * 4;
            hint.resize(capacity, F::ZERO);
            streams.hint_stream.extend(hint);
            Ok(())
        }
    }

    impl<F: PrimeField32> PhantomSubExecutor<F> for Rv32HintRandomSubEx {
        fn phantom_execute(
            &self,
            memory: &GuestMemory,
            streams: &mut Streams<F>,
            rng: &mut StdRng,
            _: PhantomDiscriminant,
            a: u32,
            _: u32,
            _: u16,
        ) -> eyre::Result<()> {
            static WARN_ONCE: std::sync::Once = std::sync::Once::new();
            WARN_ONCE.call_once(|| {
                eprintln!("WARNING: Using fixed-seed RNG for deterministic randomness. Consider security implications for your use case.");
            });

            let len = read_rv32_register(memory, a) as usize;
            streams.hint_stream.clear();
            streams.hint_stream.extend(
                std::iter::repeat_with(|| F::from_canonical_u8(rng.gen::<u8>())).take(len * 4),
            );
            Ok(())
        }
    }

    impl<F: PrimeField32> PhantomSubExecutor<F> for Rv32PrintStrSubEx {
        fn phantom_execute(
            &self,
            memory: &GuestMemory,
            _: &mut Streams<F>,
            _: &mut StdRng,
            _: PhantomDiscriminant,
            a: u32,
            b: u32,
            _: u16,
        ) -> eyre::Result<()> {
            let rd = read_rv32_register(memory, a);
            let rs1 = read_rv32_register(memory, b);
            let bytes = (0..rs1)
                .map(|i| memory_read::<1>(memory, 2, rd + i)[0])
                .collect::<Vec<u8>>();
            let peeked_str = String::from_utf8(bytes)?;
            print!("{peeked_str}");
            Ok(())
        }
    }

    impl<F: PrimeField32> PhantomSubExecutor<F> for Rv32HintLoadByKeySubEx {
        fn phantom_execute(
            &self,
            memory: &GuestMemory,
            streams: &mut Streams<F>,
            _: &mut StdRng,
            _: PhantomDiscriminant,
            a: u32,
            b: u32,
            _: u16,
        ) -> eyre::Result<()> {
            let ptr = read_rv32_register(memory, a);
            let len = read_rv32_register(memory, b);
            let key: Vec<u8> = (0..len)
                .map(|i| memory_read::<1>(memory, 2, ptr + i)[0])
                .collect();
            if let Some(val) = streams.kv_store.get(&key) {
                let to_push = hint_load_by_key_decode::<F>(val);
                for input in to_push.into_iter().rev() {
                    streams.input_stream.push_front(input);
                }
            } else {
                bail!("Rv32HintLoadByKey: key not found");
            }
            Ok(())
        }
    }

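    /// Decodes a length-prefixed buffer of little-endian u32s into vectors of field
    /// elements: `[num_vecs, (len_0, elems...), (len_1, elems...), ...]`.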
    pub fn hint_load_by_key_decode<F: PrimeField32>(value: &[u8]) -> Vec<Vec<F>> {
        let mut offset = 0;
        let len = extract_u32(value, offset) as usize;
        offset += 4;
        let mut ret = Vec::with_capacity(len);
        for _ in 0..len {
            let v_len = extract_u32(value, offset) as usize;
            offset += 4;
            let v = (0..v_len)
                .map(|_| {
                    let ret = F::from_canonical_u32(extract_u32(value, offset));
                    offset += 4;
                    ret
                })
                .collect();
            ret.push(v);
        }
        ret
    }

    fn extract_u32(value: &[u8], offset: usize) -> u32 {
        u32::from_le_bytes(value[offset..offset + 4].try_into().unwrap())
    }
}