1use alu_native_adapter::{AluNativeAdapterAir, AluNativeAdapterExecutor};
2use branch_native_adapter::{BranchNativeAdapterAir, BranchNativeAdapterExecutor};
3use convert_adapter::{ConvertAdapterAir, ConvertAdapterExecutor};
4use derive_more::derive::From;
5use loadstore_native_adapter::{NativeLoadStoreAdapterAir, NativeLoadStoreAdapterExecutor};
6use native_vectorized_adapter::{NativeVectorizedAdapterAir, NativeVectorizedAdapterExecutor};
7use openvm_circuit::{
8 arch::{
9 AirInventory, AirInventoryError, ChipInventory, ChipInventoryError, ExecutionBridge,
10 ExecutorInventoryBuilder, ExecutorInventoryError, RowMajorMatrixArena, VmCircuitExtension,
11 VmExecutionExtension, VmField, VmProverExtension,
12 },
13 system::{memory::SharedMemoryHelper, SystemPort},
14};
15use openvm_circuit_derive::{AnyEnum, Executor, MeteredExecutor, PreflightExecutor};
16use openvm_instructions::{program::DEFAULT_PC_STEP, LocalOpcode, PhantomDiscriminant};
17use openvm_native_compiler::{
18 CastfOpcode, FieldArithmeticOpcode, FieldExtensionOpcode, FriOpcode, NativeBranchEqualOpcode,
19 NativeJalOpcode, NativeLoadStore4Opcode, NativeLoadStoreOpcode, NativePhantom,
20 NativeRangeCheckOpcode, Poseidon2Opcode, VerifyBatchOpcode, BLOCK_LOAD_STORE_SIZE,
21};
22use openvm_poseidon2_air::Poseidon2Config;
23use openvm_rv32im_circuit::BranchEqualCoreAir;
24use openvm_stark_backend::{
25 config::{StarkGenericConfig, Val},
26 p3_field::PrimeField32,
27 prover::cpu::{CpuBackend, CpuDevice},
28};
29use openvm_stark_sdk::engine::StarkEngine;
30use serde::{Deserialize, Serialize};
31use strum::IntoEnumIterator;
32
33use crate::{
34 adapters::*,
35 branch_eq::{
36 NativeBranchEqAir, NativeBranchEqChip, NativeBranchEqExecutor, NativeBranchEqualFiller,
37 },
38 castf::{CastFAir, CastFChip, CastFCoreAir, CastFCoreFiller, CastFExecutor},
39 field_arithmetic::{
40 FieldArithmeticAir, FieldArithmeticChip, FieldArithmeticCoreAir, FieldArithmeticCoreFiller,
41 FieldArithmeticExecutor,
42 },
43 field_extension::{
44 FieldExtensionAir, FieldExtensionChip, FieldExtensionCoreAir, FieldExtensionCoreFiller,
45 FieldExtensionExecutor,
46 },
47 fri::{
48 FriReducedOpeningAir, FriReducedOpeningChip, FriReducedOpeningExecutor,
49 FriReducedOpeningFiller,
50 },
51 jal_rangecheck::{
52 JalRangeCheckAir, JalRangeCheckExecutor, JalRangeCheckFiller, NativeJalRangeCheckChip,
53 },
54 loadstore::{
55 NativeLoadStoreAir, NativeLoadStoreChip, NativeLoadStoreCoreAir, NativeLoadStoreCoreFiller,
56 NativeLoadStoreExecutor,
57 },
58 phantom::*,
59 poseidon2::{
60 air::{NativePoseidon2Air, VerifyBatchBus},
61 chip::{NativePoseidon2Executor, NativePoseidon2Filler},
62 NativePoseidon2Chip,
63 },
64};
65
// Select the prover backend at compile time. With the `cuda` feature the GPU
// prover extension and builder are re-exported under backend-neutral names;
// otherwise the CPU equivalents (defined in this module/crate) are used.
cfg_if::cfg_if! {
    if #[cfg(feature = "cuda")] {
        mod cuda;
        pub use self::cuda::*;
        // Backend-neutral alias for the GPU prover extension.
        pub use self::cuda::{
            NativeGpuProverExt as NativeProverExt,
        };
        pub type NativeBuilder = crate::NativeGpuBuilder;
    } else {
        // Backend-neutral alias for the CPU prover extension declared below.
        pub use self::{
            NativeCpuProverExt as NativeProverExt,
        };
        pub type NativeBuilder = crate::NativeCpuBuilder;
    }
}
81
/// VM extension adding the native field-element instruction set: load/store,
/// branch-on-equality, JAL/range-check, base-field and field-extension
/// arithmetic, FRI reduced opening, and Poseidon2/VERIFY_BATCH.
#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize)]
pub struct Native;
86
/// Dispatch enum over every instruction executor registered by the [`Native`]
/// extension; one variant per executor type.
#[derive(Clone, From, AnyEnum, Executor, MeteredExecutor, PreflightExecutor)]
#[cfg_attr(
    feature = "aot",
    derive(
        openvm_circuit_derive::AotExecutor,
        openvm_circuit_derive::AotMeteredExecutor
    )
)]
pub enum NativeExecutor<F: VmField> {
    /// Scalar (block size 1) native load/store.
    LoadStore(NativeLoadStoreExecutor<1>),
    /// Block load/store of `BLOCK_LOAD_STORE_SIZE` elements.
    BlockLoadStore(NativeLoadStoreExecutor<BLOCK_LOAD_STORE_SIZE>),
    /// Branch on (in)equality of native field elements.
    BranchEqual(NativeBranchEqExecutor),
    /// Handles both JAL and RANGE_CHECK opcodes (see `extend_execution`).
    Jal(JalRangeCheckExecutor),
    /// Base-field arithmetic.
    FieldArithmetic(FieldArithmeticExecutor),
    /// Field-extension arithmetic.
    FieldExtension(FieldExtensionExecutor),
    /// FRI reduced-opening computation.
    FriReducedOpening(FriReducedOpeningExecutor),
    /// Poseidon2 PERM/COMP plus VERIFY_BATCH (see `extend_execution`).
    VerifyBatch(NativePoseidon2Executor<F, 1>),
}
105
106impl<F: VmField> VmExecutionExtension<F> for Native {
107 type Executor = NativeExecutor<F>;
108
109 fn extend_execution(
110 &self,
111 inventory: &mut ExecutorInventoryBuilder<F, NativeExecutor<F>>,
112 ) -> Result<(), ExecutorInventoryError> {
113 let load_store = NativeLoadStoreExecutor::<1>::new(
114 NativeLoadStoreAdapterExecutor::new(NativeLoadStoreOpcode::CLASS_OFFSET),
115 NativeLoadStoreOpcode::CLASS_OFFSET,
116 );
117 inventory.add_executor(
118 load_store,
119 NativeLoadStoreOpcode::iter().map(|x| x.global_opcode()),
120 )?;
121
122 let block_load_store = NativeLoadStoreExecutor::<BLOCK_LOAD_STORE_SIZE>::new(
123 NativeLoadStoreAdapterExecutor::new(NativeLoadStore4Opcode::CLASS_OFFSET),
124 NativeLoadStore4Opcode::CLASS_OFFSET,
125 );
126 inventory.add_executor(
127 block_load_store,
128 NativeLoadStore4Opcode::iter().map(|x| x.global_opcode()),
129 )?;
130
131 let branch_equal = NativeBranchEqExecutor::new(
132 BranchNativeAdapterExecutor::new(),
133 NativeBranchEqualOpcode::CLASS_OFFSET,
134 DEFAULT_PC_STEP,
135 );
136 inventory.add_executor(
137 branch_equal,
138 NativeBranchEqualOpcode::iter().map(|x| x.global_opcode()),
139 )?;
140
141 let jal_rangecheck = JalRangeCheckExecutor;
142 inventory.add_executor(
143 jal_rangecheck,
144 [
145 NativeJalOpcode::JAL.global_opcode(),
146 NativeRangeCheckOpcode::RANGE_CHECK.global_opcode(),
147 ],
148 )?;
149
150 let field_arithmetic = FieldArithmeticExecutor::new(AluNativeAdapterExecutor::new());
151 inventory.add_executor(
152 field_arithmetic,
153 FieldArithmeticOpcode::iter().map(|x| x.global_opcode()),
154 )?;
155
156 let field_extension = FieldExtensionExecutor::new(NativeVectorizedAdapterExecutor::new());
157 inventory.add_executor(
158 field_extension,
159 FieldExtensionOpcode::iter().map(|x| x.global_opcode()),
160 )?;
161
162 let fri_reduced_opening = FriReducedOpeningExecutor::new();
163 inventory.add_executor(
164 fri_reduced_opening,
165 FriOpcode::iter().map(|x| x.global_opcode()),
166 )?;
167
168 let verify_batch = NativePoseidon2Executor::<F, 1>::new(Poseidon2Config::default());
169 inventory.add_executor(
170 verify_batch,
171 [
172 VerifyBatchOpcode::VERIFY_BATCH.global_opcode(),
173 Poseidon2Opcode::PERM_POS2.global_opcode(),
174 Poseidon2Opcode::COMP_POS2.global_opcode(),
175 ],
176 )?;
177
178 inventory.add_phantom_sub_executor(
179 NativeHintInputSubEx,
180 PhantomDiscriminant(NativePhantom::HintInput as u16),
181 )?;
182
183 inventory.add_phantom_sub_executor(
184 NativeHintSliceSubEx::<1>,
185 PhantomDiscriminant(NativePhantom::HintFelt as u16),
186 )?;
187
188 inventory.add_phantom_sub_executor(
189 NativeHintBitsSubEx,
190 PhantomDiscriminant(NativePhantom::HintBits as u16),
191 )?;
192
193 inventory.add_phantom_sub_executor(
194 NativePrintSubEx,
195 PhantomDiscriminant(NativePhantom::Print as u16),
196 )?;
197
198 inventory.add_phantom_sub_executor(
199 NativeHintLoadSubEx,
200 PhantomDiscriminant(NativePhantom::HintLoad as u16),
201 )?;
202
203 Ok(())
204 }
205}
206
207impl<SC: StarkGenericConfig> VmCircuitExtension<SC> for Native
208where
209 Val<SC>: VmField,
210{
211 fn extend_circuit(&self, inventory: &mut AirInventory<SC>) -> Result<(), AirInventoryError> {
212 let SystemPort {
213 execution_bus,
214 program_bus,
215 memory_bridge,
216 } = inventory.system().port();
217 let exec_bridge = ExecutionBridge::new(execution_bus, program_bus);
218 let range_checker = inventory.range_checker().bus;
219
220 let load_store = NativeLoadStoreAir::<1>::new(
221 NativeLoadStoreAdapterAir::new(memory_bridge, exec_bridge),
222 NativeLoadStoreCoreAir::new(NativeLoadStoreOpcode::CLASS_OFFSET),
223 );
224 inventory.add_air(load_store);
225
226 let block_load_store = NativeLoadStoreAir::<BLOCK_LOAD_STORE_SIZE>::new(
227 NativeLoadStoreAdapterAir::new(memory_bridge, exec_bridge),
228 NativeLoadStoreCoreAir::new(NativeLoadStore4Opcode::CLASS_OFFSET),
229 );
230 inventory.add_air(block_load_store);
231
232 let branch_equal = NativeBranchEqAir::new(
233 BranchNativeAdapterAir::new(exec_bridge, memory_bridge),
234 BranchEqualCoreAir::new(NativeBranchEqualOpcode::CLASS_OFFSET, DEFAULT_PC_STEP),
235 );
236 inventory.add_air(branch_equal);
237
238 let jal_rangecheck = JalRangeCheckAir::new(
239 ExecutionBridge::new(execution_bus, program_bus),
240 memory_bridge,
241 range_checker,
242 );
243 inventory.add_air(jal_rangecheck);
244
245 let field_arithmetic = FieldArithmeticAir::new(
246 AluNativeAdapterAir::new(exec_bridge, memory_bridge),
247 FieldArithmeticCoreAir::new(),
248 );
249 inventory.add_air(field_arithmetic);
250
251 let field_extension = FieldExtensionAir::new(
252 NativeVectorizedAdapterAir::new(exec_bridge, memory_bridge),
253 FieldExtensionCoreAir::new(),
254 );
255 inventory.add_air(field_extension);
256
257 let fri_reduced_opening = FriReducedOpeningAir::new(
258 ExecutionBridge::new(execution_bus, program_bus),
259 memory_bridge,
260 );
261 inventory.add_air(fri_reduced_opening);
262
263 let verify_batch = NativePoseidon2Air::<_, 1>::new(
264 exec_bridge,
265 memory_bridge,
266 VerifyBatchBus::new(inventory.new_bus_idx()),
267 Poseidon2Config::default(),
268 );
269 inventory.add_air(verify_batch);
270
271 Ok(())
272 }
273}
274
275pub struct NativeCpuProverExt;
276impl<E, SC, RA> VmProverExtension<E, RA, Native> for NativeCpuProverExt
279where
280 SC: StarkGenericConfig,
281 E: StarkEngine<SC = SC, PB = CpuBackend<SC>, PD = CpuDevice<SC>>,
282 RA: RowMajorMatrixArena<Val<SC>>,
283 Val<SC>: VmField,
284{
285 fn extend_prover(
286 &self,
287 _: &Native,
288 inventory: &mut ChipInventory<SC, RA, CpuBackend<SC>>,
289 ) -> Result<(), ChipInventoryError> {
290 let range_checker = inventory.range_checker()?.clone();
291 let timestamp_max_bits = inventory.timestamp_max_bits();
292 let mem_helper = SharedMemoryHelper::new(range_checker.clone(), timestamp_max_bits);
293
294 inventory.next_air::<NativeLoadStoreAir<1>>()?;
297 let load_store = NativeLoadStoreChip::<_, 1>::new(
298 NativeLoadStoreCoreFiller::new(NativeLoadStoreAdapterFiller),
299 mem_helper.clone(),
300 );
301 inventory.add_executor_chip(load_store);
302
303 inventory.next_air::<NativeLoadStoreAir<BLOCK_LOAD_STORE_SIZE>>()?;
304 let block_load_store = NativeLoadStoreChip::<_, BLOCK_LOAD_STORE_SIZE>::new(
305 NativeLoadStoreCoreFiller::new(NativeLoadStoreAdapterFiller),
306 mem_helper.clone(),
307 );
308 inventory.add_executor_chip(block_load_store);
309
310 inventory.next_air::<NativeBranchEqAir>()?;
311 let branch_eq = NativeBranchEqChip::new(
312 NativeBranchEqualFiller::new(BranchNativeAdapterFiller),
313 mem_helper.clone(),
314 );
315
316 inventory.add_executor_chip(branch_eq);
317
318 inventory.next_air::<JalRangeCheckAir>()?;
319 let jal_rangecheck = NativeJalRangeCheckChip::new(
320 JalRangeCheckFiller::new(range_checker.clone()),
321 mem_helper.clone(),
322 );
323 inventory.add_executor_chip(jal_rangecheck);
324
325 inventory.next_air::<FieldArithmeticAir>()?;
326 let field_arithmetic = FieldArithmeticChip::new(
327 FieldArithmeticCoreFiller::new(AluNativeAdapterFiller),
328 mem_helper.clone(),
329 );
330 inventory.add_executor_chip(field_arithmetic);
331
332 inventory.next_air::<FieldExtensionAir>()?;
333 let field_extension = FieldExtensionChip::new(
334 FieldExtensionCoreFiller::new(NativeVectorizedAdapterFiller),
335 mem_helper.clone(),
336 );
337 inventory.add_executor_chip(field_extension);
338
339 inventory.next_air::<FriReducedOpeningAir>()?;
340 let fri_reduced_opening =
341 FriReducedOpeningChip::new(FriReducedOpeningFiller::new(), mem_helper.clone());
342 inventory.add_executor_chip(fri_reduced_opening);
343
344 inventory.next_air::<NativePoseidon2Air<Val<SC>, 1>>()?;
345 let poseidon2 = NativePoseidon2Chip::<_, 1>::new(
346 NativePoseidon2Filler::new(Poseidon2Config::default()),
347 mem_helper.clone(),
348 );
349 inventory.add_executor_chip(poseidon2);
350
351 Ok(())
352 }
353}
354
pub(crate) mod phantom {
    //! Host-side phantom sub-executors for the native extension. These run
    //! during execution only and service the program's hint streams plus a
    //! debug-print facility.
    use eyre::bail;
    use openvm_circuit::{
        arch::{PhantomSubExecutor, Streams},
        system::memory::online::GuestMemory,
    };
    use openvm_instructions::PhantomDiscriminant;
    use openvm_stark_backend::p3_field::{Field, PrimeField32};
    use rand::rngs::StdRng;

    // Pops the next input vector and stages it, length-prefixed, on the hint
    // stream.
    pub struct NativeHintInputSubEx;
    // Pops the next input vector, asserts it has exactly `N` elements, and
    // replaces the hint stream with it (no length prefix).
    pub struct NativeHintSliceSubEx<const N: usize>;
    // Prints one memory cell to stdout (debugging aid).
    pub struct NativePrintSubEx;
    // Stages the low `len` bits of a memory cell on the hint stream, LSB
    // first.
    pub struct NativeHintBitsSubEx;
    // Moves the next input vector into `hint_space` and hints its index.
    pub struct NativeHintLoadSubEx;

    impl<F: Field> PhantomSubExecutor<F> for NativeHintInputSubEx {
        fn phantom_execute(
            &self,
            _: &GuestMemory,
            streams: &mut Streams<F>,
            _: &mut StdRng,
            _: PhantomDiscriminant,
            _: u32,
            _: u32,
            _: u16,
        ) -> eyre::Result<()> {
            // Error out when the program requests more input than supplied.
            let hint = match streams.input_stream.pop_front() {
                Some(hint) => hint,
                None => {
                    bail!("EndOfInputStream");
                }
            };
            // The previous hint must be fully consumed before staging a new
            // one.
            assert!(streams.hint_stream.is_empty());
            // Length prefix first, then the payload.
            streams.hint_stream.push_back(F::from_usize(hint.len()));
            streams.hint_stream.extend(hint);
            Ok(())
        }
    }

    impl<F: Field, const N: usize> PhantomSubExecutor<F> for NativeHintSliceSubEx<N> {
        fn phantom_execute(
            &self,
            _: &GuestMemory,
            streams: &mut Streams<F>,
            _: &mut StdRng,
            _: PhantomDiscriminant,
            _: u32,
            _: u32,
            _: u16,
        ) -> eyre::Result<()> {
            // Error out when the program requests more input than supplied.
            let hint = match streams.input_stream.pop_front() {
                Some(hint) => hint,
                None => {
                    bail!("EndOfInputStream");
                }
            };
            // The previous hint must be fully consumed, and the new payload
            // must have the statically expected length.
            assert!(streams.hint_stream.is_empty());
            assert_eq!(hint.len(), N);
            streams.hint_stream = hint.into();
            Ok(())
        }
    }

    impl<F: PrimeField32> PhantomSubExecutor<F> for NativePrintSubEx {
        fn phantom_execute(
            &self,
            memory: &GuestMemory,
            _: &mut Streams<F>,
            _: &mut StdRng,
            _: PhantomDiscriminant,
            a: u32,
            _: u32,
            c_upper: u16,
        ) -> eyre::Result<()> {
            // `c_upper` selects the address space; reject out-of-range ids.
            assert!(
                (c_upper as usize) < memory.memory.config.len(),
                "c_upper out of bounds"
            );
            // SAFETY: the assert above bounds only the address-space id, not
            // the offset `a` — presumably the caller guarantees `a` is in
            // bounds for that space; TODO confirm.
            let [value] = unsafe { memory.read::<F, 1>(c_upper as u32, a) };
            println!("{value}");
            Ok(())
        }
    }

    impl<F: PrimeField32> PhantomSubExecutor<F> for NativeHintBitsSubEx {
        fn phantom_execute(
            &self,
            memory: &GuestMemory,
            streams: &mut Streams<F>,
            _: &mut StdRng,
            _: PhantomDiscriminant,
            a: u32,
            len: u32,
            c_upper: u16,
        ) -> eyre::Result<()> {
            // `c_upper` selects the address space; reject out-of-range ids.
            assert!(
                (c_upper as usize) < memory.memory.config.len(),
                "c_upper out of bounds"
            );
            // SAFETY: the assert above bounds only the address-space id, not
            // the offset `a` — presumably the caller guarantees `a` is in
            // bounds for that space; TODO confirm.
            let [val] = unsafe { memory.read::<F, 1>(c_upper as u32, a) };
            let mut val = val.as_canonical_u32();

            // Stage the low `len` bits of the canonical value, LSB first.
            assert!(streams.hint_stream.is_empty());
            for _ in 0..len {
                streams.hint_stream.push_back(F::from_u32(val & 1));
                val >>= 1;
            }
            Ok(())
        }
    }

    impl<F: PrimeField32> PhantomSubExecutor<F> for NativeHintLoadSubEx {
        fn phantom_execute(
            &self,
            _: &GuestMemory,
            streams: &mut Streams<F>,
            _: &mut StdRng,
            _: PhantomDiscriminant,
            _: u32,
            _: u32,
            _: u16,
        ) -> eyre::Result<()> {
            // Error out when the program requests more input than supplied.
            let payload = match streams.input_stream.pop_front() {
                Some(hint) => hint,
                None => {
                    bail!("EndOfInputStream");
                }
            };
            // Store the payload in hint space and hint back its index so the
            // program can reference it later.
            let id = streams.hint_space.len();
            streams.hint_space.push(payload);
            assert!(streams.hint_stream.is_empty());
            streams.hint_stream.push_back(F::from_usize(id));
            Ok(())
        }
    }
}
501
/// VM extension providing the single CASTF instruction
/// (`CastfOpcode::CASTF`).
#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize)]
pub struct CastFExtension;
504
/// Dispatch enum for [`CastFExtension`]; it has exactly one executor.
#[derive(Clone, From, AnyEnum, Executor, MeteredExecutor, PreflightExecutor)]
#[cfg_attr(
    feature = "aot",
    derive(
        openvm_circuit_derive::AotExecutor,
        openvm_circuit_derive::AotMeteredExecutor
    )
)]
pub enum CastFExtensionExecutor {
    /// Executor for `CastfOpcode::CASTF`.
    CastF(CastFExecutor),
}
516
517impl<F: PrimeField32> VmExecutionExtension<F> for CastFExtension {
518 type Executor = CastFExtensionExecutor;
519
520 fn extend_execution(
521 &self,
522 inventory: &mut ExecutorInventoryBuilder<F, CastFExtensionExecutor>,
523 ) -> Result<(), ExecutorInventoryError> {
524 let castf = CastFExecutor::new(ConvertAdapterExecutor::new());
525 inventory.add_executor(castf, [CastfOpcode::CASTF.global_opcode()])?;
526 Ok(())
527 }
528}
529
530impl<SC: StarkGenericConfig> VmCircuitExtension<SC> for CastFExtension {
531 fn extend_circuit(&self, inventory: &mut AirInventory<SC>) -> Result<(), AirInventoryError> {
532 let SystemPort {
533 execution_bus,
534 program_bus,
535 memory_bridge,
536 } = inventory.system().port();
537 let exec_bridge = ExecutionBridge::new(execution_bus, program_bus);
538 let range_checker = inventory.range_checker().bus;
539
540 let castf = CastFAir::new(
541 ConvertAdapterAir::new(exec_bridge, memory_bridge),
542 CastFCoreAir::new(range_checker),
543 );
544 inventory.add_air(castf);
545 Ok(())
546 }
547}
548
549impl<E, SC, RA> VmProverExtension<E, RA, CastFExtension> for NativeCpuProverExt
550where
551 SC: StarkGenericConfig,
552 E: StarkEngine<SC = SC, PB = CpuBackend<SC>, PD = CpuDevice<SC>>,
553 RA: RowMajorMatrixArena<Val<SC>>,
554 Val<SC>: PrimeField32,
555{
556 fn extend_prover(
557 &self,
558 _: &CastFExtension,
559 inventory: &mut ChipInventory<SC, RA, CpuBackend<SC>>,
560 ) -> Result<(), ChipInventoryError> {
561 let range_checker = inventory.range_checker()?.clone();
562 let timestamp_max_bits = inventory.timestamp_max_bits();
563 let mem_helper = SharedMemoryHelper::new(range_checker.clone(), timestamp_max_bits);
564
565 inventory.next_air::<CastFAir>()?;
566 let castf = CastFChip::new(
567 CastFCoreFiller::new(ConvertAdapterFiller::new(), range_checker),
568 mem_helper.clone(),
569 );
570 inventory.add_executor_chip(castf);
571
572 Ok(())
573 }
574}
575
/// Maximum trace heights used when metering the native VM's AIRs.
///
/// NOTE(review): there are 17 entries but only 8 native AIRs are added in
/// this file — presumably the list also covers system AIRs and follows the
/// full AIR ordering of the configured VM; verify against the circuit
/// construction before editing.
#[rustfmt::skip]
pub const NATIVE_MAX_TRACE_HEIGHTS: &[u32] = &[
    4194304, 4, 64, 2097152, 8388608, 4194304, 262144, 4194304, 33554432, 2097152, 16777216, 262144, 4194304, 1048576, 4194304, 131072, 262144, ];