//! Memory-access helpers for the native address space
//! (openvm_circuit/system/native_adapter/util.rs).

1use openvm_circuit::system::memory::online::TracingMemory;
2use openvm_instructions::{riscv::RV32_IMM_AS, NATIVE_AS};
3use openvm_stark_backend::p3_field::PrimeField32;
4
5use crate::{
6    arch::{execution_mode::ExecutionCtxTrait, VmStateMut},
7    system::memory::{offline_checker::MemoryWriteAuxCols, online::GuestMemory},
8};
9
10#[inline(always)]
11pub fn memory_read_native<F, const N: usize>(memory: &GuestMemory, ptr: u32) -> [F; N]
12where
13    F: PrimeField32,
14{
15    // SAFETY:
16    // - address space `NATIVE_AS` will always have cell type `F` and minimum alignment of `1`
17    unsafe { memory.read::<F, N>(NATIVE_AS, ptr) }
18}
19
20#[inline(always)]
21pub fn memory_read_or_imm_native<F>(memory: &GuestMemory, addr_space: u32, ptr_or_imm: F) -> F
22where
23    F: PrimeField32,
24{
25    debug_assert!(addr_space == RV32_IMM_AS || addr_space == NATIVE_AS);
26
27    if addr_space == NATIVE_AS {
28        let [result]: [F; 1] = memory_read_native(memory, ptr_or_imm.as_canonical_u32());
29        result
30    } else {
31        ptr_or_imm
32    }
33}
34
35#[inline(always)]
36pub fn memory_write_native<F, const N: usize>(memory: &mut GuestMemory, ptr: u32, data: [F; N])
37where
38    F: PrimeField32,
39{
40    // SAFETY:
41    // - address space `NATIVE_AS` will always have cell type `F` and minimum alignment of `1`
42    unsafe { memory.write::<F, N>(NATIVE_AS, ptr, data) }
43}
44
45#[inline(always)]
46pub fn memory_read_native_from_state<Ctx, F, const N: usize>(
47    state: &mut VmStateMut<F, GuestMemory, Ctx>,
48    ptr: u32,
49) -> [F; N]
50where
51    F: PrimeField32,
52    Ctx: ExecutionCtxTrait,
53{
54    state.ctx.on_memory_operation(NATIVE_AS, ptr, N as u32);
55
56    memory_read_native(state.memory, ptr)
57}
58
59#[inline(always)]
60pub fn memory_read_or_imm_native_from_state<Ctx, F>(
61    state: &mut VmStateMut<F, GuestMemory, Ctx>,
62    addr_space: u32,
63    ptr_or_imm: F,
64) -> F
65where
66    F: PrimeField32,
67    Ctx: ExecutionCtxTrait,
68{
69    debug_assert!(addr_space == RV32_IMM_AS || addr_space == NATIVE_AS);
70
71    if addr_space == NATIVE_AS {
72        let [result]: [F; 1] = memory_read_native_from_state(state, ptr_or_imm.as_canonical_u32());
73        result
74    } else {
75        ptr_or_imm
76    }
77}
78
79#[inline(always)]
80pub fn memory_write_native_from_state<Ctx, F, const N: usize>(
81    state: &mut VmStateMut<F, GuestMemory, Ctx>,
82    ptr: u32,
83    data: [F; N],
84) where
85    F: PrimeField32,
86    Ctx: ExecutionCtxTrait,
87{
88    state.ctx.on_memory_operation(NATIVE_AS, ptr, N as u32);
89
90    memory_write_native(state.memory, ptr, data)
91}
92
93/// Atomic read operation which increments the timestamp by 1.
94/// Returns `(t_prev, [ptr:BLOCK_SIZE]_4)` where `t_prev` is the timestamp of the last memory
95/// access.
96#[inline(always)]
97pub fn timed_read_native<F, const BLOCK_SIZE: usize>(
98    memory: &mut TracingMemory,
99    ptr: u32,
100) -> (u32, [F; BLOCK_SIZE])
101where
102    F: PrimeField32,
103{
104    // SAFETY:
105    // - address space `Native` will always have cell type `F` and minimum alignment of `1`
106    unsafe { memory.read::<F, BLOCK_SIZE, 1>(NATIVE_AS, ptr) }
107}
108
109#[inline(always)]
110pub fn timed_write_native<F, const BLOCK_SIZE: usize>(
111    memory: &mut TracingMemory,
112    ptr: u32,
113    vals: [F; BLOCK_SIZE],
114) -> (u32, [F; BLOCK_SIZE])
115where
116    F: PrimeField32,
117{
118    // SAFETY:
119    // - address space `Native` will always have cell type `F` and minimum alignment of `1`
120    unsafe { memory.write::<F, BLOCK_SIZE, 1>(NATIVE_AS, ptr, vals) }
121}
122
123/// Reads register value at `ptr` from memory and records the previous timestamp.
124/// Reads are only done from address space [NATIVE_AS].
125#[inline(always)]
126pub fn tracing_read_native<F, const BLOCK_SIZE: usize>(
127    memory: &mut TracingMemory,
128    ptr: u32,
129    prev_timestamp: &mut u32,
130) -> [F; BLOCK_SIZE]
131where
132    F: PrimeField32,
133{
134    let (t_prev, data) = timed_read_native(memory, ptr);
135    *prev_timestamp = t_prev;
136    data
137}
138
139/// Writes `ptr, vals` into memory and records the previous timestamp and data.
140/// Writes are only done to address space [NATIVE_AS].
141#[inline(always)]
142pub fn tracing_write_native<F, const BLOCK_SIZE: usize>(
143    memory: &mut TracingMemory,
144    ptr: u32,
145    vals: [F; BLOCK_SIZE],
146    prev_timestamp: &mut u32,
147    prev_data: &mut [F; BLOCK_SIZE],
148) where
149    F: PrimeField32,
150{
151    let (t_prev, data_prev) = timed_write_native(memory, ptr, vals);
152    *prev_timestamp = t_prev;
153    *prev_data = data_prev;
154}
155
156/// Writes `ptr, vals` into memory and records the previous timestamp and data.
157#[inline(always)]
158pub fn tracing_write_native_inplace<F, const BLOCK_SIZE: usize>(
159    memory: &mut TracingMemory,
160    ptr: u32,
161    vals: [F; BLOCK_SIZE],
162    cols: &mut MemoryWriteAuxCols<F, BLOCK_SIZE>,
163) where
164    F: PrimeField32,
165{
166    let (t_prev, data_prev) = timed_write_native(memory, ptr, vals);
167    cols.base.set_prev(F::from_canonical_u32(t_prev));
168    cols.prev_data = data_prev;
169}
170
171/// Reads value at `_ptr` from memory and records the previous timestamp.
172/// If the read is an immediate, the previous timestamp will be set to `u32::MAX`.
173#[inline(always)]
174pub fn tracing_read_or_imm_native<F>(
175    memory: &mut TracingMemory,
176    addr_space: F,
177    ptr_or_imm: F,
178    prev_timestamp: &mut u32,
179) -> F
180where
181    F: PrimeField32,
182{
183    debug_assert!(
184        addr_space == F::ZERO || addr_space == F::from_canonical_u32(NATIVE_AS),
185        "addr_space={} is not valid",
186        addr_space
187    );
188
189    if addr_space == F::ZERO {
190        *prev_timestamp = u32::MAX;
191        memory.increment_timestamp();
192        ptr_or_imm
193    } else {
194        let data: [F; 1] =
195            tracing_read_native(memory, ptr_or_imm.as_canonical_u32(), prev_timestamp);
196        data[0]
197    }
198}