use std::{
    borrow::{Borrow, BorrowMut},
    marker::PhantomData,
};

use openvm_circuit::{
    arch::{
        get_record_from_slice, AdapterAirContext, AdapterTraceExecutor, AdapterTraceFiller,
        ExecutionBridge, ExecutionState, VmAdapterAir, VmAdapterInterface,
    },
    system::{
        memory::{
            offline_checker::{
                MemoryBridge, MemoryReadAuxCols, MemoryReadAuxRecord, MemoryWriteAuxCols,
                MemoryWriteAuxRecord,
            },
            online::TracingMemory,
            MemoryAddress, MemoryAuxColsFactory,
        },
        native_adapter::util::{tracing_read_native, tracing_write_native},
    },
};
use openvm_circuit_primitives::AlignedBytesBorrow;
use openvm_circuit_primitives_derive::AlignedBorrow;
use openvm_instructions::{instruction::Instruction, program::DEFAULT_PC_STEP, LocalOpcode};
use openvm_native_compiler::{
    conversion::AS,
    NativeLoadStoreOpcode::{self, *},
};
use openvm_stark_backend::{
    interaction::InteractionBuilder,
    p3_air::BaseAir,
    p3_field::{Field, FieldAlgebra, PrimeField32},
};

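/// Flags passed from the core to the adapter AIR: `opcode` is the opcode expression sent
/// over the execution bus, and `is_loadw`/`is_storew`/`is_hint_storew` select which of the
/// three native load/store instructions is being executed.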
pub struct NativeLoadStoreInstruction<T> {
    pub is_valid: T,
    pub opcode: T,
    pub is_loadw: T,
    pub is_storew: T,
    pub is_hint_storew: T,
}

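/// Adapter interface for the native load/store instructions: `Reads` is the pointer cell
/// read from address `c` paired with the `NUM_CELLS` data cells read, and `Writes` is the
/// `NUM_CELLS` cells to be written.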
pub struct NativeLoadStoreAdapterInterface<T, const NUM_CELLS: usize>(PhantomData<T>);

impl<T, const NUM_CELLS: usize> VmAdapterInterface<T>
    for NativeLoadStoreAdapterInterface<T, NUM_CELLS>
{
    type Reads = (T, [T; NUM_CELLS]);
    type Writes = [T; NUM_CELLS];
    type ProcessedInstruction = NativeLoadStoreInstruction<T>;
}

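/// Adapter columns: the instruction operands `a`, `b`, `c`, the resolved write pointer,
/// and auxiliary columns for the pointer read, data read, and data write.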
#[repr(C)]
#[derive(Clone, Debug, AlignedBorrow)]
pub struct NativeLoadStoreAdapterCols<T, const NUM_CELLS: usize> {
    pub from_state: ExecutionState<T>,
    pub a: T,
    pub b: T,
    pub c: T,

    pub data_write_pointer: T,

    pub pointer_read_aux_cols: MemoryReadAuxCols<T>,
    pub data_read_aux_cols: MemoryReadAuxCols<T>,
    pub data_write_aux_cols: MemoryWriteAuxCols<T, NUM_CELLS>,
}

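/// Adapter AIR constraining the pointer read, the (optional) data read, the data write,
/// and the execution bus interaction for LOADW/STOREW/HINT_STOREW.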
#[derive(Clone, Copy, Debug, derive_new::new)]
pub struct NativeLoadStoreAdapterAir<const NUM_CELLS: usize> {
    pub(super) memory_bridge: MemoryBridge,
    pub(super) execution_bridge: ExecutionBridge,
}

impl<F: Field, const NUM_CELLS: usize> BaseAir<F> for NativeLoadStoreAdapterAir<NUM_CELLS> {
    fn width(&self) -> usize {
        NativeLoadStoreAdapterCols::<F, NUM_CELLS>::width()
    }
}

impl<AB: InteractionBuilder, const NUM_CELLS: usize> VmAdapterAir<AB>
    for NativeLoadStoreAdapterAir<NUM_CELLS>
{
    type Interface = NativeLoadStoreAdapterInterface<AB::Expr, NUM_CELLS>;

    fn eval(
        &self,
        builder: &mut AB,
        local: &[AB::Var],
        ctx: AdapterAirContext<AB::Expr, Self::Interface>,
    ) {
        let cols: &NativeLoadStoreAdapterCols<_, NUM_CELLS> = local.borrow();
        let timestamp = cols.from_state.timestamp;
        let mut timestamp_delta = AB::Expr::from_canonical_usize(0);

        let is_valid = ctx.instruction.is_valid;
        let is_loadw = ctx.instruction.is_loadw;
        let is_storew = ctx.instruction.is_storew;
        let is_hint_storew = ctx.instruction.is_hint_storew;

        let native_as = AB::Expr::from_canonical_u32(AS::Native as u32);

        let ptr = ctx.reads.0;
        let data = ctx.writes;

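        // Read the pointer cell from native address `c`.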
        self.memory_bridge
            .read(
                MemoryAddress::new(native_as.clone(), cols.c),
                [ptr.clone()],
                timestamp + timestamp_delta.clone(),
                &cols.pointer_read_aux_cols,
            )
            .eval(builder, is_valid.clone());
        timestamp_delta += is_valid.clone();

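        // Data read: STOREW reads from `a`, LOADW reads from `[c] + b`; the read is
        // skipped for HINT_STOREW via the multiplicity `is_valid - is_hint_storew`.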
        self.memory_bridge
            .read(
                MemoryAddress::new(
                    native_as.clone(),
                    is_storew.clone() * cols.a + is_loadw.clone() * (ptr.clone() + cols.b),
                ),
                data.clone(),
                timestamp + timestamp_delta.clone(),
                &cols.data_read_aux_cols,
            )
            .eval(builder, is_valid.clone() - is_hint_storew.clone());
        timestamp_delta += is_valid.clone() - is_hint_storew.clone();

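        // The write destination mirrors the read: LOADW writes to `a`, while STOREW and
        // HINT_STOREW write to `[c] + b`.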
        builder.assert_eq(
            is_valid.clone() * cols.data_write_pointer,
            is_loadw.clone() * cols.a
                + (is_storew.clone() + is_hint_storew.clone()) * (ptr.clone() + cols.b),
        );

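        // Write the data cells to the resolved destination.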
        self.memory_bridge
            .write(
                MemoryAddress::new(native_as.clone(), cols.data_write_pointer),
                data.clone(),
                timestamp + timestamp_delta.clone(),
                &cols.data_write_aux_cols,
            )
            .eval(builder, is_valid.clone());
        timestamp_delta += is_valid.clone();

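        // Send the instruction over the execution bus; both address spaces are native.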
        self.execution_bridge
            .execute_and_increment_or_set_pc(
                ctx.instruction.opcode,
                [
                    cols.a.into(),
                    cols.b.into(),
                    cols.c.into(),
                    native_as.clone(),
                    native_as.clone(),
                ],
                cols.from_state,
                timestamp_delta.clone(),
                (DEFAULT_PC_STEP, ctx.to_pc),
            )
            .eval(builder, is_valid.clone());
    }

    fn get_from_pc(&self, local: &[AB::Var]) -> AB::Var {
        let local_cols: &NativeLoadStoreAdapterCols<_, NUM_CELLS> = local.borrow();
        local_cols.from_state.pc
    }
}

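/// Adapter execution record captured by [`NativeLoadStoreAdapterExecutor`] and expanded
/// into [`NativeLoadStoreAdapterCols`] by the filler. `data_read.prev_timestamp` is set
/// to `u32::MAX` when the data read is skipped (HINT_STOREW).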
#[repr(C)]
#[derive(AlignedBytesBorrow, Debug)]
pub struct NativeLoadStoreAdapterRecord<F, const NUM_CELLS: usize> {
    pub from_pc: u32,
    pub from_timestamp: u32,
    pub a: F,
    pub b: F,
    pub c: F,
    pub write_ptr: F,

    pub ptr_read: MemoryReadAuxRecord,
    pub data_read: MemoryReadAuxRecord,
    pub data_write: MemoryWriteAuxRecord<F, NUM_CELLS>,
}

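/// Trace executor for the adapter. `offset` is the global offset of the
/// [`NativeLoadStoreOpcode`] class, used to recover the local opcode index.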
#[derive(derive_new::new, Clone, Copy)]
pub struct NativeLoadStoreAdapterExecutor<const NUM_CELLS: usize> {
    offset: usize,
}

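/// Fills the adapter portion of a trace row from a [`NativeLoadStoreAdapterRecord`].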
#[derive(derive_new::new)]
pub struct NativeLoadStoreAdapterFiller<const NUM_CELLS: usize>;

impl<F: PrimeField32, const NUM_CELLS: usize> AdapterTraceExecutor<F>
    for NativeLoadStoreAdapterExecutor<NUM_CELLS>
{
    const WIDTH: usize = std::mem::size_of::<NativeLoadStoreAdapterCols<u8, NUM_CELLS>>();
    type ReadData = (F, [F; NUM_CELLS]);
    type WriteData = [F; NUM_CELLS];
    type RecordMut<'a> = &'a mut NativeLoadStoreAdapterRecord<F, NUM_CELLS>;

    #[inline(always)]
    fn start(pc: u32, memory: &TracingMemory, record: &mut Self::RecordMut<'_>) {
        record.from_pc = pc;
        record.from_timestamp = memory.timestamp();
    }

    #[inline(always)]
    fn read(
        &self,
        memory: &mut TracingMemory,
        instruction: &Instruction<F>,
        record: &mut Self::RecordMut<'_>,
    ) -> Self::ReadData {
        let &Instruction {
            opcode,
            a,
            b,
            c,
            d,
            e,
            ..
        } = instruction;

        debug_assert_eq!(d.as_canonical_u32(), AS::Native as u32);
        debug_assert_eq!(e.as_canonical_u32(), AS::Native as u32);

        let local_opcode = NativeLoadStoreOpcode::from_usize(opcode.local_opcode_idx(self.offset));

        record.a = a;
        record.b = b;
        record.c = c;

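        // Read the pointer cell at native address `c`.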
        let [read_cell] = tracing_read_native::<F, 1>(
            memory,
            c.as_canonical_u32(),
            &mut record.ptr_read.prev_timestamp,
        );

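        // LOADW reads its data from `[c] + b`; STOREW and HINT_STOREW read from `a`
        // (unused for HINT_STOREW, whose data read is skipped below).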
        let data_read_ptr = match local_opcode {
            LOADW => read_cell + record.b,
            STOREW | HINT_STOREW => record.a,
        }
        .as_canonical_u32();

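        // Record the write destination: LOADW writes to `a`, while STOREW and
        // HINT_STOREW write to `[c] + b`.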
        match local_opcode {
            LOADW => record.write_ptr = record.a,
            STOREW | HINT_STOREW => record.write_ptr = read_cell + record.b,
        }

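        // HINT_STOREW performs no data read; `u32::MAX` marks the skipped read so the
        // trace filler knows to zero the corresponding aux columns.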
        let data_read: [F; NUM_CELLS] = match local_opcode {
            HINT_STOREW => {
                record.data_read.prev_timestamp = u32::MAX;
                [F::ZERO; NUM_CELLS]
            }
            LOADW | STOREW => {
                tracing_read_native(memory, data_read_ptr, &mut record.data_read.prev_timestamp)
            }
        };

        (read_cell, data_read)
    }

    #[inline(always)]
    fn write(
        &self,
        memory: &mut TracingMemory,
        _instruction: &Instruction<F>,
        data: Self::WriteData,
        record: &mut Self::RecordMut<'_>,
    ) {
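        // Write the data cells to the destination resolved during `read`.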
        tracing_write_native(
            memory,
            record.write_ptr.as_canonical_u32(),
            data,
            &mut record.data_write.prev_timestamp,
            &mut record.data_write.prev_data,
        );
    }
}

impl<F: PrimeField32, const NUM_CELLS: usize> AdapterTraceFiller<F>
    for NativeLoadStoreAdapterFiller<NUM_CELLS>
{
    const WIDTH: usize = std::mem::size_of::<NativeLoadStoreAdapterCols<u8, NUM_CELLS>>();

    #[inline(always)]
    fn fill_trace_row(&self, mem_helper: &MemoryAuxColsFactory<F>, mut adapter_row: &mut [F]) {
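        // The record bytes sit at the start of this row slice; read them out first and
        // fill the columns back-to-front so the record is not overwritten before use.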
        let record: &NativeLoadStoreAdapterRecord<F, NUM_CELLS> =
            unsafe { get_record_from_slice(&mut adapter_row, ()) };
        let adapter_row: &mut NativeLoadStoreAdapterCols<F, NUM_CELLS> = adapter_row.borrow_mut();

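        // A `prev_timestamp` of `u32::MAX` marks a skipped data read (HINT_STOREW).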
        let is_hint_storew = record.data_read.prev_timestamp == u32::MAX;

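        // The data write happens at `from_timestamp + 2` (after the pointer and data
        // reads), or at `from_timestamp + 1` for HINT_STOREW, which has no data read.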
        adapter_row
            .data_write_aux_cols
            .set_prev_data(record.data_write.prev_data);
        mem_helper.fill(
            record.data_write.prev_timestamp,
            record.from_timestamp + 2 - is_hint_storew as u32,
            adapter_row.data_write_aux_cols.as_mut(),
        );

        if !is_hint_storew {
            mem_helper.fill(
                record.data_read.prev_timestamp,
                record.from_timestamp + 1,
                adapter_row.data_read_aux_cols.as_mut(),
            );
        } else {
            mem_helper.fill_zero(adapter_row.data_read_aux_cols.as_mut());
        }

        mem_helper.fill(
            record.ptr_read.prev_timestamp,
            record.from_timestamp,
            adapter_row.pointer_read_aux_cols.as_mut(),
        );

        adapter_row.data_write_pointer = record.write_ptr;
        adapter_row.c = record.c;
        adapter_row.b = record.b;
        adapter_row.a = record.a;

        adapter_row.from_state.pc = F::from_canonical_u32(record.from_pc);
        adapter_row.from_state.timestamp = F::from_canonical_u32(record.from_timestamp);
    }
}