// halo2_base/gates/flex_gate/threads/single_phase.rs

use std::cell::RefCell;

use getset::CopyGetters;

use crate::{
    gates::{
        circuit::CircuitBuilderStage,
        flex_gate::{BasicGateConfig, ThreadBreakPoints},
    },
    utils::halo2::{raw_assign_advice, raw_constrain_equal},
    utils::ScalarField,
    virtual_region::copy_constraints::{CopyConstraintManager, SharedCopyConstraintManager},
    Context, ContextCell,
};
use crate::{
    halo2_proofs::circuit::{Region, Value},
    virtual_region::manager::VirtualRegionManager,
};
/// Virtual region manager for the basic vertical gate columns in a single challenge phase.
/// This is the core manager of virtual [Context]s ("virtual threads").
#[derive(Clone, Debug, Default, CopyGetters)]
pub struct SinglePhaseCoreManager<F: ScalarField> {
    /// Virtual columns. Each [Context] is a virtual "thread" holding advice values and gate selectors.
    pub threads: Vec<Context<F>>,
    /// Global shared copy constraint manager.
    pub copy_manager: SharedCopyConstraintManager<F>,
    /// Flag for witness generation. If true, the manager is used for witness generation only and
    /// does not track constraint metadata.
    #[getset(get_copy = "pub")]
    witness_gen_only: bool,
    /// Used during key generation. If true, witness values are assigned as [Value::unknown()] for safety.
    #[getset(get_copy = "pub")]
    pub(crate) use_unknown: bool,
    /// The challenge phase that the virtual regions of this manager map to.
    #[getset(get_copy = "pub", set)]
    pub(crate) phase: usize,
    /// Break points: the row offsets at which assignment wraps to the next raw column.
    /// Must be provided as a "pinning" when running the production prover.
    pub break_points: RefCell<Option<ThreadBreakPoints>>,
}

impl<F: ScalarField> SinglePhaseCoreManager<F> {
    /// Creates a new [SinglePhaseCoreManager] in phase 0 with no virtual threads.
    /// * `witness_gen_only`: if true, the manager only performs witness generation and does not
    ///   track constraint metadata.
    pub fn new(witness_gen_only: bool, copy_manager: SharedCopyConstraintManager<F>) -> Self {
        Self {
            threads: vec![],
            witness_gen_only,
            use_unknown: false,
            phase: 0,
            copy_manager,
            ..Default::default()
        }
    }

    /// Returns `self` with the phase set to `phase`.
    pub fn in_phase(self, phase: usize) -> Self {
        Self { phase, ..self }
    }

    /// Creates a new [SinglePhaseCoreManager] for the given [CircuitBuilderStage].
    /// During keygen, witness values are left unknown.
    pub fn from_stage(
        stage: CircuitBuilderStage,
        copy_manager: SharedCopyConstraintManager<F>,
    ) -> Self {
        Self::new(stage.witness_gen_only(), copy_manager)
            .unknown(stage == CircuitBuilderStage::Keygen)
    }

    /// Returns `self` with the `use_unknown` flag set.
    pub fn unknown(self, use_unknown: bool) -> Self {
        Self { use_unknown, ..self }
    }

    /// Sets the copy constraint manager on `self` and on all existing virtual threads.
    pub fn set_copy_manager(&mut self, copy_manager: SharedCopyConstraintManager<F>) {
        self.copy_manager = copy_manager.clone();
        for ctx in &mut self.threads {
            ctx.copy_manager = copy_manager.clone();
        }
    }

    /// Returns `self` with the given copy constraint manager installed.
    pub fn use_copy_manager(mut self, copy_manager: SharedCopyConstraintManager<F>) -> Self {
        self.set_copy_manager(copy_manager);
        self
    }

    /// Clears all virtual threads and the shared copy constraint manager.
    pub fn clear(&mut self) {
        self.threads = vec![];
        self.copy_manager.lock().unwrap().clear();
    }

    /// Returns a mutable reference to the last virtual thread, creating one if none exists.
    pub fn main(&mut self) -> &mut Context<F> {
        if self.threads.is_empty() {
            self.new_thread()
        } else {
            self.threads.last_mut().unwrap()
        }
    }

    /// Returns the number of virtual threads.
    pub fn thread_count(&self) -> usize {
        self.threads.len()
    }

    /// A unique identifier for the type of virtual region this manager handles, depending on the phase.
    pub fn type_of(&self) -> &'static str {
        match self.phase {
            0 => "halo2-base:SinglePhaseCoreManager:FirstPhase",
            1 => "halo2-base:SinglePhaseCoreManager:SecondPhase",
            2 => "halo2-base:SinglePhaseCoreManager:ThirdPhase",
            _ => panic!("Unsupported phase"),
        }
    }

    /// Creates a new [Context] with the given `context_id`, without adding it to `self.threads`.
    pub fn new_context(&self, context_id: usize) -> Context<F> {
        Context::new(
            self.witness_gen_only,
            self.phase,
            self.type_of(),
            context_id,
            self.copy_manager.clone(),
        )
    }

    /// Spawns a new virtual thread and returns a mutable reference to it.
    pub fn new_thread(&mut self) -> &mut Context<F> {
        let context_id = self.thread_count();
        self.threads.push(self.new_context(context_id));
        self.threads.last_mut().unwrap()
    }

    /// Returns the total number of advice cells used across all virtual threads.
    pub fn total_advice(&self) -> usize {
        self.threads.iter().map(|ctx| ctx.advice.len()).sum::<usize>()
    }
}
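
// A minimal usage sketch (not part of the original source). It assumes the `Context::load_witness`
// and `Context::constrain_equal` APIs from this crate and the BN254 scalar field re-exported at
// `crate::halo2_proofs::halo2curves::bn256::Fr`. In real circuits this manager is driven by the
// higher-level circuit builder rather than used directly.
#[cfg(test)]
mod single_phase_usage_sketch {
    use super::*;
    use crate::halo2_proofs::halo2curves::bn256::Fr;

    #[test]
    fn allocate_virtual_threads() {
        // Keygen/mock mode: track constraint metadata (witness_gen_only = false).
        let mut pool = SinglePhaseCoreManager::<Fr>::new(false, Default::default());

        // `main` lazily spawns the first virtual thread.
        let a = pool.main().load_witness(Fr::from(3u64));
        // A second thread gets its own virtual cell stream but shares the same copy manager.
        let b = pool.new_thread().load_witness(Fr::from(3u64));
        // Copy constraints between cells of different threads are recorded virtually.
        pool.main().constrain_equal(&a, &b);

        assert_eq!(pool.thread_count(), 2);
        assert_eq!(pool.total_advice(), 2);
    }
}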

impl<F: ScalarField> VirtualRegionManager<F> for SinglePhaseCoreManager<F> {
    /// The basic vertical gate columns for this phase, together with the number of usable rows.
    type Config = (Vec<BasicGateConfig<F>>, usize);
    type Assignment = ();

    fn assign_raw(&self, (config, usable_rows): &Self::Config, region: &mut Region<F>) {
        if self.witness_gen_only {
            // Production prover: break points must already be set ("pinned").
            let binding = self.break_points.borrow();
            let break_points = binding.as_ref().expect("break points not set");
            assign_witnesses(&self.threads, config, region, break_points);
        } else {
            // Keygen / mock proving: assign with constraints and record the resulting break points.
            let mut copy_manager = self.copy_manager.lock().unwrap();
            let break_points = assign_with_constraints::<F, 4>(
                &self.threads,
                config,
                region,
                &mut copy_manager,
                *usable_rows,
                self.use_unknown,
            );
            let mut bp = self.break_points.borrow_mut();
            if let Some(bp) = bp.as_ref() {
                assert_eq!(bp, &break_points, "break points don't match");
            } else {
                *bp = Some(break_points);
            }
        }
    }
}

/// Assigns all virtual `threads` to the physical region using the raw columns in `basic_gates`,
/// enabling selectors and registering the raw assigned cells with the `copy_manager`.
/// Returns the break points: the row offsets at which assignment wraps to the next column.
///
/// This is used during keygen and mock proving, when constraint metadata must be kept.
/// For witness generation by the production prover, see [assign_witnesses].
///
/// The assignment is generic over a "vertical" custom gate that uses a single advice column
/// and reads `ROTATIONS` contiguous rows of that column per enabled gate.
///
/// # Inputs
/// - `max_rows`: the number of usable (non-poisoned) rows per column.
/// - `use_unknown`: if true, advice values are assigned as [Value::unknown()] (key generation).
pub fn assign_with_constraints<F: ScalarField, const ROTATIONS: usize>(
    threads: &[Context<F>],
    basic_gates: &[BasicGateConfig<F>],
    region: &mut Region<F>,
    copy_manager: &mut CopyConstraintManager<F>,
    max_rows: usize,
    use_unknown: bool,
) -> ThreadBreakPoints {
    let mut break_points = vec![];
    let mut gate_index = 0;
    let mut row_offset = 0;
    for ctx in threads {
        if ctx.advice.is_empty() {
            continue;
        }
        let mut basic_gate = basic_gates
            .get(gate_index)
            .unwrap_or_else(|| panic!("NOT ENOUGH ADVICE COLUMNS. Perhaps blinding factors were not taken into account. The max non-poisoned rows is {max_rows}"));
        assert_eq!(ctx.selector.len(), ctx.advice.len());

        for (i, (advice, &q)) in ctx.advice.iter().zip(ctx.selector.iter()).enumerate() {
            let column = basic_gate.value;
            let value = if use_unknown { Value::unknown() } else { Value::known(advice) };
            #[cfg(feature = "halo2-axiom")]
            let cell = region.assign_advice(column, row_offset, value).cell();
            #[cfg(not(feature = "halo2-axiom"))]
            let cell = region
                .assign_advice(|| "", column, row_offset, || value.map(|v| *v))
                .unwrap()
                .cell();
            // Record which raw cell backs this virtual cell so copy constraints can be imposed later.
            if let Some(old_cell) = copy_manager
                .assigned_advices
                .insert(ContextCell::new(ctx.type_id, ctx.context_id, i), cell)
            {
                assert!(
                    old_cell.row_offset == cell.row_offset && old_cell.column == cell.column,
                    "Trying to overwrite virtual cell with a different raw cell"
                );
            }

            // Wrap to the next column if an enabled gate at this row would read past the usable rows,
            // or if this is the last usable row of the current column.
            if (q && row_offset + ROTATIONS > max_rows) || row_offset >= max_rows - 1 {
                break_points.push(row_offset);
                row_offset = 0;
                gate_index += 1;

                // Gates enabled shortly before the break would overlap the column boundary;
                // such overlaps are not supported.
                if ROTATIONS > 1 && i + 2 >= ROTATIONS {
                    for delta in 1..ROTATIONS - 1 {
                        assert!(
                            !ctx.selector[i - delta],
                            "We do not support overlaps with delta = {delta}"
                        );
                    }
                }
                // Re-assign the current value at the top of the next column and constrain it equal
                // to the original cell, so a gate enabled at this row still reads contiguous rows.
                basic_gate = basic_gates
                    .get(gate_index)
                    .unwrap_or_else(|| panic!("NOT ENOUGH ADVICE COLUMNS. Perhaps blinding factors were not taken into account. The max non-poisoned rows is {max_rows}"));
                let column = basic_gate.value;
                #[cfg(feature = "halo2-axiom")]
                let ncell = region.assign_advice(column, row_offset, value);
                #[cfg(not(feature = "halo2-axiom"))]
                let ncell =
                    region.assign_advice(|| "", column, row_offset, || value.map(|v| *v)).unwrap();
                raw_constrain_equal(region, ncell.cell(), cell);
            }

            if q {
                basic_gate
                    .q_enable
                    .enable(region, row_offset)
                    .expect("enable selector should not fail");
            }

            row_offset += 1;
        }
    }
    break_points
}

/// Assigns all virtual `threads` to the physical region using the raw columns in `basic_gates`,
/// following a precomputed set of `break_points` that dictate when to wrap to the next column.
/// No selectors are enabled and no constraint metadata is recorded.
///
/// This is used only for witness generation by the production prover, after the break points
/// have been computed (see [assign_with_constraints]).
pub fn assign_witnesses<F: ScalarField>(
    threads: &[Context<F>],
    basic_gates: &[BasicGateConfig<F>],
    region: &mut Region<F>,
    break_points: &ThreadBreakPoints,
) {
    if basic_gates.is_empty() {
        assert_eq!(
            threads.iter().map(|ctx| ctx.advice.len()).sum::<usize>(),
            0,
            "Trying to assign threads in a phase with no columns"
        );
        return;
    }

    let mut break_points = break_points.clone().into_iter();
    let mut break_point = break_points.next();

    let mut gate_index = 0;
    let mut column = basic_gates[gate_index].value;
    let mut row_offset = 0;

    for ctx in threads {
        for advice in &ctx.advice {
            raw_assign_advice(region, column, row_offset, Value::known(advice));

            // At a break point, re-assign the same value at the top of the next column,
            // mirroring the duplicate cell created by [assign_with_constraints].
            if break_point == Some(row_offset) {
                break_point = break_points.next();
                row_offset = 0;
                gate_index += 1;
                column = basic_gates[gate_index].value;

                raw_assign_advice(region, column, row_offset, Value::known(advice));
            }

            row_offset += 1;
        }
    }
}
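
// A hedged sketch (not in the original source) of the break-point "pinning" flow this module
// enables: a keygen-mode manager records break points the first time `assign_raw` runs, and a
// witness-gen-only manager must have the same break points installed before proving. The
// `vec![100, 100]` values below are placeholders standing in for break points that keygen would
// normally compute, and the `Fr` import reuses the assumption from the sketch above.
#[cfg(test)]
mod break_point_pinning_sketch {
    use super::*;
    use crate::halo2_proofs::halo2curves::bn256::Fr;

    #[test]
    fn export_and_reimport_break_points() {
        // Pretend keygen already ran `assign_raw`, which fills in `break_points`.
        let keygen_pool = SinglePhaseCoreManager::<Fr>::new(false, Default::default());
        *keygen_pool.break_points.borrow_mut() = Some(vec![100, 100]);

        // Export ("pin") the break points from the keygen manager...
        let pinning: ThreadBreakPoints =
            keygen_pool.break_points.borrow().clone().expect("break points not set");

        // ...and install them into a witness-gen-only manager, so its `assign_raw`
        // takes the fast `assign_witnesses` path with a fixed column layout.
        let prover_pool = SinglePhaseCoreManager::<Fr>::new(true, Default::default());
        *prover_pool.break_points.borrow_mut() = Some(pinning.clone());
        assert_eq!(prover_pool.break_points.borrow().as_ref(), Some(&pinning));
    }
}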