halo2_axiom/poly/kzg/multiopen/shplonk/prover.rs

use std::fmt::Debug;
use std::hash::Hash;
use std::marker::PhantomData;
use std::ops::MulAssign;

use super::{
    construct_intermediate_sets, ChallengeU, ChallengeV, ChallengeY, Commitment, RotationSet,
};
use crate::arithmetic::{
    eval_polynomial, evaluate_vanishing_polynomial, kate_division, lagrange_interpolate,
    parallelize, powers, CurveAffine,
};
use crate::helpers::SerdeCurveAffine;
use crate::poly::commitment::{Blind, ParamsProver, Prover};
use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG};
use crate::poly::query::{PolynomialPointer, ProverQuery};
use crate::poly::{Coeff, Polynomial};
use crate::transcript::{EncodedChallenge, TranscriptWrite};

use crate::multicore::IntoParallelIterator;
use ff::Field;
use group::Curve;
use pairing::Engine;
use rand_core::RngCore;

use std::io;

#[cfg(feature = "multicore")]
use crate::multicore::ParallelIterator;

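/// Divides `poly` by the vanishing polynomial of `roots`, i.e. by
/// `(X - root_0) * (X - root_1) * ...`, via successive Kate divisions.
/// The division is exact when `poly` evaluates to zero at every root.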
fn div_by_vanishing<F: Field>(poly: Polynomial<F, Coeff>, roots: &[F]) -> Vec<F> {
    roots
        .iter()
        .fold(poly.values, |poly, point| kate_division(&poly, *point))
}

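/// A [`Commitment`] paired with the low degree polynomial `R(X)` that
/// interpolates its claimed evaluations over a rotation set's points.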
struct CommitmentExtension<'a, C: CurveAffine> {
    commitment: Commitment<C::Scalar, PolynomialPointer<'a, C>>,
    low_degree_equivalent: Polynomial<C::Scalar, Coeff>,
}

impl<'a, C: CurveAffine> Commitment<C::Scalar, PolynomialPointer<'a, C>> {
    fn extend(&self, points: &[C::Scalar]) -> CommitmentExtension<'a, C> {
        let poly = lagrange_interpolate(points, &self.evals()[..]);

        let low_degree_equivalent = Polynomial {
            values: poly,
            _marker: PhantomData,
        };

        CommitmentExtension {
            commitment: self.clone(),
            low_degree_equivalent,
        }
    }
}

impl<'a, C: CurveAffine> CommitmentExtension<'a, C> {
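    /// Returns `P(X) - R(u)`, the contribution of this commitment to the
    /// linearisation polynomial at the evaluation challenge `u`.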
    fn linearisation_contribution(&self, u: C::Scalar) -> Polynomial<C::Scalar, Coeff> {
        let p_x = self.commitment.get().poly;
        let r_eval = eval_polynomial(&self.low_degree_equivalent.values[..], u);
        p_x - r_eval
    }

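    /// Returns `P(X) - R(X)`, subtracting the low degree equivalent
    /// coefficient-wise from the committed polynomial.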
    fn quotient_contribution(&self) -> Polynomial<C::Scalar, Coeff> {
        let len = self.low_degree_equivalent.len();
        let mut p_x = self.commitment.get().poly.clone();
        parallelize(&mut p_x.values[0..len], |lhs, start| {
            for (lhs, rhs) in lhs
                .iter_mut()
                .zip(self.low_degree_equivalent.values[start..].iter())
            {
                *lhs -= *rhs;
            }
        });
        p_x
    }
}

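/// A [`RotationSet`] whose commitments have been extended with their low
/// degree equivalents, together with the set's evaluation points.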
struct RotationSetExtension<'a, C: CurveAffine> {
    commitments: Vec<CommitmentExtension<'a, C>>,
    points: Vec<C::Scalar>,
}

impl<'a, C: CurveAffine> RotationSet<C::Scalar, PolynomialPointer<'a, C>> {
    fn extend(self, commitments: Vec<CommitmentExtension<'a, C>>) -> RotationSetExtension<'a, C> {
        RotationSetExtension {
            commitments,
            points: self.points,
        }
    }
}

/// Concrete KZG prover using the SHPLONK multi-open variant
#[derive(Debug)]
pub struct ProverSHPLONK<'a, E: Engine> {
    params: &'a ParamsKZG<E>,
}

impl<'a, E: Engine> ProverSHPLONK<'a, E> {
    /// Given parameters, creates a new prover instance
    pub fn new(params: &'a ParamsKZG<E>) -> Self {
        Self { params }
    }
}

/// Create a multi-opening proof
impl<'params, E: Engine + Debug> Prover<'params, KZGCommitmentScheme<E>>
    for ProverSHPLONK<'params, E>
where
    E::G1Affine: SerdeCurveAffine<ScalarExt = E::Fr, CurveExt = E::G1>,
    E::G2Affine: SerdeCurveAffine,
    E::Fr: Hash,
{
    const QUERY_INSTANCE: bool = false;

    fn new(params: &'params ParamsKZG<E>) -> Self {
        Self { params }
    }

    /// Create a multi-opening proof
    fn create_proof<
        'com,
        Ch: EncodedChallenge<E::G1Affine>,
        T: TranscriptWrite<E::G1Affine, Ch>,
        R,
        I,
    >(
        &self,
        _: R,
        transcript: &mut T,
        queries: I,
    ) -> io::Result<()>
    where
        I: IntoIterator<Item = ProverQuery<'com, E::G1Affine>> + Clone,
        R: RngCore,
    {
        // TODO: explore if it is safe to use the same challenge
        // for different sets that are already combined with another challenge
        let y: ChallengeY<_> = transcript.squeeze_challenge_scalar();

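        // For a rotation set S_i with vanishing polynomial Z_i(X), computes
        // Q_i(X) = (sum_j y^j * (P_i_j(X) - R_i_j(X))) / Z_i(X).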
        let quotient_contribution = |rotation_set: &RotationSetExtension<E::G1Affine>| {
            // [P_i_0(X) - R_i_0(X), P_i_1(X) - R_i_1(X), ... ]
            #[allow(clippy::needless_collect)]
            let numerators = rotation_set
                .commitments
                .as_slice()
                .into_par_iter()
                .map(|commitment| commitment.quotient_contribution())
                .collect::<Vec<_>>();

            // define the numerator polynomial as
            // N_i_j(X) = P_i_j(X) - R_i_j(X)
            // and combine polynomials with the same evaluation point set
            // N_i(X) = linear_combination(y, N_i_j(X))
            // where y is a random scalar used to combine the numerator polynomials
            let n_x = numerators
                .into_iter()
                .zip(powers(*y))
                .map(|(numerator, power_of_y)| numerator * power_of_y)
                .reduce(|acc, numerator| acc + &numerator)
                .unwrap();

            let points = &rotation_set.points[..];

            // the quotient contribution of this evaluation set is
            // Q_i(X) = N_i(X) / Z_i(X) where
            // Z_i(X) = (X - r_i_0) * (X - r_i_1) * ...
            let mut poly = div_by_vanishing(n_x, points);
            poly.resize(self.params.n as usize, E::Fr::ZERO);

            Polynomial {
                values: poly,
                _marker: PhantomData,
            }
        };

        let intermediate_sets = construct_intermediate_sets(queries).ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::InvalidInput,
                "queries iterator contains mismatching evaluations",
            )
        })?;
        let (rotation_sets, super_point_set) = (
            intermediate_sets.rotation_sets,
            intermediate_sets.super_point_set,
        );

        let rotation_sets: Vec<RotationSetExtension<E::G1Affine>> = rotation_sets
            .into_par_iter()
            .map(|rotation_set| {
                let commitments: Vec<CommitmentExtension<E::G1Affine>> = rotation_set
                    .commitments
                    .as_slice()
                    .into_par_iter()
                    .map(|commitment_data| commitment_data.extend(&rotation_set.points))
                    .collect();
                rotation_set.extend(commitments)
            })
            .collect();

        let v: ChallengeV<_> = transcript.squeeze_challenge_scalar();

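        // H(X) = sum_i v^i * Q_i(X), combining the per-set quotients with
        // powers of the challenge v.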
        #[allow(clippy::needless_collect)]
        let quotient_polynomials = rotation_sets
            .as_slice()
            .into_par_iter()
            .map(quotient_contribution)
            .collect::<Vec<_>>();

        let h_x: Polynomial<E::Fr, Coeff> = quotient_polynomials
            .into_iter()
            .zip(powers(*v))
            .map(|(poly, power_of_v)| poly * power_of_v)
            .reduce(|acc, poly| acc + &poly)
            .unwrap();

        let h = self.params.commit(&h_x, Blind::default()).to_affine();
        transcript.write_point(h)?;
        let u: ChallengeU<_> = transcript.squeeze_challenge_scalar();

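        // For each rotation set S_i, computes
        // L_i(X) = Z_{T \ S_i}(u) * sum_j y^j * (P_i_j(X) - R_i_j(u))
        // and returns it together with the difference vanishing evaluation Z_{T \ S_i}(u),
        // where T is the super point set.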
        let linearisation_contribution = |rotation_set: RotationSetExtension<E::G1Affine>| {
            let mut diffs = super_point_set.clone();
            for point in rotation_set.points.iter() {
                diffs.remove(point);
            }
            let diffs = diffs.into_iter().collect::<Vec<_>>();

            // calculate the difference vanishing polynomial evaluation
            let z_i = evaluate_vanishing_polynomial(&diffs[..], *u);

            // the inner linearisation contributions are
            // [P_i_0(X) - r_i_0, P_i_1(X) - r_i_1, ... ] where
            // r_i_j = R_i_j(u) is the evaluation of the low degree equivalent polynomial
            // at the random evaluation point u
            #[allow(clippy::needless_collect)]
            let inner_contributions = rotation_set
                .commitments
                .as_slice()
                .into_par_iter()
                .map(|commitment| commitment.linearisation_contribution(*u))
                .collect::<Vec<_>>();

            // define the inner contribution polynomial as
            // L_i_j(X) = P_i_j(X) - r_i_j
            // and combine polynomials with the same evaluation point set
            // L_i(X) = linear_combination(y, L_i_j(X))
            // where y is a random scalar used to combine the inner contributions
            let l_x: Polynomial<E::Fr, Coeff> = inner_contributions
                .into_iter()
                .zip(powers(*y))
                .map(|(poly, power_of_y)| poly * power_of_y)
                .reduce(|acc, poly| acc + &poly)
                .unwrap();

            // finally scale l_x by the difference vanishing polynomial evaluation z_i
            (l_x * z_i, z_i)
        };

        #[allow(clippy::type_complexity)]
        let (linearisation_contributions, z_diffs): (
            Vec<Polynomial<E::Fr, Coeff>>,
            Vec<E::Fr>,
        ) = rotation_sets
            .into_par_iter()
            .map(linearisation_contribution)
            .unzip();

        let l_x: Polynomial<E::Fr, Coeff> = linearisation_contributions
            .into_iter()
            .zip(powers(*v))
            .map(|(poly, power_of_v)| poly * power_of_v)
            .reduce(|acc, poly| acc + &poly)
            .unwrap();

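        // Subtract Z_T(u) * H(X) from the combined linearisation polynomial, where
        // Z_T is the vanishing polynomial of the super point set; the resulting
        // L(X) vanishes at u by construction.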
        let super_point_set = super_point_set.into_iter().collect::<Vec<_>>();
        let zt_eval = evaluate_vanishing_polynomial(&super_point_set[..], *u);
        let l_x = l_x - &(h_x * zt_eval);

        // sanity check
        #[cfg(debug_assertions)]
        {
            let must_be_zero = eval_polynomial(&l_x.values[..], *u);
            assert_eq!(must_be_zero, E::Fr::ZERO);
        }

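        // Opening witness: divide L(X) by (X - u); the division is exact since L(u) = 0.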
        let mut h_x = div_by_vanishing(l_x, &[*u]);

        // normalize the coefficients by the first difference vanishing evaluation z_0 = Z_{T \ S_0}(u)
        let z_0_diff_inv = z_diffs[0].invert().unwrap();
        for h_i in h_x.iter_mut() {
            h_i.mul_assign(z_0_diff_inv)
        }

        let h_x = Polynomial {
            values: h_x,
            _marker: PhantomData,
        };

        let h = self.params.commit(&h_x, Blind::default()).to_affine();
        transcript.write_point(h)?;

        Ok(())
    }
}