// halo2_axiom/transcript.rs

1//! This module contains utilities and traits for dealing with Fiat-Shamir
2//! transcripts.
3
4use blake2b_simd::{Params as Blake2bParams, State as Blake2bState};
5use group::ff::{FromUniformBytes, PrimeField};
6use sha3::{Digest, Keccak256};
7use std::convert::TryInto;
8
9use halo2curves::{Coordinates, CurveAffine};
10
11use std::io::{self, Read, Write};
12use std::marker::PhantomData;
13
/// Prefix to a prover's message soliciting a challenge
const BLAKE2B_PREFIX_CHALLENGE: u8 = 0;

/// Prefix to a prover's message containing a curve point
const BLAKE2B_PREFIX_POINT: u8 = 1;

/// Prefix to a prover's message containing a scalar
const BLAKE2B_PREFIX_SCALAR: u8 = 2;

/// Prefix to a prover's message soliciting a challenge
const KECCAK256_PREFIX_CHALLENGE: u8 = 0;

/// First prefix to a prover's message soliciting a challenge.
/// Not included in the growing state! (It is only absorbed into a
/// finalized clone of the state when squeezing the low half.)
const KECCAK256_PREFIX_CHALLENGE_LO: u8 = 10;

/// Second prefix to a prover's message soliciting a challenge.
/// Not included in the growing state! (It is only absorbed into a
/// finalized clone of the state when squeezing the high half.)
const KECCAK256_PREFIX_CHALLENGE_HI: u8 = 11;

/// Prefix to a prover's message containing a curve point
const KECCAK256_PREFIX_POINT: u8 = 1;

/// Prefix to a prover's message containing a scalar
const KECCAK256_PREFIX_SCALAR: u8 = 2;
39
/// Generic transcript view (from either the prover or verifier's perspective)
pub trait Transcript<C: CurveAffine, E: EncodedChallenge<C>> {
    /// Squeeze an encoded verifier challenge from the transcript.
    fn squeeze_challenge(&mut self) -> E;

    /// Squeeze a typed challenge (in the scalar field) from the transcript.
    /// The type parameter `T` only tags the challenge with a context; it does
    /// not affect the squeezed value.
    fn squeeze_challenge_scalar<T>(&mut self) -> ChallengeScalar<C, T> {
        ChallengeScalar {
            inner: self.squeeze_challenge().get_scalar(),
            _marker: PhantomData,
        }
    }

    /// Writing the point to the transcript without writing it to the proof,
    /// treating it as a common input.
    fn common_point(&mut self, point: C) -> io::Result<()>;

    /// Writing the scalar to the transcript without writing it to the proof,
    /// treating it as a common input.
    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()>;
}
61
/// Transcript view from the perspective of a verifier that has access to an
/// input stream of data from the prover to the verifier.
pub trait TranscriptRead<C: CurveAffine, E: EncodedChallenge<C>>: Transcript<C, E> {
    /// Read a curve point from the prover. Implementations also absorb the
    /// point into the transcript state (via `common_point`).
    fn read_point(&mut self) -> io::Result<C>;

    /// Read a curve scalar from the prover. Implementations also absorb the
    /// scalar into the transcript state (via `common_scalar`).
    fn read_scalar(&mut self) -> io::Result<C::Scalar>;
}
71
/// Transcript view from the perspective of a prover that has access to an
/// output stream of messages from the prover to the verifier.
pub trait TranscriptWrite<C: CurveAffine, E: EncodedChallenge<C>>: Transcript<C, E> {
    /// Write a curve point to the proof and the transcript.
    fn write_point(&mut self, point: C) -> io::Result<()>;

    /// Write a scalar to the proof and the transcript.
    fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()>;
}
81
/// Initializes transcript at verifier side.
pub trait TranscriptReadBuffer<R: Read, C: CurveAffine, E: EncodedChallenge<C>>:
    TranscriptRead<C, E>
{
    /// Initialize a transcript given an input buffer.
    fn init(reader: R) -> Self;
}
89
/// Manages beginning and finishing of the transcript pipeline.
pub trait TranscriptWriterBuffer<W: Write, C: CurveAffine, E: EncodedChallenge<C>>:
    TranscriptWrite<C, E>
{
    /// Initialize a transcript given an output buffer.
    fn init(writer: W) -> Self;

    /// Conclude the interaction and return the output buffer (writer).
    fn finalize(self) -> W;
}
100
/// We will replace BLAKE2b with an algebraic hash function in a later version.
#[derive(Debug, Clone)]
pub struct Blake2bRead<R: Read, C: CurveAffine, E: EncodedChallenge<C>> {
    // Growing BLAKE2b state; absorbs every common input.
    state: Blake2bState,
    // Proof byte stream being verified.
    reader: R,
    _marker: PhantomData<(C, E)>,
}
108
/// Keccak256 hash function reader for EVM compatibility
#[derive(Debug, Clone)]
pub struct Keccak256Read<R: Read, C: CurveAffine, E: EncodedChallenge<C>> {
    // Growing Keccak256 state; absorbs every common input.
    state: Keccak256,
    // Proof byte stream being verified.
    reader: R,
    _marker: PhantomData<(C, E)>,
}
116
117impl<R: Read, C: CurveAffine> TranscriptReadBuffer<R, C, Challenge255<C>>
118    for Blake2bRead<R, C, Challenge255<C>>
119where
120    C::Scalar: FromUniformBytes<64>,
121{
122    /// Initialize a transcript given an input buffer.
123    fn init(reader: R) -> Self {
124        Blake2bRead {
125            state: Blake2bParams::new()
126                .hash_length(64)
127                .personal(b"Halo2-Transcript")
128                .to_state(),
129            reader,
130            _marker: PhantomData,
131        }
132    }
133}
134
135impl<R: Read, C: CurveAffine> TranscriptReadBuffer<R, C, Challenge255<C>>
136    for Keccak256Read<R, C, Challenge255<C>>
137where
138    C::Scalar: FromUniformBytes<64>,
139{
140    /// Initialize a transcript given an input buffer.
141    fn init(reader: R) -> Self {
142        let mut state = Keccak256::new();
143        state.update(b"Halo2-Transcript");
144        Keccak256Read {
145            state,
146            reader,
147            _marker: PhantomData,
148        }
149    }
150}
151
152impl<R: Read, C: CurveAffine> TranscriptRead<C, Challenge255<C>>
153    for Blake2bRead<R, C, Challenge255<C>>
154where
155    C::Scalar: FromUniformBytes<64>,
156{
157    fn read_point(&mut self) -> io::Result<C> {
158        let mut compressed = C::Repr::default();
159        self.reader.read_exact(compressed.as_mut())?;
160        let point: C = Option::from(C::from_bytes(&compressed)).ok_or_else(|| {
161            io::Error::new(io::ErrorKind::Other, "invalid point encoding in proof")
162        })?;
163        self.common_point(point)?;
164
165        Ok(point)
166    }
167
168    fn read_scalar(&mut self) -> io::Result<C::Scalar> {
169        let mut data = <C::Scalar as PrimeField>::Repr::default();
170        self.reader.read_exact(data.as_mut())?;
171        let scalar: C::Scalar = Option::from(C::Scalar::from_repr(data)).ok_or_else(|| {
172            io::Error::new(
173                io::ErrorKind::Other,
174                "invalid field element encoding in proof",
175            )
176        })?;
177        self.common_scalar(scalar)?;
178
179        Ok(scalar)
180    }
181}
182
183impl<R: Read, C: CurveAffine> TranscriptRead<C, Challenge255<C>>
184    for Keccak256Read<R, C, Challenge255<C>>
185where
186    C::Scalar: FromUniformBytes<64>,
187{
188    fn read_point(&mut self) -> io::Result<C> {
189        let mut compressed = C::Repr::default();
190        self.reader.read_exact(compressed.as_mut())?;
191        let point: C = Option::from(C::from_bytes(&compressed)).ok_or_else(|| {
192            io::Error::new(io::ErrorKind::Other, "invalid point encoding in proof")
193        })?;
194        self.common_point(point)?;
195
196        Ok(point)
197    }
198
199    fn read_scalar(&mut self) -> io::Result<C::Scalar> {
200        let mut data = <C::Scalar as PrimeField>::Repr::default();
201        self.reader.read_exact(data.as_mut())?;
202        let scalar: C::Scalar = Option::from(C::Scalar::from_repr(data)).ok_or_else(|| {
203            io::Error::new(
204                io::ErrorKind::Other,
205                "invalid field element encoding in proof",
206            )
207        })?;
208        self.common_scalar(scalar)?;
209
210        Ok(scalar)
211    }
212}
213
214impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>> for Blake2bRead<R, C, Challenge255<C>>
215where
216    C::Scalar: FromUniformBytes<64>,
217{
218    fn squeeze_challenge(&mut self) -> Challenge255<C> {
219        self.state.update(&[BLAKE2B_PREFIX_CHALLENGE]);
220        let hasher = self.state.clone();
221        let result: [u8; 64] = hasher.finalize().as_bytes().try_into().unwrap();
222        Challenge255::<C>::new(&result)
223    }
224
225    fn common_point(&mut self, point: C) -> io::Result<()> {
226        self.state.update(&[BLAKE2B_PREFIX_POINT]);
227        let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
228            io::Error::new(
229                io::ErrorKind::Other,
230                "cannot write points at infinity to the transcript",
231            )
232        })?;
233        self.state.update(coords.x().to_repr().as_ref());
234        self.state.update(coords.y().to_repr().as_ref());
235
236        Ok(())
237    }
238
239    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
240        self.state.update(&[BLAKE2B_PREFIX_SCALAR]);
241        self.state.update(scalar.to_repr().as_ref());
242
243        Ok(())
244    }
245}
246
247impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>>
248    for Keccak256Read<R, C, Challenge255<C>>
249where
250    C::Scalar: FromUniformBytes<64>,
251{
252    fn squeeze_challenge(&mut self) -> Challenge255<C> {
253        self.state.update([KECCAK256_PREFIX_CHALLENGE]);
254
255        let mut state_lo = self.state.clone();
256        let mut state_hi = self.state.clone();
257        state_lo.update([KECCAK256_PREFIX_CHALLENGE_LO]);
258        state_hi.update([KECCAK256_PREFIX_CHALLENGE_HI]);
259        let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap();
260        let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap();
261
262        let mut t = result_lo.to_vec();
263        t.extend_from_slice(&result_hi[..]);
264        let result: [u8; 64] = t.as_slice().try_into().unwrap();
265
266        Challenge255::<C>::new(&result)
267    }
268
269    fn common_point(&mut self, point: C) -> io::Result<()> {
270        self.state.update([KECCAK256_PREFIX_POINT]);
271        let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
272            io::Error::new(
273                io::ErrorKind::Other,
274                "cannot write points at infinity to the transcript",
275            )
276        })?;
277        self.state.update(coords.x().to_repr().as_ref());
278        self.state.update(coords.y().to_repr().as_ref());
279
280        Ok(())
281    }
282
283    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
284        self.state.update([KECCAK256_PREFIX_SCALAR]);
285        self.state.update(scalar.to_repr().as_ref());
286
287        Ok(())
288    }
289}
290
/// We will replace BLAKE2b with an algebraic hash function in a later version.
#[derive(Debug, Clone)]
pub struct Blake2bWrite<W: Write, C: CurveAffine, E: EncodedChallenge<C>> {
    // Growing BLAKE2b state; absorbs every common input.
    state: Blake2bState,
    // Proof byte stream being produced.
    writer: W,
    _marker: PhantomData<(C, E)>,
}
298
/// Keccak256 hash function writer for EVM compatibility
#[derive(Debug, Clone)]
pub struct Keccak256Write<W: Write, C: CurveAffine, E: EncodedChallenge<C>> {
    // Growing Keccak256 state; absorbs every common input.
    state: Keccak256,
    // Proof byte stream being produced.
    writer: W,
    _marker: PhantomData<(C, E)>,
}
306
307impl<W: Write, C: CurveAffine> TranscriptWriterBuffer<W, C, Challenge255<C>>
308    for Blake2bWrite<W, C, Challenge255<C>>
309where
310    C::Scalar: FromUniformBytes<64>,
311{
312    /// Initialize a transcript given an output buffer.
313    fn init(writer: W) -> Self {
314        Blake2bWrite {
315            state: Blake2bParams::new()
316                .hash_length(64)
317                .personal(b"Halo2-Transcript")
318                .to_state(),
319            writer,
320            _marker: PhantomData,
321        }
322    }
323
324    fn finalize(self) -> W {
325        // TODO: handle outstanding scalars? see issue #138
326        self.writer
327    }
328}
329
330impl<W: Write, C: CurveAffine> TranscriptWriterBuffer<W, C, Challenge255<C>>
331    for Keccak256Write<W, C, Challenge255<C>>
332where
333    C::Scalar: FromUniformBytes<64>,
334{
335    /// Initialize a transcript given an output buffer.
336    fn init(writer: W) -> Self {
337        let mut state = Keccak256::new();
338        state.update(b"Halo2-Transcript");
339        Keccak256Write {
340            state,
341            writer,
342            _marker: PhantomData,
343        }
344    }
345
346    /// Conclude the interaction and return the output buffer (writer).
347    fn finalize(self) -> W {
348        // TODO: handle outstanding scalars? see issue #138
349        self.writer
350    }
351}
352
353impl<W: Write, C: CurveAffine> TranscriptWrite<C, Challenge255<C>>
354    for Blake2bWrite<W, C, Challenge255<C>>
355where
356    C::Scalar: FromUniformBytes<64>,
357{
358    fn write_point(&mut self, point: C) -> io::Result<()> {
359        self.common_point(point)?;
360        let compressed = point.to_bytes();
361        self.writer.write_all(compressed.as_ref())
362    }
363    fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
364        self.common_scalar(scalar)?;
365        let data = scalar.to_repr();
366        self.writer.write_all(data.as_ref())
367    }
368}
369
370impl<W: Write, C: CurveAffine> TranscriptWrite<C, Challenge255<C>>
371    for Keccak256Write<W, C, Challenge255<C>>
372where
373    C::Scalar: FromUniformBytes<64>,
374{
375    fn write_point(&mut self, point: C) -> io::Result<()> {
376        self.common_point(point)?;
377        let compressed = point.to_bytes();
378        self.writer.write_all(compressed.as_ref())
379    }
380    fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
381        self.common_scalar(scalar)?;
382        let data = scalar.to_repr();
383        self.writer.write_all(data.as_ref())
384    }
385}
386
387impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
388    for Blake2bWrite<W, C, Challenge255<C>>
389where
390    C::Scalar: FromUniformBytes<64>,
391{
392    fn squeeze_challenge(&mut self) -> Challenge255<C> {
393        self.state.update(&[BLAKE2B_PREFIX_CHALLENGE]);
394        let hasher = self.state.clone();
395        let result: [u8; 64] = hasher.finalize().as_bytes().try_into().unwrap();
396        Challenge255::<C>::new(&result)
397    }
398
399    fn common_point(&mut self, point: C) -> io::Result<()> {
400        self.state.update(&[BLAKE2B_PREFIX_POINT]);
401        let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
402            io::Error::new(
403                io::ErrorKind::Other,
404                "cannot write points at infinity to the transcript",
405            )
406        })?;
407        self.state.update(coords.x().to_repr().as_ref());
408        self.state.update(coords.y().to_repr().as_ref());
409
410        Ok(())
411    }
412
413    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
414        self.state.update(&[BLAKE2B_PREFIX_SCALAR]);
415        self.state.update(scalar.to_repr().as_ref());
416
417        Ok(())
418    }
419}
420
421impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
422    for Keccak256Write<W, C, Challenge255<C>>
423where
424    C::Scalar: FromUniformBytes<64>,
425{
426    fn squeeze_challenge(&mut self) -> Challenge255<C> {
427        self.state.update([KECCAK256_PREFIX_CHALLENGE]);
428
429        let mut state_lo = self.state.clone();
430        let mut state_hi = self.state.clone();
431        state_lo.update([KECCAK256_PREFIX_CHALLENGE_LO]);
432        state_hi.update([KECCAK256_PREFIX_CHALLENGE_HI]);
433        let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap();
434        let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap();
435
436        let mut t = result_lo.to_vec();
437        t.extend_from_slice(&result_hi[..]);
438        let result: [u8; 64] = t.as_slice().try_into().unwrap();
439
440        Challenge255::<C>::new(&result)
441    }
442
443    fn common_point(&mut self, point: C) -> io::Result<()> {
444        self.state.update([KECCAK256_PREFIX_POINT]);
445        let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
446            io::Error::new(
447                io::ErrorKind::Other,
448                "cannot write points at infinity to the transcript",
449            )
450        })?;
451        self.state.update(coords.x().to_repr().as_ref());
452        self.state.update(coords.y().to_repr().as_ref());
453
454        Ok(())
455    }
456
457    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
458        self.state.update([KECCAK256_PREFIX_SCALAR]);
459        self.state.update(scalar.to_repr().as_ref());
460
461        Ok(())
462    }
463}
464
/// The scalar representation of a verifier challenge.
///
/// The `Type` type can be used to scope the challenge to a specific context, or
/// set to `()` if no context is required.
#[derive(Copy, Clone, Debug)]
pub struct ChallengeScalar<C: CurveAffine, T> {
    // The underlying challenge value in the scalar field.
    inner: C::Scalar,
    // Zero-sized context tag; exists only at the type level.
    _marker: PhantomData<T>,
}
474
/// Allow a typed challenge to be used directly as its underlying scalar.
impl<C: CurveAffine, T> std::ops::Deref for ChallengeScalar<C, T> {
    type Target = C::Scalar;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
482
/// `EncodedChallenge<C>` defines a challenge encoding with a [`Self::Input`]
/// that is used to derive the challenge encoding and `get_challenge` obtains
/// the _real_ `C::Scalar` that the challenge encoding represents.
pub trait EncodedChallenge<C: CurveAffine> {
    /// The Input type used to derive the challenge encoding. For example,
    /// an input from the Poseidon hash would be a base field element;
    /// an input from the Blake2b hash would be a [u8; 64].
    type Input;

    /// Get an encoded challenge from a given input challenge.
    fn new(challenge_input: &Self::Input) -> Self;

    /// Get a scalar field element from an encoded challenge.
    fn get_scalar(&self) -> C::Scalar;

    /// Cast an encoded challenge as a typed `ChallengeScalar`.
    /// The type parameter `T` only tags the scalar with a context.
    fn as_challenge_scalar<T>(&self) -> ChallengeScalar<C, T> {
        ChallengeScalar {
            inner: self.get_scalar(),
            _marker: PhantomData,
        }
    }
}
506
/// A 255-bit challenge.
// Stores the canonical 32-byte representation of a scalar field element.
#[derive(Copy, Clone, Debug)]
pub struct Challenge255<C: CurveAffine>([u8; 32], PhantomData<C>);
510
/// Expose the raw 32-byte challenge encoding.
impl<C: CurveAffine> std::ops::Deref for Challenge255<C> {
    type Target = [u8; 32];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
518
519impl<C: CurveAffine> EncodedChallenge<C> for Challenge255<C>
520where
521    C::Scalar: FromUniformBytes<64>,
522{
523    type Input = [u8; 64];
524
525    fn new(challenge_input: &[u8; 64]) -> Self {
526        Challenge255(
527            C::Scalar::from_uniform_bytes(challenge_input)
528                .to_repr()
529                .as_ref()
530                .try_into()
531                .expect("Scalar fits into 256 bits"),
532            PhantomData,
533        )
534    }
535    fn get_scalar(&self) -> C::Scalar {
536        let mut repr = <C::Scalar as PrimeField>::Repr::default();
537        repr.as_mut().copy_from_slice(&self.0);
538        C::Scalar::from_repr(repr).unwrap()
539    }
540}
541
542pub(crate) fn read_n_points<C: CurveAffine, E: EncodedChallenge<C>, T: TranscriptRead<C, E>>(
543    transcript: &mut T,
544    n: usize,
545) -> io::Result<Vec<C>> {
546    (0..n).map(|_| transcript.read_point()).collect()
547}
548
549pub(crate) fn read_n_scalars<C: CurveAffine, E: EncodedChallenge<C>, T: TranscriptRead<C, E>>(
550    transcript: &mut T,
551    n: usize,
552) -> io::Result<Vec<C::Scalar>> {
553    (0..n).map(|_| transcript.read_scalar()).collect()
554}