use blake2b_simd::{Params as Blake2bParams, State as Blake2bState};
use group::ff::{FromUniformBytes, PrimeField};
use sha3::{Digest, Keccak256};
use std::convert::TryInto;
use halo2curves::{Coordinates, CurveAffine};
use std::io::{self, Read, Write};
use std::marker::PhantomData;
/// Prefix absorbed into a BLAKE2b transcript before squeezing a challenge.
const BLAKE2B_PREFIX_CHALLENGE: u8 = 0;
/// Prefix absorbed into a BLAKE2b transcript before a curve point.
const BLAKE2B_PREFIX_POINT: u8 = 1;
/// Prefix absorbed into a BLAKE2b transcript before a scalar.
const BLAKE2B_PREFIX_SCALAR: u8 = 2;
/// Prefix absorbed into a Keccak-256 transcript before squeezing a challenge.
const KECCAK256_PREFIX_CHALLENGE: u8 = 0;
/// Secondary prefix used to derive the low 32 bytes of a 64-byte challenge
/// (Keccak-256 only outputs 32 bytes, so two domain-separated digests are taken).
const KECCAK256_PREFIX_CHALLENGE_LO: u8 = 10;
/// Secondary prefix used to derive the high 32 bytes of a 64-byte challenge.
const KECCAK256_PREFIX_CHALLENGE_HI: u8 = 11;
/// Prefix absorbed into a Keccak-256 transcript before a curve point.
const KECCAK256_PREFIX_POINT: u8 = 1;
/// Prefix absorbed into a Keccak-256 transcript before a scalar.
const KECCAK256_PREFIX_SCALAR: u8 = 2;
/// Generic transcript view, shared by both the prover and the verifier:
/// values are absorbed with `common_*` and challenges are squeezed out.
pub trait Transcript<C: CurveAffine, E: EncodedChallenge<C>> {
    /// Squeeze an encoded verifier challenge from the transcript.
    fn squeeze_challenge(&mut self) -> E;

    /// Squeeze a typed challenge scalar (the type parameter `T` only tags the
    /// challenge's role; the value is `squeeze_challenge().get_scalar()`).
    fn squeeze_challenge_scalar<T>(&mut self) -> ChallengeScalar<C, T> {
        ChallengeScalar {
            inner: self.squeeze_challenge().get_scalar(),
            _marker: PhantomData,
        }
    }

    /// Absorb a curve point into the transcript (without writing it anywhere).
    fn common_point(&mut self, point: C) -> io::Result<()>;

    /// Absorb a scalar into the transcript (without writing it anywhere).
    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()>;
}
/// Transcript view from the verifier's perspective: each `read_*` pulls a
/// value from the underlying proof stream and absorbs it into the transcript.
pub trait TranscriptRead<C: CurveAffine, E: EncodedChallenge<C>>: Transcript<C, E> {
    /// Read a curve point from the proof and absorb it into the transcript.
    fn read_point(&mut self) -> io::Result<C>;

    /// Read a scalar from the proof and absorb it into the transcript.
    fn read_scalar(&mut self) -> io::Result<C::Scalar>;
}
/// Transcript view from the prover's perspective: each `write_*` absorbs a
/// value into the transcript and serializes it to the proof stream.
pub trait TranscriptWrite<C: CurveAffine, E: EncodedChallenge<C>>: Transcript<C, E> {
    /// Absorb a curve point and write its compressed encoding to the proof.
    fn write_point(&mut self, point: C) -> io::Result<()>;

    /// Absorb a scalar and write its canonical encoding to the proof.
    fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()>;
}
/// Constructor trait for verifier transcripts backed by a proof reader.
pub trait TranscriptReadBuffer<R: Read, C: CurveAffine, E: EncodedChallenge<C>>:
    TranscriptRead<C, E>
{
    /// Initialize a transcript whose proof bytes come from `reader`.
    fn init(reader: R) -> Self;
}
/// Constructor trait for prover transcripts backed by a proof writer.
pub trait TranscriptWriterBuffer<W: Write, C: CurveAffine, E: EncodedChallenge<C>>:
    TranscriptWrite<C, E>
{
    /// Initialize a transcript that writes proof bytes to `writer`.
    fn init(writer: W) -> Self;

    /// Consume the transcript and return the writer holding the proof bytes.
    fn finalize(self) -> W;
}
#[derive(Debug, Clone)]
pub struct Blake2bRead<R: Read, C: CurveAffine, E: EncodedChallenge<C>> {
state: Blake2bState,
reader: R,
_marker: PhantomData<(C, E)>,
}
#[derive(Debug, Clone)]
pub struct Keccak256Read<R: Read, C: CurveAffine, E: EncodedChallenge<C>> {
state: Keccak256,
reader: R,
_marker: PhantomData<(C, E)>,
}
impl<R: Read, C: CurveAffine> TranscriptReadBuffer<R, C, Challenge255<C>>
    for Blake2bRead<R, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Build a fresh verifier transcript over `reader`, with the BLAKE2b
    /// state personalized to the "Halo2-Transcript" domain and a 64-byte
    /// output length.
    fn init(reader: R) -> Self {
        let state = Blake2bParams::new()
            .hash_length(64)
            .personal(b"Halo2-Transcript")
            .to_state();
        Self {
            state,
            reader,
            _marker: PhantomData,
        }
    }
}
impl<R: Read, C: CurveAffine> TranscriptReadBuffer<R, C, Challenge255<C>>
    for Keccak256Read<R, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Build a fresh verifier transcript over `reader`, seeding the Keccak
    /// state with the "Halo2-Transcript" domain tag.
    fn init(reader: R) -> Self {
        let mut hasher = Keccak256::new();
        hasher.update(b"Halo2-Transcript");
        Self {
            state: hasher,
            reader,
            _marker: PhantomData,
        }
    }
}
impl<R: Read, C: CurveAffine> TranscriptRead<C, Challenge255<C>>
    for Blake2bRead<R, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Read a compressed point from the proof stream, reject invalid
    /// encodings, and absorb the point into the transcript.
    fn read_point(&mut self) -> io::Result<C> {
        let mut repr = C::Repr::default();
        self.reader.read_exact(repr.as_mut())?;
        let maybe_point: Option<C> = C::from_bytes(&repr).into();
        let point = maybe_point.ok_or_else(|| {
            io::Error::new(io::ErrorKind::Other, "invalid point encoding in proof")
        })?;
        self.common_point(point)?;
        Ok(point)
    }

    /// Read a canonical scalar encoding from the proof stream, reject
    /// non-canonical values, and absorb the scalar into the transcript.
    fn read_scalar(&mut self) -> io::Result<C::Scalar> {
        let mut repr = <C::Scalar as PrimeField>::Repr::default();
        self.reader.read_exact(repr.as_mut())?;
        let maybe_scalar: Option<C::Scalar> = C::Scalar::from_repr(repr).into();
        let scalar = maybe_scalar.ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::Other,
                "invalid field element encoding in proof",
            )
        })?;
        self.common_scalar(scalar)?;
        Ok(scalar)
    }
}
impl<R: Read, C: CurveAffine> TranscriptRead<C, Challenge255<C>>
    for Keccak256Read<R, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Read a compressed point from the proof stream, reject invalid
    /// encodings, and absorb the point into the transcript.
    fn read_point(&mut self) -> io::Result<C> {
        let mut repr = C::Repr::default();
        self.reader.read_exact(repr.as_mut())?;
        let maybe_point: Option<C> = C::from_bytes(&repr).into();
        let point = maybe_point.ok_or_else(|| {
            io::Error::new(io::ErrorKind::Other, "invalid point encoding in proof")
        })?;
        self.common_point(point)?;
        Ok(point)
    }

    /// Read a canonical scalar encoding from the proof stream, reject
    /// non-canonical values, and absorb the scalar into the transcript.
    fn read_scalar(&mut self) -> io::Result<C::Scalar> {
        let mut repr = <C::Scalar as PrimeField>::Repr::default();
        self.reader.read_exact(repr.as_mut())?;
        let maybe_scalar: Option<C::Scalar> = C::Scalar::from_repr(repr).into();
        let scalar = maybe_scalar.ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::Other,
                "invalid field element encoding in proof",
            )
        })?;
        self.common_scalar(scalar)?;
        Ok(scalar)
    }
}
impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>> for Blake2bRead<R, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Absorb the challenge prefix, then hash a snapshot of the state into a
    /// 64-byte digest (BLAKE2b was configured with `hash_length(64)`).
    fn squeeze_challenge(&mut self) -> Challenge255<C> {
        self.state.update(&[BLAKE2B_PREFIX_CHALLENGE]);
        let digest = self.state.clone().finalize();
        let bytes: [u8; 64] = digest.as_bytes().try_into().unwrap();
        Challenge255::<C>::new(&bytes)
    }

    /// Absorb the point prefix and both affine coordinates; the identity has
    /// no affine coordinates and is rejected.
    fn common_point(&mut self, point: C) -> io::Result<()> {
        self.state.update(&[BLAKE2B_PREFIX_POINT]);
        let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::Other,
                "cannot write points at infinity to the transcript",
            )
        })?;
        let x_repr = coords.x().to_repr();
        let y_repr = coords.y().to_repr();
        self.state.update(x_repr.as_ref());
        self.state.update(y_repr.as_ref());
        Ok(())
    }

    /// Absorb the scalar prefix and the scalar's canonical encoding.
    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
        self.state.update(&[BLAKE2B_PREFIX_SCALAR]);
        let repr = scalar.to_repr();
        self.state.update(repr.as_ref());
        Ok(())
    }
}
impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>>
    for Keccak256Read<R, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Absorb the challenge prefix, then derive a 64-byte challenge.
    ///
    /// Keccak-256 only outputs 32 bytes, so two domain-separated digests
    /// (`_LO` and `_HI` prefixes) of the current state are concatenated to
    /// obtain the 64 uniform bytes `Challenge255` requires.
    fn squeeze_challenge(&mut self) -> Challenge255<C> {
        self.state.update([KECCAK256_PREFIX_CHALLENGE]);

        let mut state_lo = self.state.clone();
        let mut state_hi = self.state.clone();
        state_lo.update([KECCAK256_PREFIX_CHALLENGE_LO]);
        state_hi.update([KECCAK256_PREFIX_CHALLENGE_HI]);
        let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap();
        let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap();

        // Concatenate on the stack instead of round-tripping through a Vec;
        // the resulting 64 bytes are identical to the previous implementation.
        let mut result = [0u8; 64];
        result[..32].copy_from_slice(&result_lo);
        result[32..].copy_from_slice(&result_hi);
        Challenge255::<C>::new(&result)
    }

    /// Absorb the point prefix and both affine coordinates; the identity has
    /// no affine coordinates and is rejected.
    fn common_point(&mut self, point: C) -> io::Result<()> {
        self.state.update([KECCAK256_PREFIX_POINT]);
        let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::Other,
                "cannot write points at infinity to the transcript",
            )
        })?;
        self.state.update(coords.x().to_repr().as_ref());
        self.state.update(coords.y().to_repr().as_ref());
        Ok(())
    }

    /// Absorb the scalar prefix and the scalar's canonical encoding.
    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
        self.state.update([KECCAK256_PREFIX_SCALAR]);
        self.state.update(scalar.to_repr().as_ref());
        Ok(())
    }
}
#[derive(Debug, Clone)]
pub struct Blake2bWrite<W: Write, C: CurveAffine, E: EncodedChallenge<C>> {
state: Blake2bState,
writer: W,
_marker: PhantomData<(C, E)>,
}
#[derive(Debug, Clone)]
pub struct Keccak256Write<W: Write, C: CurveAffine, E: EncodedChallenge<C>> {
state: Keccak256,
writer: W,
_marker: PhantomData<(C, E)>,
}
impl<W: Write, C: CurveAffine> TranscriptWriterBuffer<W, C, Challenge255<C>>
    for Blake2bWrite<W, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Build a fresh prover transcript over `writer`, with the BLAKE2b state
    /// personalized to the "Halo2-Transcript" domain and a 64-byte output
    /// length.
    fn init(writer: W) -> Self {
        let state = Blake2bParams::new()
            .hash_length(64)
            .personal(b"Halo2-Transcript")
            .to_state();
        Self {
            state,
            writer,
            _marker: PhantomData,
        }
    }

    /// Hand back the writer containing the serialized proof.
    fn finalize(self) -> W {
        self.writer
    }
}
impl<W: Write, C: CurveAffine> TranscriptWriterBuffer<W, C, Challenge255<C>>
    for Keccak256Write<W, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Build a fresh prover transcript over `writer`, seeding the Keccak
    /// state with the "Halo2-Transcript" domain tag.
    fn init(writer: W) -> Self {
        let mut hasher = Keccak256::new();
        hasher.update(b"Halo2-Transcript");
        Self {
            state: hasher,
            writer,
            _marker: PhantomData,
        }
    }

    /// Hand back the writer containing the serialized proof.
    fn finalize(self) -> W {
        self.writer
    }
}
impl<W: Write, C: CurveAffine> TranscriptWrite<C, Challenge255<C>>
    for Blake2bWrite<W, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Absorb the point into the transcript, then emit its compressed
    /// encoding to the proof stream.
    fn write_point(&mut self, point: C) -> io::Result<()> {
        self.common_point(point)?;
        self.writer.write_all(point.to_bytes().as_ref())
    }

    /// Absorb the scalar into the transcript, then emit its canonical
    /// encoding to the proof stream.
    fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
        self.common_scalar(scalar)?;
        self.writer.write_all(scalar.to_repr().as_ref())
    }
}
impl<W: Write, C: CurveAffine> TranscriptWrite<C, Challenge255<C>>
    for Keccak256Write<W, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Absorb the point into the transcript, then emit its compressed
    /// encoding to the proof stream.
    fn write_point(&mut self, point: C) -> io::Result<()> {
        self.common_point(point)?;
        self.writer.write_all(point.to_bytes().as_ref())
    }

    /// Absorb the scalar into the transcript, then emit its canonical
    /// encoding to the proof stream.
    fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
        self.common_scalar(scalar)?;
        self.writer.write_all(scalar.to_repr().as_ref())
    }
}
impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
    for Blake2bWrite<W, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Absorb the challenge prefix, then hash a snapshot of the state into a
    /// 64-byte digest (BLAKE2b was configured with `hash_length(64)`).
    fn squeeze_challenge(&mut self) -> Challenge255<C> {
        self.state.update(&[BLAKE2B_PREFIX_CHALLENGE]);
        let digest = self.state.clone().finalize();
        let bytes: [u8; 64] = digest.as_bytes().try_into().unwrap();
        Challenge255::<C>::new(&bytes)
    }

    /// Absorb the point prefix and both affine coordinates; the identity has
    /// no affine coordinates and is rejected.
    fn common_point(&mut self, point: C) -> io::Result<()> {
        self.state.update(&[BLAKE2B_PREFIX_POINT]);
        let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::Other,
                "cannot write points at infinity to the transcript",
            )
        })?;
        let x_repr = coords.x().to_repr();
        let y_repr = coords.y().to_repr();
        self.state.update(x_repr.as_ref());
        self.state.update(y_repr.as_ref());
        Ok(())
    }

    /// Absorb the scalar prefix and the scalar's canonical encoding.
    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
        self.state.update(&[BLAKE2B_PREFIX_SCALAR]);
        let repr = scalar.to_repr();
        self.state.update(repr.as_ref());
        Ok(())
    }
}
impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
    for Keccak256Write<W, C, Challenge255<C>>
where
    C::Scalar: FromUniformBytes<64>,
{
    /// Absorb the challenge prefix, then derive a 64-byte challenge.
    ///
    /// Keccak-256 only outputs 32 bytes, so two domain-separated digests
    /// (`_LO` and `_HI` prefixes) of the current state are concatenated to
    /// obtain the 64 uniform bytes `Challenge255` requires.
    fn squeeze_challenge(&mut self) -> Challenge255<C> {
        self.state.update([KECCAK256_PREFIX_CHALLENGE]);

        let mut state_lo = self.state.clone();
        let mut state_hi = self.state.clone();
        state_lo.update([KECCAK256_PREFIX_CHALLENGE_LO]);
        state_hi.update([KECCAK256_PREFIX_CHALLENGE_HI]);
        let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap();
        let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap();

        // Concatenate on the stack instead of round-tripping through a Vec;
        // the resulting 64 bytes are identical to the previous implementation.
        let mut result = [0u8; 64];
        result[..32].copy_from_slice(&result_lo);
        result[32..].copy_from_slice(&result_hi);
        Challenge255::<C>::new(&result)
    }

    /// Absorb the point prefix and both affine coordinates; the identity has
    /// no affine coordinates and is rejected.
    fn common_point(&mut self, point: C) -> io::Result<()> {
        self.state.update([KECCAK256_PREFIX_POINT]);
        let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::Other,
                "cannot write points at infinity to the transcript",
            )
        })?;
        self.state.update(coords.x().to_repr().as_ref());
        self.state.update(coords.y().to_repr().as_ref());
        Ok(())
    }

    /// Absorb the scalar prefix and the scalar's canonical encoding.
    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
        self.state.update([KECCAK256_PREFIX_SCALAR]);
        self.state.update(scalar.to_repr().as_ref());
        Ok(())
    }
}
#[derive(Copy, Clone, Debug)]
pub struct ChallengeScalar<C: CurveAffine, T> {
inner: C::Scalar,
_marker: PhantomData<T>,
}
/// Allow a `ChallengeScalar` to be used wherever a `&C::Scalar` is expected.
impl<C: CurveAffine, T> std::ops::Deref for ChallengeScalar<C, T> {
    type Target = C::Scalar;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
/// Encoding of a transcript challenge: built from hash output
/// (`Self::Input`) and convertible to a scalar for use in the protocol.
pub trait EncodedChallenge<C: CurveAffine> {
    /// The raw hash material the challenge is derived from
    /// (e.g. `[u8; 64]` for `Challenge255`).
    type Input;

    /// Derive a challenge encoding from hash output.
    fn new(challenge_input: &Self::Input) -> Self;

    /// Decode the challenge into a field element.
    fn get_scalar(&self) -> C::Scalar;

    /// View this challenge as a typed `ChallengeScalar` with role tag `T`.
    fn as_challenge_scalar<T>(&self) -> ChallengeScalar<C, T> {
        ChallengeScalar {
            inner: self.get_scalar(),
            _marker: PhantomData,
        }
    }
}
/// A 255-bit challenge stored as the canonical 32-byte representation of the
/// scalar derived (in `EncodedChallenge::new`) from 64 uniform hash bytes.
#[derive(Copy, Clone, Debug)]
pub struct Challenge255<C: CurveAffine>([u8; 32], PhantomData<C>);
/// Expose the challenge's 32-byte scalar representation.
impl<C: CurveAffine> std::ops::Deref for Challenge255<C> {
    type Target = [u8; 32];
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<C: CurveAffine> EncodedChallenge<C> for Challenge255<C>
where
    C::Scalar: FromUniformBytes<64>,
{
    type Input = [u8; 64];

    /// Reduce 64 uniform hash bytes to a scalar and store the scalar's
    /// canonical 32-byte representation.
    fn new(challenge_input: &[u8; 64]) -> Self {
        Challenge255(
            C::Scalar::from_uniform_bytes(challenge_input)
                .to_repr()
                .as_ref()
                .try_into()
                .expect("Scalar fits into 256 bits"),
            PhantomData,
        )
    }

    /// Decode the stored bytes back into a scalar.
    fn get_scalar(&self) -> C::Scalar {
        let mut repr = <C::Scalar as PrimeField>::Repr::default();
        repr.as_mut().copy_from_slice(&self.0);
        // Invariant: `self.0` was produced by `to_repr()` in `new`, so it is
        // always a canonical scalar encoding and decoding cannot fail.
        C::Scalar::from_repr(repr).expect("stored bytes are a canonical scalar encoding")
    }
}
/// Read `n` points from the transcript, stopping at the first error.
pub fn read_n_points<C: CurveAffine, E: EncodedChallenge<C>, T: TranscriptRead<C, E>>(
    transcript: &mut T,
    n: usize,
) -> io::Result<Vec<C>> {
    let mut points = Vec::with_capacity(n);
    for _ in 0..n {
        points.push(transcript.read_point()?);
    }
    Ok(points)
}
/// Read `n` scalars from the transcript, stopping at the first error.
pub fn read_n_scalars<C: CurveAffine, E: EncodedChallenge<C>, T: TranscriptRead<C, E>>(
    transcript: &mut T,
    n: usize,
) -> io::Result<Vec<C::Scalar>> {
    let mut scalars = Vec::with_capacity(n);
    for _ in 0..n {
        scalars.push(transcript.read_scalar()?);
    }
    Ok(scalars)
}