diff --git a/Cargo.toml b/Cargo.toml
index a8f99d54..954bb85d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -11,6 +11,8 @@
 ark-std = "^0.4.0"
 ark-crypto-primitives = { version = "^0.4.0", default-features = false, features = ["r1cs", "sponge"] }
 ark-relations = { version = "^0.4.0", default-features = false }
 ark-r1cs-std = { version = "^0.4.0", default-features = false }
+tiny-keccak = { version = "2.0", features = ["keccak"] }
+sha3 = "0.10.8"
 thiserror = "1.0"
 rayon = "1.7.0"
diff --git a/src/pedersen.rs b/src/pedersen.rs
index 16af2e5a..27663a22 100644
--- a/src/pedersen.rs
+++ b/src/pedersen.rs
@@ -93,22 +93,20 @@ impl<C: CurveGroup> Pedersen<C> {
 mod tests {
     use super::*;
     use crate::transcript::poseidon::{tests::poseidon_test_config, PoseidonTranscript};
+    use crate::transcript::keccak::{tests::keccak_test_config, KeccakTranscript};
+    use crate::transcript::sha3::{tests::sha3_test_config, SHA3Transcript};
+    use crate::transcript::Transcript;
     use ark_pallas::{Fr, Projective};
 
-    #[test]
-    fn test_pedersen_vector() {
+    fn test_pedersen_vector_with<T: Transcript<Projective>>(config: T::TranscriptConfig) {
+        let mut transcript_p: T = Transcript::<Projective>::new(&config);
+        let mut transcript_v: T = Transcript::<Projective>::new(&config);
         let mut rng = ark_std::test_rng();
 
         const n: usize = 10;
         // setup params
         let params = Pedersen::<Projective>::new_params(&mut rng, n);
-        let poseidon_config = poseidon_test_config::<Fr>();
-
-        // init Prover's transcript
-        let mut transcript_p = PoseidonTranscript::<Projective>::new(&poseidon_config);
-        // init Verifier's transcript
-        let mut transcript_v = PoseidonTranscript::<Projective>::new(&poseidon_config);
 
         let v: Vec<Fr> = vec![Fr::rand(&mut rng); n];
         let r: Fr = Fr::rand(&mut rng);
@@ -117,4 +115,14 @@ mod tests {
         let v = Pedersen::<Projective>::verify(&params, &mut transcript_v, cm, proof);
         assert!(v);
     }
+
+    #[test]
+    fn test_pedersen_vector() {
+        // Test for Poseidon
+        test_pedersen_vector_with::<PoseidonTranscript<Projective>>(poseidon_test_config::<Fr>());
+        // Test for Keccak
+        test_pedersen_vector_with::<KeccakTranscript<Projective>>(keccak_test_config::<Fr>());
+        // Test for SHA3
+        test_pedersen_vector_with::<SHA3Transcript<Projective>>(sha3_test_config::<Fr>());
+    }
 }
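For reference (not part of the patch): the generic `test_pedersen_vector_with` helper above only depends on the `Transcript` trait. The sketch below shows the trait shape it assumes, inferred from the implementations added in this diff; the exact bounds and signatures live in `src/transcript/mod.rs` and may differ slightly.

```rust
use ark_ec::CurveGroup;
use ark_std::fmt::Debug;

/// Inferred interface: each transcript backend carries its own config type,
/// absorbs field elements and curve points, and squeezes challenges.
pub trait Transcript<C: CurveGroup> {
    type TranscriptConfig: Debug;

    fn new(config: &Self::TranscriptConfig) -> Self;
    fn absorb(&mut self, v: &C::ScalarField);
    fn absorb_vec(&mut self, v: &[C::ScalarField]);
    fn absorb_point(&mut self, p: &C);
    fn get_challenge(&mut self) -> C::ScalarField;
    fn get_challenge_nbits(&mut self, nbits: usize) -> Vec<bool>;
    fn get_challenges(&mut self, n: usize) -> Vec<C::ScalarField>;
}
```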
diff --git a/src/transcript/keccak.rs b/src/transcript/keccak.rs
new file mode 100644
index 00000000..db3f517f
--- /dev/null
+++ b/src/transcript/keccak.rs
@@ -0,0 +1,101 @@
+use std::marker::PhantomData;
+use tiny_keccak::{Keccak, Hasher};
+use ark_ec::CurveGroup;
+use ark_ff::{BigInteger, PrimeField};
+
+use crate::transcript::Transcript;
+
+/// KeccakTranscript implements the Transcript trait using the Keccak hash
+pub struct KeccakTranscript<C: CurveGroup> {
+    sponge: Keccak,
+    phantom: PhantomData<C>,
+}
+
+#[derive(Debug)]
+pub struct KeccakConfig {}
+
+impl<C: CurveGroup> Transcript<C> for KeccakTranscript<C> {
+    type TranscriptConfig = KeccakConfig;
+    fn new(config: &Self::TranscriptConfig) -> Self {
+        let _ = config;
+        let sponge = Keccak::v256();
+        Self {
+            sponge,
+            phantom: PhantomData,
+        }
+    }
+
+    fn absorb(&mut self, v: &C::ScalarField) {
+        self.sponge.update(&(v.into_bigint().to_bytes_le()));
+    }
+    fn absorb_vec(&mut self, v: &[C::ScalarField]) {
+        for _v in v {
+            self.sponge.update(&(_v.into_bigint().to_bytes_le()));
+        }
+    }
+    fn absorb_point(&mut self, p: &C) {
+        let mut serialized = vec![];
+        p.serialize_compressed(&mut serialized).unwrap();
+        self.sponge.update(&(serialized))
+    }
+    fn get_challenge(&mut self) -> C::ScalarField {
+        let mut output = [0u8; 32];
+        self.sponge.clone().finalize(&mut output);
+        C::ScalarField::from_le_bytes_mod_order(&[output[0]])
+    }
+    fn get_challenge_nbits(&mut self, nbits: usize) -> Vec<bool> {
+        // TODO
+        vec![]
+    }
+    fn get_challenges(&mut self, n: usize) -> Vec<C::ScalarField> {
+        let mut output = [0u8; 32];
+        self.sponge.clone().finalize(&mut output);
+
+        let c: Vec<C::ScalarField> = output
+            .iter()
+            .map(|c| C::ScalarField::from_le_bytes_mod_order(&[*c]))
+            .collect();
+        c[..n].to_vec()
+    }
+}
+
+#[cfg(test)]
+pub mod tests {
+    use super::*;
+    use ark_pallas::{
+        // constraints::GVar,
+        Fr, Projective
+    };
+    use ark_std::UniformRand;
+
+    /// WARNING the method keccak_test_config is for tests only
+    #[cfg(test)]
+    pub fn keccak_test_config<F: PrimeField>() -> KeccakConfig {
+        KeccakConfig {}
+    }
+
+    #[test]
+    fn test_transcript_get_challenges_len() {
+        let mut rng = ark_std::test_rng();
+
+        const n: usize = 10;
+        let config = keccak_test_config::<Fr>();
+
+        // init transcript
+        let mut transcript = KeccakTranscript::<Projective>::new(&config);
+        let v: Vec<Fr> = vec![Fr::rand(&mut rng); n];
+        let challenges = transcript.get_challenges(v.len());
+        assert_eq!(challenges.len(), n);
+    }
+
+    #[test]
+    fn test_transcript_get_challenge() {
+        let config = keccak_test_config::<Fr>();
+        // init transcript
+        let mut transcript = KeccakTranscript::<Projective>::new(&config);
+        transcript.absorb(&Fr::from(42_u32));
+        let c = transcript.get_challenge();
+        let c_2 = transcript.get_challenge();
+        assert_eq!(c, c_2);
+    }
+}
diff --git a/src/transcript/mod.rs b/src/transcript/mod.rs
index ccfc59c8..e7e68816 100644
--- a/src/transcript/mod.rs
+++ b/src/transcript/mod.rs
@@ -2,6 +2,8 @@ use ark_ec::CurveGroup;
 use ark_std::fmt::Debug;
 
 pub mod poseidon;
+pub mod keccak;
+pub mod sha3;
 
 pub trait Transcript<C: CurveGroup> {
     type TranscriptConfig: Debug;
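A minimal usage sketch (not part of the patch), assuming the module layout above: prover and verifier each drive an identical `KeccakTranscript` over the same public messages, so the Fiat-Shamir challenge they derive matches without interaction. The function name is illustrative.

```rust
use ark_pallas::Projective;
use ark_std::UniformRand;

use crate::transcript::keccak::{KeccakConfig, KeccakTranscript};
use crate::transcript::Transcript;

fn keccak_transcript_example() {
    let mut rng = ark_std::test_rng();
    let config = KeccakConfig {};

    // Prover and verifier each run their own copy of the transcript.
    let mut transcript_p = KeccakTranscript::<Projective>::new(&config);
    let mut transcript_v = KeccakTranscript::<Projective>::new(&config);

    // Absorbing the same public data on both sides...
    let commitment = Projective::rand(&mut rng);
    transcript_p.absorb_point(&commitment);
    transcript_v.absorb_point(&commitment);

    // ...yields the same challenge on both sides.
    assert_eq!(transcript_p.get_challenge(), transcript_v.get_challenge());
}
```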
diff --git a/src/transcript/sha3.rs b/src/transcript/sha3.rs
new file mode 100644
index 00000000..09b7fd52
--- /dev/null
+++ b/src/transcript/sha3.rs
@@ -0,0 +1,100 @@
+use std::marker::PhantomData;
+use sha3::{Shake256, digest::*};
+use ark_ec::CurveGroup;
+use ark_ff::{BigInteger, PrimeField};
+
+use crate::transcript::Transcript;
+
+/// SHA3Transcript implements the Transcript trait using the SHA3 (Shake256) hash
+pub struct SHA3Transcript<C: CurveGroup> {
+    sponge: Shake256,
+    phantom: PhantomData<C>,
+}
+
+#[derive(Debug)]
+pub struct SHA3Config {}
+
+impl<C: CurveGroup> Transcript<C> for SHA3Transcript<C> {
+    type TranscriptConfig = SHA3Config;
+    fn new(config: &Self::TranscriptConfig) -> Self {
+        let _ = config;
+        let sponge = Shake256::default();
+        Self {
+            sponge,
+            phantom: PhantomData,
+        }
+    }
+
+    fn absorb(&mut self, v: &C::ScalarField) {
+        self.sponge.update(&(v.into_bigint().to_bytes_le()));
+    }
+    fn absorb_vec(&mut self, v: &[C::ScalarField]) {
+        for _v in v {
+            self.sponge.update(&(_v.into_bigint().to_bytes_le()));
+        }
+    }
+    fn absorb_point(&mut self, p: &C) {
+        let mut serialized = vec![];
+        p.serialize_compressed(&mut serialized).unwrap();
+        self.sponge.update(&(serialized))
+    }
+    fn get_challenge(&mut self) -> C::ScalarField {
+        let output = self.sponge.clone().finalize_boxed(200);
+        C::ScalarField::from_le_bytes_mod_order(&[output[0]])
+    }
+    fn get_challenge_nbits(&mut self, nbits: usize) -> Vec<bool> {
+        // TODO
+        // should call finalize() then slice the output to n bit challenge
+        vec![]
+    }
+    fn get_challenges(&mut self, n: usize) -> Vec<C::ScalarField> {
+        let output = self.sponge.clone().finalize_boxed(n);
+
+        let c = output
+            .iter()
+            .map(|c| C::ScalarField::from_le_bytes_mod_order(&[*c]))
+            .collect();
+        c
+    }
+}
+
+#[cfg(test)]
+pub mod tests {
+    use super::*;
+    use ark_pallas::{
+        // constraints::GVar,
+        Fr, Projective
+    };
+    use ark_std::UniformRand;
+
+    /// WARNING the method sha3_test_config is for tests only
+    #[cfg(test)]
+    pub fn sha3_test_config<F: PrimeField>() -> SHA3Config {
+        SHA3Config {}
+    }
+
+    #[test]
+    fn test_transcript_get_challenges_len() {
+        let mut rng = ark_std::test_rng();
+
+        const n: usize = 10;
+        let config = sha3_test_config::<Fr>();
+
+        // init transcript
+        let mut transcript = SHA3Transcript::<Projective>::new(&config);
+        let v: Vec<Fr> = vec![Fr::rand(&mut rng); n];
+        let challenges = transcript.get_challenges(v.len());
+        assert_eq!(challenges.len(), n);
+    }
+
+    #[test]
+    fn test_transcript_get_challenge() {
+        let config = sha3_test_config::<Fr>();
+        // init transcript
+        let mut transcript = SHA3Transcript::<Projective>::new(&config);
+        transcript.absorb(&Fr::from(42_u32));
+        let c = transcript.get_challenge();
+        let c_2 = transcript.get_challenge();
+        assert_eq!(c, c_2);
+    }
+}
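Similarly, a minimal sketch (not part of the patch) of the Shake256-based transcript: `get_challenges(n)` clones the sponge, squeezes `n` bytes, and maps each byte to a field element, while repeated `get_challenge` calls stay deterministic until more data is absorbed. The function name is illustrative.

```rust
use ark_pallas::{Fr, Projective};

use crate::transcript::sha3::{SHA3Config, SHA3Transcript};
use crate::transcript::Transcript;

fn sha3_transcript_example() {
    let mut transcript = SHA3Transcript::<Projective>::new(&SHA3Config {});
    transcript.absorb(&Fr::from(7_u32));

    // One byte is squeezed per requested challenge.
    let challenges = transcript.get_challenges(4);
    assert_eq!(challenges.len(), 4);

    // The sponge is cloned before squeezing, so the transcript state is
    // unchanged and the next challenge is reproducible.
    let c_a = transcript.get_challenge();
    let c_b = transcript.get_challenge();
    assert_eq!(c_a, c_b);
}
```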