diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7f68372..c35100f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -79,6 +79,11 @@ jobs: check_no_std: name: Check no_std runs-on: ubuntu-latest + strategy: + matrix: + rust: + - stable + - nightly steps: - name: Checkout uses: actions/checkout@v2 diff --git a/.gitignore b/.gitignore index 6a0375b..c01bed6 100644 --- a/.gitignore +++ b/.gitignore @@ -9,4 +9,9 @@ Cargo.lock # These are backup files generated by rustfmt **/*.rs.bk -.idea \ No newline at end of file +# Editors +.idea +.vscode/** + +# Mac OSX +.DS_Store \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index fefd556..c3eeec3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,60 +1,53 @@ [package] name = "ark-ldt" -version = "0.1.0" +version = "1.1.0" authors = ["arkworks contributors"] -edition = "2018" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +edition = "2021" [dependencies] -ark-ff = { version = "^0.3.0", default-features = false } -ark-std = { version = "^0.3.0", default-features = false } -ark-r1cs-std = { version = "^0.3.0", default-features = false} -ark-sponge = { version = "^0.3.0", default-features = false } -ark-poly = { version = "0.3.0", default-features = false } -ark-relations = { version = "^0.3.0", default-features = false, optional = true} -tracing = { version = "0.1", default-features = false, features = [ "attributes" ], optional = true} - -[dev-dependencies] -ark-test-curves = { version = "^0.3.0", default-features = false, features = ["bls12_381_scalar_field", "mnt4_753_scalar_field"] } +ark-crypto-primitives = { version = "^0.4.0", default-features = false, features = ["merkle_tree", "sponge"] } +ark-ff = { version = "0.4", default-features = false } +ark-poly = { version = "0.4.2", default-features = false } +ark-std = { version = "^0.4.0", default-features = false } +ark-serialize = { version = "0.4", default-features = false } +ark-test-curves = { version = "0.4", default-features = false, features = ["bls12_381_curve"] } +itertools = { version = "0.13.0", default-features = false } +spin = { version = "0.9.8", default-features = false, features = ["once", "mutex", "spin_mutex"] } +hashbrown = { version = "0.14.5" } [patch.crates-io] -ark-sponge = {git = "https://github.com/arkworks-rs/sponge"} -ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std" } +ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" } ark-ec = { git = "https://github.com/arkworks-rs/algebra" } ark-ff = { git = "https://github.com/arkworks-rs/algebra" } -ark-poly = { git = "https://github.com/arkworks-rs/algebra" } ark-serialize = { git = "https://github.com/arkworks-rs/algebra" } ark-std = { git = "https://github.com/arkworks-rs/std" } ark-test-curves = { git = "https://github.com/arkworks-rs/algebra" } -[profile.release] -opt-level = 3 -lto = "thin" -incremental = true -panic = 'abort' - -[profile.bench] -opt-level = 3 -debug = false -rpath = false -lto = "thin" -incremental = true -debug-assertions = false - -[profile.dev] -opt-level = 0 -panic = 'abort' - -[profile.test] -opt-level = 3 -lto = "thin" -incremental = true -debug-assertions = true -debug = true - [features] default = ["std"] -std = ["ark-ff/std", "ark-std/std", "ark-relations/std", "ark-r1cs-std/std", "ark-sponge/std", "ark-poly/std"] -r1cs = ["ark-sponge/r1cs", "tracing", "ark-relations"] +std = [ "ark-crypto-primitives/std", "ark-ff/std", "ark-poly/std", 
"ark-serialize/std" ] +[profile.release] + opt-level = 3 + lto = "thin" + incremental = true + panic = 'abort' + + [profile.bench] + opt-level = 3 + debug = false + rpath = false + lto = "thin" + incremental = true + debug-assertions = false + + [profile.dev] + opt-level = 0 + panic = 'abort' + + [profile.test] + opt-level = 3 + lto = "thin" + incremental = true + debug-assertions = true + debug = true diff --git a/README.md b/README.md index d5fa55e..09bdc6f 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ This implementation is NOT ready for production use. ## Overview -A (univariate) low-degree test is an IOP that checks that a given function is close to a (univariate) polynomial of low degree. This library provides two LDTs: the **direct low-degree test** and the **FRI Protocol**. The library also comes with R1CS constraints for the LDT verifiers. Enable `r1cs` feature to use those constraints. +A (univariate) low-degree test is an IOP that checks that a given function is close to a (univariate) polynomial of low degree. This library provides three LDTs: **Direct**, **FRI**, and **STIR**. ## Build Guide @@ -44,8 +44,11 @@ To use this library, you need to add the following to your `Cargo.toml`. Note th ark-ldt = {git = "https://github.com/arkworks-rs/ldt", branch="main", default-features = false} [patch.crates-io] -ark-sponge = {git = "https://github.com/arkworks-rs/sponge"} -ark-r1cs-std = {git = "https://github.com/arkworks-rs/r1cs-std", branch = "master"} +ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" } +ark-ec = { git = "https://github.com/arkworks-rs/algebra/" } +ark-ff = { git = "https://github.com/arkworks-rs/algebra" } +ark-serialize = { git = "https://github.com/arkworks-rs/algebra" } +ark-test-curves = { git = "https://github.com/arkworks-rs/algebra" } ``` ## License @@ -62,10 +65,14 @@ conditions. ## Reference papers [Fractal: Post-Quantum and Transparent Recursive Proofs from Holography][cos20]
-Alessandro Chiesa, Dev Ojha, Nicholas Spooner +Alessandro Chiesa, Dev Ojha, Nicholas Spooner -[Fast Reed-Solomon Interactive Oracle Proofs of Proximity][bbhr17]
+[FRI: Fast Reed-Solomon Interactive Oracle Proofs of Proximity][bbhr17]
Eli Ben-Sasson, Iddo Bentov, Ynon Horesh, Michael Riabzev +[STIR: Reed–Solomon Proximity Testing with Fewer Queries][acfy24]
+Gal Arnon, Alessandro Chiesa, Giacomo Fenzi, Eylon Yogev
+
 [cos20]: https://eprint.iacr.org/2019/1076
 [bbhr17]: https://eccc.weizmann.ac.il/report/2017/134/
+[acfy24]: https://eprint.iacr.org/2024/390
diff --git a/src/direct/config.rs b/src/direct/config.rs
new file mode 100644
index 0000000..8c8c393
--- /dev/null
+++ b/src/direct/config.rs
@@ -0,0 +1,31 @@
+use ark_crypto_primitives::{
+    merkle_tree::{Config as MerkleConfig, LeafParam, TwoToOneParam},
+    sponge::CryptographicSponge,
+};
+
+#[derive(Clone)]
+pub struct DirectConfig<M: MerkleConfig, S: CryptographicSponge> {
+    pub degree: usize,
+    pub num_challenges: usize,
+    pub merkle_leaf_hash_param: LeafParam<M>,
+    pub merkle_two_to_one_param: TwoToOneParam<M>,
+    pub sponge_config: S::Config,
+}
+
+impl<M: MerkleConfig, S: CryptographicSponge> DirectConfig<M, S> {
+    pub fn new(
+        degree: usize,
+        num_challenges: usize,
+        merkle_leaf_hash_param: LeafParam<M>,
+        merkle_two_to_one_param: TwoToOneParam<M>,
+        sponge_config: S::Config,
+    ) -> Self {
+        DirectConfig {
+            degree,
+            num_challenges,
+            merkle_leaf_hash_param,
+            merkle_two_to_one_param,
+            sponge_config,
+        }
+    }
+}
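The configuration above parameterizes an interpolate-truncate-spot-check protocol. As a minimal, self-contained sketch of that core check (illustrative code using `ark-poly` directly, not this crate's API):

```rust
use ark_poly::{
    univariate::DensePolynomial, DenseUVPolynomial, EvaluationDomain, Evaluations,
    GeneralEvaluationDomain, Polynomial,
};
use ark_std::test_rng;
use ark_test_curves::bls12_381::Fr;

fn main() {
    let mut rng = test_rng();
    let degree = 15;
    let poly = DensePolynomial::<Fr>::rand(degree, &mut rng);

    // Oracle: evaluations of the polynomial over a domain larger than the degree.
    let domain = GeneralEvaluationDomain::<Fr>::new(64).unwrap();
    let evals = poly.evaluate_over_domain_by_ref(domain).evals;

    // Prover side: interpolate, then truncate to the claimed degree bound.
    let mut low_degree = Evaluations::from_vec_and_domain(evals.clone(), domain).interpolate();
    low_degree.coeffs.truncate(degree + 1);

    // Verifier side: one spot-check; `num_challenges` of these amplify soundness.
    let i = 7;
    assert_eq!(low_degree.evaluate(&domain.element(i)), evals[i]);
}
```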
diff --git a/src/direct/constraints.rs b/src/direct/constraints.rs
deleted file mode 100644
index cb4d51b..0000000
--- a/src/direct/constraints.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-use ark_ff::PrimeField;
-use ark_r1cs_std::boolean::Boolean;
-use ark_r1cs_std::eq::EqGadget;
-use ark_r1cs_std::fields::fp::FpVar;
-use ark_r1cs_std::poly::polynomial::univariate::dense::DensePolynomialVar;
-use ark_relations::r1cs::SynthesisError;
-use ark_std::marker::PhantomData;
-
-/// Constraints for direct LDT.
-pub struct DirectLDTGadget<CF: PrimeField> {
-    _marker: PhantomData<CF>,
-}
-
-impl<CF: PrimeField> DirectLDTGadget<CF> {
-    /// ### Verifier Side
-    ///
-    /// The Direct LDT Verify function tests that, given a list of coefficients `a_0, a_1, ..., a_{d-1}`,
-    /// an evaluation point `x`, and claimed evaluation `y`, indeed `y = \sum_{i=0}^{d} a_i x^i`.
-    /// This proves that the provided coefficients of a degree `d` polynomial agree with the claimed
-    /// `(evaluation_point, claimed_evaluation)` pair.
-    /// This is used to construct a low degree test for an oracle to a claimed polynomial's evaluations over a domain.
-    /// By sampling enough (domain_element, claimed_evaluation) pairs from the oracle, and testing them
-    /// via this method, you become convinced w.h.p. that the oracle is sufficiently close to the claimed coefficients list.
-    pub fn verify(
-        evaluation_point: FpVar<CF>,
-        claimed_evaluation: FpVar<CF>,
-        coefficients: &DensePolynomialVar<CF>,
-        degree_bound: usize,
-    ) -> Result<Boolean<CF>, SynthesisError> {
-        // make sure the degree is within degree_bound. No need to include the degree_bound check
-        // in constraints because the verifier can just verify the size of the circuit.
-        assert!(
-            coefficients.coeffs.len() <= degree_bound + 1,
-            "polynomial degree out of bound"
-        );
-        coefficients
-            .evaluate(&evaluation_point)?
-            .is_eq(&claimed_evaluation)
-    }
-}
diff --git a/src/direct/ldt.rs b/src/direct/ldt.rs
new file mode 100644
index 0000000..8183018
--- /dev/null
+++ b/src/direct/ldt.rs
@@ -0,0 +1,50 @@
+use ark_crypto_primitives::{
+    merkle_tree::{Config as MerkleConfig, MultiPath},
+    sponge::{Absorb, CryptographicSponge},
+};
+use ark_ff::FftField;
+use ark_std::{marker::PhantomData, vec::Vec};
+
+use crate::{
+    direct::{config::DirectConfig, prover::DirectProver, verifier::DirectVerifier},
+    ldt::{LowDegreeTest, Prover, Verifier},
+    witness::Witness,
+};
+
+use super::proof::DirectProof;
+
+pub struct DirectLDT<F: FftField, M: MerkleConfig, S: CryptographicSponge, W: Witness<F, M>> {
+    _field: PhantomData<F>,
+    _merkle_config: PhantomData<M>,
+    _sponge: PhantomData<S>,
+    _witness: PhantomData<W>,
+}
+impl<F, M, S, W> LowDegreeTest for DirectLDT<F, M, S, W>
+where
+    F: FftField,
+    M: MerkleConfig<Leaf = Vec<F>> + Clone,
+    M::InnerDigest: Absorb,
+    S: CryptographicSponge,
+    S::Config: Clone,
+    W: Witness<
+            F,
+            M,
+            MerkleConfig = M,
+            ChallengeAnswers = MultiPath<M>,
+            CommittedValues = Vec<Vec<F>>,
+            Challenges = Vec<usize>,
+        > + Clone,
+    W::ChallengeAnswers: Clone,
+{
+    type LDTConfig = DirectConfig<M, S>;
+    type Proof = DirectProof<F, M, S>;
+    type Prover = DirectProver<F, M, S, W>;
+    type Verifier = DirectVerifier<F, M, S, W>;
+
+    fn new(config: Self::LDTConfig) -> (Self::Prover, Self::Verifier) {
+        (
+            Self::Prover::new(config.clone()),
+            Self::Verifier::new(config),
+        )
+    }
+}
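`src/ldt.rs` itself is not part of this diff. From the `impl` blocks in this file and the ones below, the three traits plausibly have shapes along these lines (an inferred sketch, not the actual trait definitions):

```rust
// Inferred from usage; the real src/ldt.rs may differ.
pub trait Prover {
    type Witness;
    type ProverConfig;
    type Proof;
    fn new(config: Self::ProverConfig) -> Self;
    fn prove(&self, witness: &Self::Witness) -> Self::Proof;
}

pub trait Verifier {
    type Statement;
    type VerifierConfig;
    type Proof;
    fn new(config: Self::VerifierConfig) -> Self;
    fn verify(&self, statement: &Self::Statement, proof: &Self::Proof) -> bool;
}

pub trait LowDegreeTest {
    type LDTConfig;
    type Proof;
    type Prover: Prover;
    type Verifier: Verifier;
    fn new(config: Self::LDTConfig) -> (Self::Prover, Self::Verifier);
}
```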
diff --git a/src/direct/mod.rs b/src/direct/mod.rs
index c06d701..a28a596 100644
--- a/src/direct/mod.rs
+++ b/src/direct/mod.rs
@@ -1,88 +1,65 @@
-/// R1CS constraints for DirectLDT
-#[cfg(feature = "r1cs")]
-pub mod constraints;
-
-use crate::domain::Radix2CosetDomain;
-use ark_ff::PrimeField;
-use ark_poly::univariate::DensePolynomial;
-use ark_poly::Polynomial;
-use ark_std::marker::PhantomData;
-use ark_std::vec::Vec;
-/// Direct LDT by interpolating evaluations and truncating coefficients to low degree.
-///
-/// This requires communication linear in the degree bound; use FRI for better communication complexity.
-pub struct DirectLDT<F: PrimeField> {
-    marker: PhantomData<F>,
-}
-
-/// A linear-communication protocol for testing if a function is a polynomial of certain degree.
-/// Method is described in Aurora appendix C.1.
-///
-/// For now, the domain of the function needs to support IFFT.
-impl<F: PrimeField> DirectLDT<F> {
-    /// ### Prover Side
-    ///
-    /// Generate the coefficients of the low-degree polynomial obtained by interpolating the domain evaluations.
-    /// The polynomial is trimmed to `degree_bound` when necessary.
-    pub fn generate_low_degree_coefficients(
-        domain: Radix2CosetDomain<F>,
-        codewords: Vec<F>,
-        degree_bound: usize,
-    ) -> DensePolynomial<F> {
-        let mut poly = domain.interpolate(codewords);
-        // trim higher degree: if poly is higher degree, then the soundness should fail
-        poly.coeffs.truncate(degree_bound + 1);
-        poly
-    }
-
-    /// ### Verifier Side
-    ///
-    /// The Direct LDT Verify function tests that, given a list of coefficients `a_0, a_1, ..., a_{d-1}`,
-    /// an evaluation point `x`, and claimed evaluation `y`, indeed `y = \sum_{i=0}^{d} a_i x^i`.
-    /// This proves that the provided coefficients of a degree `d` polynomial agree with the claimed
-    /// `(evaluation_point, claimed_evaluation)` pair.
-    /// This is used to construct a low degree test for an oracle to a claimed polynomial's evaluations over a domain.
-    /// By sampling enough (domain_element, claimed_evaluation) pairs from the oracle, and testing them
-    /// via this method, you become convinced w.h.p. that the oracle is sufficiently close to the claimed coefficients list.
-    pub fn verify(
-        evaluation_point: F,
-        claimed_evaluation: F,
-        bounded_coefficients: &DensePolynomial<F>,
-    ) -> bool {
-        return bounded_coefficients.evaluate(&evaluation_point) == claimed_evaluation;
-    }
-}
+pub mod config;
+pub mod ldt;
+pub mod proof;
+pub mod prover;
+pub mod verifier;
 
 #[cfg(test)]
 mod tests {
-    use crate::direct::{DirectLDT, Radix2CosetDomain};
-    use ark_ff::UniformRand;
+    use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
     use ark_poly::univariate::DensePolynomial;
     use ark_poly::DenseUVPolynomial;
     use ark_std::test_rng;
-    use ark_test_curves::bls12_381::Fr;
+
+    use crate::{
+        direct::{config::DirectConfig, ldt::DirectLDT},
+        domain::Domain,
+        ldt::{LowDegreeTest, Prover, Verifier},
+        test_helpers::{fields::Field256, fs, merkle_tree},
+        witness::{
+            single::{SingleWitness, SingleWitnessArgument},
+            Witness,
+        },
+    };
+
+    type TestField = Field256;
+    type TestMerkleConfig = merkle_tree::poseidon::MerkleTreeParams<TestField>;
+    type TestSpongeConfig = PoseidonSponge<TestField>;
+    type TestWitness = SingleWitness<TestField, TestMerkleConfig, TestSpongeConfig>;
 
     #[test]
     fn test_direct_ldt() {
-        let degree = 51;
-
+        // get ready
         let mut rng = test_rng();
-        let poly = DensePolynomial::<Fr>::rand(degree, &mut rng);
-        let domain_coset = Radix2CosetDomain::new_radix2_coset(52, Fr::rand(&mut rng));
-        let evaluations = domain_coset.evaluate(&poly);
+        let (merkle_leaf_hash_param, merkle_two_to_one_param) =
+            merkle_tree::poseidon::default_config::<TestField>(&mut rng, 2);
+        let config: DirectConfig<TestMerkleConfig, TestSpongeConfig> = DirectConfig {
+            degree: 22,
+            num_challenges: 2,
+            merkle_leaf_hash_param: merkle_leaf_hash_param.clone(),
+            merkle_two_to_one_param: merkle_two_to_one_param.clone(),
+            sponge_config: fs::poseidon::poseidon_test_config::<TestField>(),
+        };
+        let (prover, verifier) =
+            DirectLDT::<TestField, TestMerkleConfig, TestSpongeConfig, TestWitness>::new(
+                config.clone(),
+            );
 
-        let low_degree_poly = DirectLDT::generate_low_degree_coefficients(
-            domain_coset.clone(),
-            evaluations.to_vec(),
-            degree,
-        );
+        // generate witness
+        let witness: SingleWitness<TestField, TestMerkleConfig, TestSpongeConfig> =
+            SingleWitness::new(SingleWitnessArgument {
+                coeff: DensePolynomial::<TestField>::rand(config.degree, &mut rng),
+                domain: Domain::<TestField>::new(config.degree, 0).unwrap(),
+                folding_factor: 1,
+                merkle_leaf_hash_param,
+                merkle_two_to_one_param,
+                sponge_config: config.sponge_config,
+            });
 
-        let sampled_element = domain_coset.element(15);
-        let sampled_evaluation = evaluations[15];
+        // prove
+        let direct_proof = prover.prove(&witness);
 
-        assert!(DirectLDT::verify(
-            sampled_element,
-            sampled_evaluation,
-            &low_degree_poly
-        ))
+        // verify
+        assert!(verifier.verify(&witness.statement(), &direct_proof));
     }
 }
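`utils::{squeeze_integer, dedup}`, used by `proof.rs` and `verifier.rs` below, are also not shown in this diff. One plausible reading, sketched under that assumption: squeeze sponge output, reduce it into `[0, range)`, and sort/deduplicate the resulting indices so prover and verifier agree on a canonical order:

```rust
use ark_crypto_primitives::sponge::CryptographicSponge;

// Hypothetical stand-in for `crate::utils::squeeze_integer`: derive an index
// in [0, range) from sponge bytes. (Modular reduction is slightly biased for
// ranges that do not divide 2^64; acceptable for a sketch.)
fn squeeze_integer<S: CryptographicSponge>(sponge: &mut S, range: usize) -> usize {
    let mut x = 0u64;
    for b in sponge.squeeze_bytes(8) {
        x = (x << 8) | b as u64;
    }
    (x % range as u64) as usize
}

// Hypothetical stand-in for `crate::utils::dedup`: canonical sorted order with
// duplicates removed, so it can be compared against `MultiPath::leaf_indexes`.
fn dedup(mut indices: Vec<usize>) -> Vec<usize> {
    indices.sort_unstable();
    indices.dedup();
    indices
}
```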
diff --git a/src/direct/proof.rs b/src/direct/proof.rs
new file mode 100644
index 0000000..6c41d48
--- /dev/null
+++ b/src/direct/proof.rs
@@ -0,0 +1,85 @@
+use ark_crypto_primitives::{
+    merkle_tree::{Config as MerkleConfig, LeafParam, MultiPath, TwoToOneParam},
+    sponge::{Absorb, CryptographicSponge},
+};
+use ark_ff::FftField;
+
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
+
+use crate::utils::{dedup, squeeze_integer};
+
+pub struct DirectProof<F, M, S>
+where
+    F: FftField,
+    M: MerkleConfig,
+    S: CryptographicSponge,
+{
+    challenge_answers: MultiPath<M>,
+    committed_values: Vec<Vec<F>>,
+    merkle_leaf_hash_param: LeafParam<M>,
+    merkle_two_to_one_param: TwoToOneParam<M>,
+    sponge_config: S::Config,
+}
+
+impl<F, M, S> DirectProof<F, M, S>
+where
+    F: FftField,
+    M: MerkleConfig<Leaf = Vec<F>>,
+    M::InnerDigest: Absorb,
+    S: CryptographicSponge,
+{
+    pub fn new(
+        challenge_answers: MultiPath<M>,
+        committed_values: Vec<Vec<F>>,
+        merkle_leaf_hash_param: LeafParam<M>,
+        merkle_two_to_one_param: TwoToOneParam<M>,
+        sponge_config: S::Config,
+    ) -> Self {
+        Self {
+            challenge_answers,
+            committed_values,
+            merkle_leaf_hash_param,
+            merkle_two_to_one_param,
+            sponge_config,
+        }
+    }
+    pub fn challenges(
+        &self,
+        commitment_digest: M::InnerDigest,
+        num_challenges: usize,
+    ) -> Vec<usize> {
+        // absorb commitment digest
+        let mut sponge = S::new(&self.sponge_config);
+        sponge.absorb(&commitment_digest);
+        // squeeze out the challenges as indices
+        let mut challenges = Vec::with_capacity(num_challenges);
+        for _ in 0..num_challenges {
+            challenges.push(squeeze_integer(&mut sponge, self.committed_values.len()));
+        }
+        dedup(challenges)
+    }
+    pub fn num_committed_values(&self) -> usize {
+        self.committed_values.len()
+    }
+    pub fn verify(&self, commitment_digest: M::InnerDigest, challenges: Vec<usize>) -> bool {
+        if self.challenge_answers.leaf_indexes != challenges {
+            return false;
+        }
+
+        let challenge_values: Vec<Vec<F>> = self
+            .challenge_answers
+            .leaf_indexes
+            .iter()
+            .map(|&i| self.committed_values.get(i).unwrap().clone())
+            .collect();
+        self.challenge_answers
+            .verify(
+                &self.merkle_leaf_hash_param,
+                &self.merkle_two_to_one_param,
+                &commitment_digest,
+                challenge_values,
+            )
+            .unwrap()
+    }
+}
diff --git a/src/direct/prover.rs b/src/direct/prover.rs
new file mode 100644
index 0000000..1a327d8
--- /dev/null
+++ b/src/direct/prover.rs
@@ -0,0 +1,73 @@
+use ark_crypto_primitives::{
+    merkle_tree::{Config as MerkleConfig, MultiPath},
+    sponge::{Absorb, CryptographicSponge},
+};
+
+use ark_ff::FftField;
+use ark_std::marker::PhantomData;
+
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
+
+use crate::{
+    direct::{config::DirectConfig, proof::DirectProof},
+    ldt::Prover,
+    witness::Witness,
+};
+
+pub struct DirectProver<F, M, S, W>
+where
+    F: FftField,
+    M: MerkleConfig,
+    S: CryptographicSponge,
+    W: Witness<F, M>,
+{
+    config: DirectConfig<M, S>,
+    _field: PhantomData<F>,
+    _merkle_config: PhantomData<M>,
+    _sponge: PhantomData<S>,
+    _witness: PhantomData<W>,
+}
+
+impl<F, M, S, W> Prover for DirectProver<F, M, S, W>
+where
+    F: FftField,
+    M: MerkleConfig<Leaf = Vec<F>>,
+    M::InnerDigest: Absorb,
+    S: CryptographicSponge,
+    S::Config: Clone,
+    W: Witness<
+            F,
+            M,
+            MerkleConfig = M,
+            CommittedValues = Vec<Vec<F>>,
+            ChallengeAnswers = MultiPath<M>,
+            Challenges = Vec<usize>,
+        > + Clone,
+    W::ChallengeAnswers: Clone,
+{
+    type Witness = W;
+    type ProverConfig = DirectConfig<M, S>;
+    type Proof = DirectProof<F, M, S>;
+
+    fn new(config: DirectConfig<M, S>) -> Self {
+        Self {
+            config,
+            _field: PhantomData::<F>,
+            _merkle_config: PhantomData::<M>,
+            _sponge: PhantomData::<S>,
+            _witness: PhantomData::<W>,
+        }
+    }
+
+    fn prove(&self, witness: &W) -> Self::Proof {
+        let challenges = witness.challenges(self.config.num_challenges);
+        DirectProof::<F, M, S>::new(
+            witness.challenge_answers(challenges),
+            witness.committed_values(),
+            self.config.merkle_leaf_hash_param.clone(),
+            self.config.merkle_two_to_one_param.clone(),
+            self.config.sponge_config.clone(),
+        )
+    }
+}
diff --git a/src/direct/verifier.rs b/src/direct/verifier.rs
new file mode 100644
index 0000000..e52eb01
--- /dev/null
+++ b/src/direct/verifier.rs
@@ -0,0 +1,67 @@
+use ark_crypto_primitives::{
+    merkle_tree::Config as MerkleConfig,
+    sponge::{Absorb, CryptographicSponge},
+};
+use ark_ff::FftField;
+use ark_std::marker::PhantomData;
+
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
+
+use crate::{
+    direct::{config::DirectConfig, proof::DirectProof},
+    ldt::Verifier,
+    statement::single::SingleStatement,
+    utils::{dedup, squeeze_integer},
+    witness::Witness,
+};
+
+pub struct DirectVerifier<F, M, S, W>
+where
+    F: FftField,
+    M: MerkleConfig,
+    S: CryptographicSponge,
+    W: Witness<F, M>,
+{
+    config: DirectConfig<M, S>,
+    _field: PhantomData<F>,
+    _merkle_config: PhantomData<M>,
+    _sponge: PhantomData<S>,
+    _witness: PhantomData<W>,
+}
+impl<F, M, S, W> Verifier for DirectVerifier<F, M, S, W>
+where
+    F: FftField,
+    M: MerkleConfig<Leaf = Vec<F>>,
+    M::InnerDigest: Absorb,
+    S: CryptographicSponge,
+    W: Witness<F, M>,
+    W::ChallengeAnswers: Clone,
+{
+    type Statement = SingleStatement<F, M>;
+    type VerifierConfig = DirectConfig<M, S>;
+    type Proof = DirectProof<F, M, S>;
+
+    fn new(config: DirectConfig<M, S>) -> Self {
+        Self {
+            config,
+            _field: PhantomData::<F>,
+            _merkle_config: PhantomData::<M>,
+            _sponge: PhantomData::<S>,
+            _witness: PhantomData::<W>,
+        }
+    }
+    fn verify(&self, statement: &Self::Statement, proof: &Self::Proof) -> bool {
+        // regenerate the challenges
+        let mut sponge = S::new(&self.config.sponge_config);
+        sponge.absorb(&statement.commitment_digest());
+        // squeeze out the challenges as indices
+        let mut challenges = Vec::with_capacity(self.config.num_challenges);
+        for _ in 0..self.config.num_challenges {
+            challenges.push(squeeze_integer(&mut sponge, proof.num_committed_values()));
+        }
+        challenges = dedup(challenges);
+        // verify the proof against the claim
+        proof.verify(statement.commitment_digest(), challenges)
+    }
+}
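Before the new `src/domain.rs` below, a usage sketch of the `Domain` type it introduces (assuming the constructor and methods defined there):

```rust
use ark_test_curves::bls12_381::Fr;
// `Domain` is defined in the new src/domain.rs that follows.
use crate::domain::Domain;

fn domain_sketch() {
    // |L_0| = degree * 2^log_rho_inv = 64 * 2^2 = 256.
    let l_0 = Domain::<Fr>::new(64, 2).unwrap();

    // Square the generator (halving the size) and shift the offset by the root
    // of unity, so L_1 stays disjoint from powers of L_0 (see the test below).
    let l_1 = l_0.scale_offset(2);
    assert_eq!(l_1.size(), l_0.size() / 2);
}
```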
diff --git a/src/domain.rs b/src/domain.rs
new file mode 100644
index 0000000..42415af
--- /dev/null
+++ b/src/domain.rs
@@ -0,0 +1,190 @@
+use ark_ff::FftField;
+use ark_poly::{
+    EvaluationDomain, GeneralEvaluationDomain, MixedRadixEvaluationDomain, Radix2EvaluationDomain,
+};
+use ark_std::ops::Deref;
+
+#[derive(Debug, Clone)]
+pub struct Domain<F: FftField> {
+    pub root_of_unity: F,
+    pub root_of_unity_inv: F,
+    pub backing_domain: GeneralEvaluationDomain<F>,
+}
+
+impl<F: FftField> Domain<F> {
+    pub fn new(degree: usize, log_rho_inv: usize) -> Option<Self> {
+        let size = degree * (1 << log_rho_inv);
+        let backing_domain = GeneralEvaluationDomain::new(size)?;
+        let root_of_unity: F = match backing_domain {
+            GeneralEvaluationDomain::Radix2(r2) => r2.group_gen,
+            GeneralEvaluationDomain::MixedRadix(mr) => mr.group_gen,
+        };
+        let root_of_unity_inv = match backing_domain {
+            GeneralEvaluationDomain::Radix2(r2) => r2.group_gen_inv,
+            GeneralEvaluationDomain::MixedRadix(mr) => mr.group_gen_inv,
+        };
+        Some(Self {
+            backing_domain,
+            root_of_unity,
+            root_of_unity_inv,
+        })
+    }
+
+    pub fn size(&self) -> usize {
+        self.backing_domain.size()
+    }
+
+    // Takes the underlying backing_domain = <w>, and computes the new domain
+    // <w^power> (note this will have size |L| / power)
+    // NOTE: This should not be mixed with scale_offset
+    fn scale_generator_by(&self, power: usize) -> GeneralEvaluationDomain<F> {
+        let starting_size = self.size();
+        assert_eq!(starting_size % power, 0);
+        let new_size = starting_size / power;
+        let log_size_of_group = new_size.trailing_zeros();
+        let size_as_field_element = F::from(new_size as u64);
+
+        match self.backing_domain {
+            GeneralEvaluationDomain::Radix2(r2) => {
+                let group_gen = r2.group_gen.pow([power as u64]);
+                let group_gen_inv = group_gen.inverse().unwrap();
+
+                let offset = r2.offset.pow([power as u64]);
+                let offset_inv = r2.offset_inv.pow([power as u64]);
+                let offset_pow_size = offset.pow([new_size as u64]);
+
+                GeneralEvaluationDomain::Radix2(Radix2EvaluationDomain {
+                    size: new_size as u64,
+                    log_size_of_group,
+                    size_as_field_element,
+                    size_inv: size_as_field_element.inverse().unwrap(),
+                    group_gen,
+                    group_gen_inv,
+                    offset,
+                    offset_inv,
+                    offset_pow_size,
+                })
+            }
+            GeneralEvaluationDomain::MixedRadix(mr) => {
+                let group_gen = mr.group_gen.pow([power as u64]);
+                let group_gen_inv = mr.group_gen_inv.pow([power as u64]);
+
+                let offset = mr.offset.pow([power as u64]);
+                let offset_inv = mr.offset_inv.pow([power as u64]);
+                let offset_pow_size = offset.pow([new_size as u64]);
+
+                GeneralEvaluationDomain::MixedRadix(MixedRadixEvaluationDomain {
+                    size: new_size as u64,
+                    log_size_of_group,
+                    size_as_field_element,
+                    size_inv: size_as_field_element.inverse().unwrap(),
+                    group_gen,
+                    group_gen_inv,
+                    offset,
+                    offset_inv,
+                    offset_pow_size,
+                })
+            }
+        }
+    }
+
+    // Take a domain L_0 = o * <w> and compute a new domain L_1 = w * o^power * <w^power>.
+    // Note that L_0^k \cap L_1 = \emptyset for k > power.
+    fn scale_with_offset(&self, power: usize) -> GeneralEvaluationDomain<F> {
+        let starting_size = self.size();
+        assert_eq!(starting_size % power, 0);
+        let new_size = starting_size / power;
+        let log_size_of_group = new_size.trailing_zeros();
+        let size_as_field_element = F::from(new_size as u64);
+        match self.backing_domain {
+            GeneralEvaluationDomain::Radix2(r2) => {
+                let group_gen = r2.group_gen.pow([power as u64]);
+                let group_gen_inv = r2.group_gen_inv.pow([power as u64]);
+
+                let offset = r2.offset.pow([power as u64]) * self.root_of_unity;
+                let offset_inv = r2.offset_inv.pow([power as u64]) * self.root_of_unity_inv;
+
+                GeneralEvaluationDomain::Radix2(Radix2EvaluationDomain {
+                    size: new_size as u64,
+                    log_size_of_group,
+                    size_as_field_element,
+                    size_inv: size_as_field_element.inverse().unwrap(),
+                    group_gen,
+                    group_gen_inv,
+                    offset,
+                    offset_inv,
+                    offset_pow_size: offset.pow([new_size as u64]),
+                })
+            }
+            GeneralEvaluationDomain::MixedRadix(mr) => {
+                let group_gen = mr.group_gen.pow([power as u64]);
+                let group_gen_inv = mr.group_gen_inv.pow([power as u64]);
+
+                let offset = mr.offset.pow([power as u64]) * self.root_of_unity;
+                let offset_inv = mr.offset_inv.pow([power as u64]) * self.root_of_unity_inv;
+
+                GeneralEvaluationDomain::MixedRadix(MixedRadixEvaluationDomain {
+                    size: new_size as u64,
+                    log_size_of_group,
+                    size_as_field_element,
+                    size_inv: size_as_field_element.inverse().unwrap(),
+                    group_gen,
+                    group_gen_inv,
+                    offset,
+                    offset_inv,
+                    offset_pow_size: offset.pow([new_size as u64]),
+                })
+            }
+        }
+    }
+
+    pub fn scale(&self, power: usize) -> Self {
+        Self {
+            backing_domain: self.scale_generator_by(power),
+            ..*self
+        }
+    }
+
+    pub fn scale_offset(&self, power: usize) -> Self {
+        Self {
+            backing_domain: self.scale_with_offset(power),
+            ..*self
+        }
+    }
+}
+
+impl<F: FftField> Deref for Domain<F> {
+    type Target = GeneralEvaluationDomain<F>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.backing_domain
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use hashbrown::HashSet;
+
+    use super::*;
+    use crate::test_helpers::fields::Field64 as TestField;
+
+    #[test]
+    fn test_non_overlapping() {
+        let folding_factor = 16;
+
+        let l_0 = Domain::<TestField>::new(64, 2).unwrap();
+
+        let l_0_k = l_0.scale(folding_factor);
+        let l_1 = l_0.scale_offset(2);
+        let l_1_k = l_1.scale_offset(folding_factor);
+        let l_2 = l_1.scale_offset(2);
+
+        let l_0_k_elements: HashSet<_> = l_0_k.elements().collect();
+        let l_1_elements: HashSet<_> = l_1.elements().collect();
+        let l_1_k_elements: HashSet<_> = l_1_k.elements().collect();
+        let l_2_elements: HashSet<_> = l_2.elements().collect();
+
+        assert_eq!(l_0_k_elements.intersection(&l_1_elements).count(), 0);
+        assert_eq!(l_1_k_elements.intersection(&l_2_elements).count(), 0);
+    }
+}
diff --git a/src/domain/mod.rs b/src/domain/mod.rs
deleted file mode 100644
index f70878e..0000000
--- a/src/domain/mod.rs
+++ /dev/null
@@ -1,312 +0,0 @@
-use ark_ff::PrimeField;
-use ark_poly::polynomial::univariate::DensePolynomial;
-use ark_poly::{
DenseUVPolynomial, EvaluationDomain, Evaluations, Polynomial, Radix2EvaluationDomain, -}; -#[cfg(feature = "r1cs")] -use ark_r1cs_std::bits::boolean::Boolean; -#[cfg(feature = "r1cs")] -use ark_r1cs_std::fields::fp::FpVar; -#[cfg(feature = "r1cs")] -use ark_r1cs_std::fields::FieldVar; -#[cfg(feature = "r1cs")] -use ark_relations::r1cs::SynthesisError; -use ark_std::vec::Vec; - -/// Given domain as ``, `CosetOfDomain` represents `h` -/// -/// Constraint equivalent is in `r1cs_std::poly::domain`. -#[derive(Clone, Copy, Eq, PartialEq, Debug)] -pub struct Radix2CosetDomain { - /// A non-coset radix 2 domain: `` - pub base_domain: Radix2EvaluationDomain, - /// offset `h` - pub offset: F, -} - -// TODO: Move this to algebra, per https://github.com/arkworks-rs/algebra/issues/88#issuecomment-734963835 -impl Radix2CosetDomain { - /// Returns a new coset domain. - pub fn new(base_domain: Radix2EvaluationDomain, offset: F) -> Self { - Radix2CosetDomain { - base_domain, - offset, - } - } - - /// Returns a coset of size of power of two. - pub fn new_radix2_coset(coset_size: usize, offset: F) -> Self { - Self::new(Radix2EvaluationDomain::new(coset_size).unwrap(), offset) - } - - /// Converts a query position to the elements of the unique coset of size `log_coset_size` - /// within this domain that the query lies in. - /// `query_position` is an index within this domain. - /// Returns the positions of coset elements in `self`, - /// and the coset represented as a Radix2CosetDomain. - pub fn query_position_to_coset( - &self, - query_position: usize, - log_coset_size: usize, - ) -> (Vec, Self) { - // make sure coset position is not out of range - assert!( - log_coset_size < self.base_domain.log_size_of_group as usize, - "query coset size too large" - ); - assert!( - query_position < (1 << (self.base_domain.log_size_of_group - log_coset_size as u32)), - "coset position out of range" - ); - - let dist_between_coset_elems = - 1 << (self.base_domain.log_size_of_group as usize - log_coset_size); - - // generate coset - let c = Self::new_radix2_coset( - 1 << log_coset_size, - self.offset * self.gen().pow(&[query_position as u64]), - ); - // c.base_domain.group_gen = self.gen().pow(&[1 << (self.dim() - log_coset_size)]); - // c.base_domain.group_gen_inv = c.base_domain.group_gen.inverse().unwrap(); // not necessary - - // generate positions - let mut indices = Vec::with_capacity(1 << log_coset_size); - for i in 0..(1 << log_coset_size) { - indices.push(query_position + i * dist_between_coset_elems) - } - - (indices, c) - } - - /// returns the size of the domain - pub fn size(&self) -> usize { - self.base_domain.size() - } - - /// return the log 2 size of domain - pub fn dim(&self) -> usize { - self.base_domain.log_size_of_group as usize - } - - /// returns generator of the coset - pub fn gen(&self) -> F { - self.base_domain.group_gen - } - - /// Given f(x) = \sum a_i x^i. Returns g(x) = \sum a_i h^i x^i - /// - /// Note that g(x) = f(hx) - fn add_offset_to_coeffs(&self, poly: &DensePolynomial) -> DensePolynomial { - let mut r = F::one(); - let mut transformed_coeff = Vec::with_capacity(poly.coeffs.len()); - for &coeff in poly.coeffs.iter() { - transformed_coeff.push(coeff * r); - r *= self.offset - } - DensePolynomial::from_coefficients_vec(transformed_coeff) - } - - /// Given g(x) = \sum a_i h^i x^i. 
Returns f(x) = \sum a_i x^i - /// - /// Note that g(x) = f(hx) - fn remove_offset_from_coeffs(&self, poly: &DensePolynomial) -> DensePolynomial { - let mut r = F::one(); - let h_inv = self.offset.inverse().unwrap(); - let mut transformed_coeff = Vec::with_capacity(poly.coeffs.len()); - for &coeff in poly.coeffs.iter() { - transformed_coeff.push(coeff * r); - r *= h_inv - } - DensePolynomial::from_coefficients_vec(transformed_coeff) - } - - /// Evaluate polynomial on this coset - pub fn evaluate(&self, poly: &DensePolynomial) -> Vec { - if self.size() < poly.degree() + 1 { - // we use naive method for evaluating a polynomial larger than the domain size. - // TODO: use a more efficient method using the fact that: - // (hg)^{|base_domain|} = h^{|base_domain|}, - // so we can efficiently fold the polynomial's coefficients on itself, - // into a single polynomial of degree `self.size() - 1` - return self - .base_domain - .elements() - .map(|g| poly.evaluate(&(self.offset * g))) - .collect(); - } - // g(x) = f(hx). So, f(coset) = g(base_domain) - let gx = self.add_offset_to_coeffs(poly); - gx.evaluate_over_domain(self.base_domain.clone()).evals - } - - /// given evaluation over this coset. Interpolate and returns coefficients. - pub fn interpolate(&self, evaluations: Vec) -> DensePolynomial { - assert_eq!(evaluations.len(), self.base_domain.size()); - // first get g(x) - let gx = Evaluations::from_vec_and_domain(evaluations, self.base_domain).interpolate(); - // g(x) = f(hx). Let g(x) = \sum a_i h^i x^i. Then f(x) = \sum a_i x^i - let fx = self.remove_offset_from_coeffs(&gx); - fx - } - - /// Returns an element of the coset - pub fn element(&self, i: usize) -> F { - self.base_domain.element(i) * self.offset - } - - #[cfg(feature = "r1cs")] - /// Returns an element fo the coset, given the index as a variable. - pub fn element_var(&self, index: &[Boolean]) -> Result, SynthesisError> { - Ok(FpVar::constant(self.offset) * FpVar::constant(self.gen()).pow_le(index)?) - } - - /// Shrink the domain size such that new domain size = `self.size() / (1 << log_shrink_factor)` - /// and has same offset. 
- pub fn fold(&self, log_shrink_factor: u64) -> Radix2CosetDomain { - let coset_size = 1 << log_shrink_factor; - let domain_size = self.base_domain.size; - let dist_between_coset_elems = domain_size / coset_size; - Radix2CosetDomain::new_radix2_coset(dist_between_coset_elems as usize, self.offset) - } -} - -#[cfg(test)] -mod tests { - use ark_poly::univariate::DensePolynomial; - use ark_poly::{DenseUVPolynomial, Polynomial}; - use ark_std::{test_rng, UniformRand}; - use ark_test_curves::bls12_381::Fr; - - use crate::domain::Radix2CosetDomain; - - #[cfg(feature = "r1cs")] - mod consistency_with_constraints { - use ark_poly::univariate::DensePolynomial; - use ark_poly::Radix2EvaluationDomain; - use ark_poly::{DenseUVPolynomial, EvaluationDomain, Polynomial}; - use ark_r1cs_std::alloc::AllocVar; - use ark_r1cs_std::fields::fp::FpVar; - use ark_r1cs_std::fields::FieldVar; - use ark_r1cs_std::poly::domain::Radix2DomainVar; - use ark_r1cs_std::poly::evaluations::univariate::EvaluationsVar; - use ark_r1cs_std::R1CSVar; - use ark_relations::r1cs::ConstraintSystem; - use ark_std::{test_rng, UniformRand}; - use ark_test_curves::bls12_381::Fr; - - use crate::domain::Radix2CosetDomain; - - #[test] - fn test_consistency_with_coset_constraints() { - let mut rng = test_rng(); - let degree = 51; - let poly = DensePolynomial::::rand(degree, &mut rng); - let base_domain = Radix2EvaluationDomain::new(degree + 1).unwrap(); - let offset = Fr::rand(&mut rng); - let coset = Radix2CosetDomain::new(base_domain, offset); - - // test evaluation - let expected_eval: Vec<_> = coset - .base_domain - .elements() - .map(|x| poly.evaluate(&(offset * x))) - .collect(); - let actual_eval = coset.evaluate(&poly); - assert_eq!(actual_eval, expected_eval); - - // test interpolation - let interpolated_poly = coset.interpolate(expected_eval.to_vec()); - assert_eq!(interpolated_poly, poly); - - // test consistency with r1cs-std - let cs = ConstraintSystem::new_ref(); - let eval_var: Vec<_> = expected_eval - .iter() - .map(|x| FpVar::new_witness(ark_relations::ns!(cs, "eval_var"), || Ok(*x)).unwrap()) - .collect(); - - let r1cs_coset = Radix2DomainVar::new( - base_domain.group_gen, - ark_std::log2(degree.next_power_of_two()) as u64, - FpVar::constant(offset), - ) - .unwrap(); - let eval_var = EvaluationsVar::from_vec_and_domain(eval_var, r1cs_coset, true); - - let pt = Fr::rand(&mut rng); - let pt_var = - FpVar::new_witness(ark_relations::ns!(cs, "random point"), || Ok(pt)).unwrap(); - - let expected = poly.evaluate(&pt); - let actual = eval_var.interpolate_and_evaluate(&pt_var).unwrap(); - - assert_eq!(actual.value().unwrap(), expected); - assert!(cs.is_satisfied().unwrap()); - } - } - - #[test] - fn query_coset_test() { - let mut rng = test_rng(); - let poly = DensePolynomial::rand(4, &mut rng); - - let offset = Fr::rand(&mut rng); - let domain_coset = Radix2CosetDomain::new_radix2_coset(15, offset); - - let evals_on_domain_coset = domain_coset.evaluate(&poly); - assert_eq!( - poly.evaluate(&domain_coset.element(2)), - evals_on_domain_coset[2] - ); - - let (query_coset_pos, query_coset) = domain_coset.query_position_to_coset(2, 2); - - assert_eq!(query_coset_pos, vec![2, 6, 10, 14]); - - assert_eq!(query_coset.element(0), domain_coset.element(2)); - assert_eq!(query_coset.element(1), domain_coset.element(6)); - assert_eq!(query_coset.element(2), domain_coset.element(10)); - assert_eq!(query_coset.element(3), domain_coset.element(14)); - - assert_eq!( - query_coset.evaluate(&poly), - vec![ - evals_on_domain_coset[2], - 
evals_on_domain_coset[6], - evals_on_domain_coset[10], - evals_on_domain_coset[14] - ] - ) - } - - #[test] - #[cfg(feature = "r1cs")] - fn element_var_test() { - use ark_r1cs_std::alloc::AllocVar; - use ark_r1cs_std::uint64::UInt64; - use ark_r1cs_std::R1CSVar; - use ark_relations::r1cs::ConstraintSystem; - use ark_relations::*; - - let mut rng = test_rng(); - let offset = Fr::rand(&mut rng); - let domain_coset = Radix2CosetDomain::new_radix2_coset(15, offset); - - let cs = ConstraintSystem::new_ref(); - let index = 11; - let index_var = UInt64::new_witness(ns!(cs, "index"), || Ok(index)) - .unwrap() - .to_bits_le(); - - let expected = domain_coset.element(index as usize); - let actual = domain_coset - .element_var(&index_var) - .unwrap() - .value() - .unwrap(); - - assert_eq!(expected, actual); - assert!(cs.is_satisfied().unwrap()) - } -} diff --git a/src/fri/constraints/mod.rs b/src/fri/constraints/mod.rs deleted file mode 100644 index 1a3b48c..0000000 --- a/src/fri/constraints/mod.rs +++ /dev/null @@ -1,369 +0,0 @@ -#![allow(unused)] // temporary -use crate::direct::constraints::DirectLDTGadget; -use crate::domain::Radix2CosetDomain; -use crate::fri::FRIParameters; -use ark_ff::PrimeField; -use ark_r1cs_std::bits::boolean::Boolean; -use ark_r1cs_std::eq::EqGadget; -use ark_r1cs_std::fields::fp::FpVar; -use ark_r1cs_std::fields::FieldVar; -use ark_r1cs_std::poly::domain::Radix2DomainVar; -use ark_r1cs_std::poly::evaluations::univariate::EvaluationsVar; -use ark_r1cs_std::poly::polynomial::univariate::dense::DensePolynomialVar; -use ark_r1cs_std::prelude::CondSelectGadget; -use ark_relations::r1cs::SynthesisError; -use ark_sponge::constraints::CryptographicSpongeVar; -use ark_sponge::FieldBasedCryptographicSponge; -use ark_std::marker::PhantomData; -use ark_std::vec::Vec; - -/// Constraints for FRI verifier. -pub struct FRIVerifierGadget { - _marker: PhantomData, -} - -impl FRIVerifierGadget { - /// ## Step 1: Interative Phase - /// Sample alpha in interactive phase. - pub fn interactive_phase_single_round< - S: FieldBasedCryptographicSponge, - SV: CryptographicSpongeVar, - >( - sponge_var: &mut SV, - ) -> Result, SynthesisError> { - Ok(sponge_var - .squeeze_field_elements(1)? - .first() - .unwrap() - .clone()) - } - - /// ## Step 2: Sample Queried Coset - /// Sample the coset to be queried. - pub fn sample_coset_index< - S: FieldBasedCryptographicSponge, - SV: CryptographicSpongeVar, - >( - sponge_var: &mut SV, - fri_parameters: &FRIParameters, - ) -> Result>, SynthesisError> { - let log_num_cosets = - fri_parameters.domain.dim() - fri_parameters.localization_parameters[0] as usize; - sponge_var.squeeze_bits(log_num_cosets) - } - - /// ## Step 2: Query Phase (Prepare Query) - /// Prepare one query given the random coset index. The returned value `queries[i]` is the coset query - /// of the `ith` round polynomial (including codeword but does not include final polynomial). - /// Final polynomial is not queried. Instead, verifier will get - /// the whole final polynomial in evaluation form, and do direct LDT. 
- /// - /// Returns the all query domains, and query coset index, final polynomial domain - pub fn prepare_query( - rand_coset_index: Vec>, - fri_parameters: &FRIParameters, - ) -> Result< - ( - Vec>, - Vec>>, - Radix2CosetDomain, - ), - SynthesisError, - > { - let num_fri_rounds = fri_parameters.localization_parameters.len(); - let mut coset_indices = Vec::new(); - let mut curr_coset_index = rand_coset_index; - let mut queries = Vec::with_capacity(num_fri_rounds); - let mut curr_round_domain = fri_parameters.domain; - - // sample coset index - for i in 0..num_fri_rounds { - let log_dist_between_coset_elems = - curr_round_domain.dim() - fri_parameters.localization_parameters[i] as usize; - curr_coset_index = curr_coset_index[..log_dist_between_coset_elems].to_vec(); - - coset_indices.push(curr_coset_index.clone()); - - // get the query coset from coset index - let query_gen = fri_parameters.domain.gen().pow(&[1 - << (fri_parameters.domain.dim() - - fri_parameters.localization_parameters[i] as usize)]); - debug_assert_eq!( - query_gen.pow(&[1 << fri_parameters.localization_parameters[i]]), - F::one() - ); - - let query_offset = &FpVar::constant(curr_round_domain.offset) - * &(FpVar::constant(curr_round_domain.gen()).pow_le(&curr_coset_index)?); - - let query_coset = Radix2DomainVar::new( - query_gen, - fri_parameters.localization_parameters[i], - query_offset, - )?; - - queries.push(query_coset); - - curr_round_domain = curr_round_domain.fold(fri_parameters.localization_parameters[i]) - } - - Ok((queries, coset_indices, curr_round_domain)) - } - - /// Map coset in current round to a single point in next round. - /// - /// Essentially, this function interpolates the polynomial and evaluate on `alpha`. - fn expected_evaluation( - coset: &Radix2DomainVar, - queried_evaluations: Vec>, - alpha: FpVar, - ) -> Result, SynthesisError> { - let evaluations = - EvaluationsVar::from_vec_and_domain(queried_evaluations, coset.clone(), true); - evaluations.interpolate_and_evaluate(&alpha) - } - - /// ## Step 3: Decision Phase (Check query) - /// After preparing the query, verifier get the evaluations of corresponding query. Those evaluations needs - /// to be checked by merkle tree. Then verifier calls this method to check if polynomial sent in each round - /// is consistent with each other, and the final polynomial is low-degree. - /// - /// `queries[i]` is the coset query of the `ith` round polynomial, including the codeword polynomial. - /// `queried_evaluations` stores the result of corresponding query. 
- pub fn consistency_check( - fri_parameters: &FRIParameters, - queried_coset_indices: &[Vec>], - queries: &[Radix2DomainVar], - queried_evaluations: &[Vec>], - alphas: &[FpVar], - final_polynomial_domain: &Radix2CosetDomain, - final_polynomial: &DensePolynomialVar, - ) -> Result, SynthesisError> { - let mut expected_next_round_eval = FpVar::zero(); - - debug_assert_eq!(fri_parameters.localization_parameters.len(), queries.len()); - let mut check_result = Boolean::constant(true); - for i in 0..queries.len() { - expected_next_round_eval = FRIVerifierGadget::expected_evaluation( - &queries[i], - queried_evaluations[i].clone(), - alphas[i].clone(), - )?; - - // check if current round result is consistent with next round polynomial (if next round is not final) - if i < queries.len() - 1 { - let next_localization_param = - fri_parameters.localization_parameters[i + 1] as usize; - let log_next_dist_between_coset_elems = - fri_parameters.log_round_coset_sizes[i + 1] - next_localization_param; - // native code: queried_coset_indices[i] >> log_next_dist_between_coset_elems - let next_intra_coset_index = - &queried_coset_indices[i][log_next_dist_between_coset_elems..]; - - let actual = FpVar::::conditionally_select_power_of_two_vector( - next_intra_coset_index, - &queried_evaluations[i + 1], - )?; - - check_result = check_result.and(&expected_next_round_eval.is_eq(&actual)?)?; - } - } - - // check final polynomial (low degree & consistency check) - // We assume degree_bound is power of 2. - assert!(fri_parameters.tested_degree.is_power_of_two()); - let total_shrink_factor: u64 = fri_parameters.localization_parameters.iter().sum(); - let final_poly_degree_bound = fri_parameters.tested_degree >> total_shrink_factor; - - let final_element_index = queried_coset_indices.last().unwrap(); - - DirectLDTGadget::verify( - final_polynomial_domain.element_var(final_element_index)?, - expected_next_round_eval, - final_polynomial, - final_poly_degree_bound as usize, - ) - } -} - -#[cfg(test)] -mod tests { - use crate::direct::DirectLDT; - use crate::domain::Radix2CosetDomain; - use crate::fri::constraints::FRIVerifierGadget; - use crate::fri::prover::FRIProver; - use crate::fri::verifier::FRIVerifier; - use crate::fri::FRIParameters; - use ark_poly::polynomial::univariate::DensePolynomial; - use ark_poly::DenseUVPolynomial; - use ark_r1cs_std::alloc::AllocVar; - use ark_r1cs_std::bits::uint64::UInt64; - use ark_r1cs_std::fields::fp::FpVar; - use ark_r1cs_std::poly::polynomial::univariate::dense::DensePolynomialVar; - use ark_r1cs_std::R1CSVar; - use ark_relations::r1cs::ConstraintSystem; - use ark_relations::*; - use ark_std::{test_rng, UniformRand}; - use ark_test_curves::bls12_381::Fr; - - #[test] - fn test_prepare_query() { - let mut rng = test_rng(); - let offset = Fr::rand(&mut rng); - let domain_input = Radix2CosetDomain::new_radix2_coset(1 << 7, offset); - - let fri_parameters = FRIParameters::new(32, vec![1, 2, 1], domain_input); - - let rand_coset_index = 31usize; - let cs = ConstraintSystem::new_ref(); - let rand_coset_index_var = - UInt64::new_witness(ns!(cs, "rand_coset_index"), || Ok(rand_coset_index as u64)) - .unwrap(); - let rand_coset_index_var_arr = rand_coset_index_var.to_bits_le()[..(1 << 6)].to_vec(); - - let rand_coset_index = 31; - let (query_cosets, query_indices, domain_final) = - FRIVerifier::prepare_query(rand_coset_index, &fri_parameters); - let (query_cosets_actual, query_indices_actual, domain_final_actual) = - FRIVerifierGadget::prepare_query(rand_coset_index_var_arr, 
&fri_parameters).unwrap(); - - for i in 0..query_cosets.len() { - assert_eq!( - query_cosets_actual[i].offset().value().unwrap(), - query_cosets[i].offset - ); - assert_eq!(query_cosets_actual[i].gen, query_cosets[i].gen()); - assert_eq!(query_cosets_actual[i].dim as usize, query_cosets[i].dim()); - } - - assert_eq!(domain_final, domain_final_actual) - } - - #[test] - fn two_rounds_fri_test() { - let cs = ConstraintSystem::new_ref(); - - let mut rng = test_rng(); - let poly = DensePolynomial::rand(64, &mut rng); - let offset = Fr::rand(&mut rng); - let domain_input = Radix2CosetDomain::new_radix2_coset(128, offset); - let evaluations_input = domain_input.evaluate(&poly); - - // set up verifier parameters - let fri_parameters = FRIParameters::new(64, vec![1, 2, 2], domain_input); - let alphas: Vec<_> = (0..3).map(|_| Fr::rand(&mut rng)).collect(); - let alphas_var: Vec<_> = alphas - .iter() - .map(|x| FpVar::new_witness(ns!(cs, "alphas"), || Ok(x)).unwrap()) - .collect(); - - // prover commits all round polynomial - let (domain_round_0, evaluations_round_0) = FRIProver::interactive_phase_single_round( - domain_input, - evaluations_input.clone(), - fri_parameters.localization_parameters[0], - alphas[0], - ); - - let (domain_round_1, evaluations_round_1) = FRIProver::interactive_phase_single_round( - domain_round_0, - evaluations_round_0.clone(), - fri_parameters.localization_parameters[1], - alphas[1], - ); - - let (expected_domain_final, evaluations_final) = FRIProver::interactive_phase_single_round( - domain_round_1, - evaluations_round_1.clone(), - fri_parameters.localization_parameters[2], - alphas[2], - ); - - let rand_coset_index = 31; - let rand_coset_index_var = - UInt64::new_witness(ns!(cs, "rand_coset_index"), || Ok(rand_coset_index)) - .unwrap() - .to_bits_le(); - - let (query_cosets, query_indices, domain_final) = - FRIVerifierGadget::prepare_query(rand_coset_index_var, &fri_parameters).unwrap(); - let (_, query_indices_native, _) = - FRIVerifier::prepare_query(rand_coset_index as usize, &fri_parameters); - - assert_eq!(query_indices.len(), 3); - assert_eq!(domain_final, expected_domain_final); - - let (indices, qi) = domain_input.query_position_to_coset( - query_indices_native[0], - fri_parameters.localization_parameters[0] as usize, - ); - assert_eq!(qi.offset, query_cosets[0].offset().value().unwrap()); - let answer_input: Vec<_> = indices - .iter() - .map(|&i| { - FpVar::new_witness(ns!(cs, "answer_input"), || Ok(evaluations_input[i])).unwrap() - }) - .collect(); - - let (indices, q0) = domain_round_0.query_position_to_coset( - query_indices_native[1], - fri_parameters.localization_parameters[1] as usize, - ); - assert_eq!(q0.offset, query_cosets[1].offset().value().unwrap()); - let answer_round_0: Vec<_> = indices - .iter() - .map(|&i| { - FpVar::new_witness( - ns!(cs, "evaluations_round_0"), - || Ok(evaluations_round_0[i]), - ) - .unwrap() - }) - .collect(); - - let (indices, q1) = domain_round_1.query_position_to_coset( - query_indices_native[2], - fri_parameters.localization_parameters[2] as usize, - ); - let answer_round_1: Vec<_> = indices - .iter() - .map(|&i| { - FpVar::new_witness( - ns!(cs, "evaluations_round_1"), - || Ok(evaluations_round_1[i]), - ) - .unwrap() - }) - .collect(); - assert_eq!(q1.offset, query_cosets[2].offset().value().unwrap()); - - let total_shrink_factor: u64 = fri_parameters.localization_parameters.iter().sum(); - let final_poly_degree_bound = fri_parameters.tested_degree >> total_shrink_factor; - let final_polynomial = 
DirectLDT::generate_low_degree_coefficients( - domain_final, - evaluations_final, - final_poly_degree_bound as usize, - ); - let final_polynomial_coeffs: Vec<_> = final_polynomial - .coeffs() - .iter() - .map(|x| FpVar::new_witness(ns!(cs, "final_poly_coeff"), || Ok(*x)).unwrap()) - .collect(); - let final_polynomial_var = - DensePolynomialVar::from_coefficients_slice(&final_polynomial_coeffs); - - let result = FRIVerifierGadget::consistency_check( - &fri_parameters, - &query_indices, - &query_cosets, - &vec![answer_input, answer_round_0, answer_round_1], - &alphas_var, - &domain_final, - &final_polynomial_var, - ) - .unwrap(); - - assert!(result.value().unwrap()); - assert!(cs.is_satisfied().unwrap()); - } -} diff --git a/src/fri/mod.rs b/src/fri/mod.rs deleted file mode 100644 index 7d2aabf..0000000 --- a/src/fri/mod.rs +++ /dev/null @@ -1,56 +0,0 @@ -use crate::domain::Radix2CosetDomain; -use ark_ff::PrimeField; -use ark_std::marker::PhantomData; -use ark_std::vec::Vec; -/// R1CS constraints for FRI Verifier. -#[cfg(feature = "r1cs")] -pub mod constraints; -/// Prover used by FRI protocol. -pub mod prover; -/// Verifier used by FRI protocol. -pub mod verifier; - -/// Some parameters used by FRI verifiers. -#[derive(Clone)] -pub struct FRIParameters { - /// The degree - pub tested_degree: u64, - /// At each round `i`, domain size will shrink to `last_round_domain_size` / `localization_parameters[i]`^2 - pub localization_parameters: Vec, - /// Evaluation domain, which is represented as a coset. - pub domain: Radix2CosetDomain, - /// coset sizes in each round (first round is input coset) - log_round_coset_sizes: Vec, -} - -impl FRIParameters { - /// Check parameter validity and returns new `FRIParameters`. - pub fn new( - tested_degree: u64, - localization_parameters: Vec, - domain: Radix2CosetDomain, - ) -> Self { - assert!( - domain.size() >= tested_degree as usize + 1, - "Evaluations is not low degree!\ - Domain size needs to be >= tested_degree + 1" - ); - let mut log_round_coset_sizes = Vec::new(); - log_round_coset_sizes.push(domain.dim()); - for i in 0..localization_parameters.len() { - log_round_coset_sizes - .push(log_round_coset_sizes[i] - localization_parameters[i] as usize) - } - FRIParameters { - tested_degree, - localization_parameters, - domain, - log_round_coset_sizes, - } - } -} - -/// Fast Reed-Solomon Interactive Oracle Proof of Proximity -pub struct FRI { - _protocol: PhantomData, -} diff --git a/src/fri/prover.rs b/src/fri/prover.rs deleted file mode 100644 index b7a73bd..0000000 --- a/src/fri/prover.rs +++ /dev/null @@ -1,250 +0,0 @@ -use crate::domain::Radix2CosetDomain; -use ark_ff::{batch_inversion_and_mul, PrimeField}; -use ark_r1cs_std::poly::evaluations::univariate::lagrange_interpolator::LagrangeInterpolator; -use ark_std::marker::PhantomData; -use ark_std::vec::Vec; -/// FRI Prover -pub struct FRIProver { - _prover: PhantomData, -} - -impl FRIProver { - /// Single round prover in commit phase. Returns the evaluation oracles for next round. - /// - /// Returns domain for next round polynomial and evaluations over the domain. 
- pub fn interactive_phase_single_round_naive( - domain: Radix2CosetDomain, - evaluation_oracles_over_domain: Vec, - localization_param: u64, - alpha: F, - ) -> (Radix2CosetDomain, Vec) { - let coset_size = 1 << localization_param; - let domain_size = domain.base_domain.size; - let dist_between_coset_elems = domain_size / coset_size; - let mut new_evals = Vec::with_capacity(dist_between_coset_elems as usize); - let coset_generator = domain - .gen() - .pow(&[1 << (domain.dim() as u64 - localization_param)]); - let mut cur_coset_offset = domain.offset; - - for coset_index in 0..dist_between_coset_elems { - let mut poly_evals = Vec::new(); - for intra_coset_index in 0..coset_size { - poly_evals.push( - evaluation_oracles_over_domain - [(coset_index + intra_coset_index * dist_between_coset_elems) as usize], - ); - } - - let interpolator = LagrangeInterpolator::new( - cur_coset_offset, - coset_generator, - localization_param, - poly_evals, - ); - new_evals.push(interpolator.interpolate(alpha)); - cur_coset_offset *= domain.gen(); - } - - let c = Radix2CosetDomain::new_radix2_coset(new_evals.len(), domain.offset); - // c.base_domain.group_gen = coset_generator; - // c.base_domain.group_gen_inv = coset_generator.inverse().unwrap(); - debug_assert_eq!(coset_generator.pow(&[new_evals.len() as u64]), F::one()); - debug_assert_eq!(c.size(), new_evals.len()); - (c, new_evals) - } - - /// Single round prover in commit phase. Returns the polynomial for next round - /// represented by evaluations over domain in next round. - /// - /// Returns domain for next round polynomial and evaluations over the domain. - pub fn interactive_phase_single_round( - domain: Radix2CosetDomain, - evals_over_domain: Vec, - localization_param: u64, - alpha: F, - ) -> (Radix2CosetDomain, Vec) { - let coset_size = 1 << localization_param; - let num_cosets = domain.size() / coset_size; - let mut next_f_i = Vec::with_capacity(num_cosets); // new_evals - - let h_inc = domain.gen(); - let h_inc_to_coset_inv_plus_one = - h_inc.pow(&[coset_size as u64]).inverse().unwrap() * h_inc; - - let shiftless_coset = Radix2CosetDomain::new_radix2_coset(coset_size, F::one()); - let g = shiftless_coset.gen(); - let g_inv = g.inverse().unwrap(); - let x_to_order_coset = alpha.pow(&[coset_size as u64]); - - // x * g^{-k} - let mut shifted_x_elements = Vec::with_capacity(coset_size); - shifted_x_elements.push(alpha); - for i in 1..coset_size { - shifted_x_elements.push(shifted_x_elements[i - 1] * g_inv); - } - - let mut cur_h = domain.offset; - let first_h_to_coset_inv_plus_one = - cur_h.pow(&[coset_size as u64]).inverse().unwrap() * cur_h; - let mut cur_coset_constant_plus_h = x_to_order_coset * first_h_to_coset_inv_plus_one; - - /* x * g^{-k} - h, for all combinations of k, h. */ - let mut elements_to_invert = Vec::with_capacity(evals_over_domain.len()); - - /* constant for each coset, equal to - * vp_coset(x) / h^{|coset| - 1} = x^{|coset|} h^{-|coset| + 1} - h */ - let mut constant_for_each_coset = Vec::with_capacity(num_cosets); - - let constant_for_all_cosets = F::from(coset_size as u128).inverse().unwrap(); - let mut x_ever_in_domain = false; - let mut x_coset_index = 0; - let mut x_index_in_domain = 0; - - /* First we create all the constants for each coset, - and the entire vector of elements to invert, xg^{-k} - h. 
- */ - - for j in 0..num_cosets { - /* coset constant = x^|coset| * h^{1 - |coset|} - h */ - let coset_constant: F = cur_coset_constant_plus_h - cur_h; - constant_for_each_coset.push(coset_constant); - /* coset_constant = vp_coset(x) * h^{-|coset| + 1}, - since h is non-zero, coset_constant is zero iff vp_coset(x) is zero. - If vp_coset(x) is zero, then x is in the coset. */ - let x_in_coset = coset_constant.is_zero(); - /* if x is in the coset, we mark which position x is within f_i_domain, - and we pad elements to invert to simplify inversion later. */ - if x_in_coset { - x_ever_in_domain = true; - x_coset_index = j; - // find which element in the coset x belongs to. - // also pad elements_to_invert to simplify indexing - let mut cur_elem = cur_h; - for k in 0..coset_size { - if cur_elem == alpha { - x_index_in_domain = k * num_cosets + j; - } - cur_elem *= g; - elements_to_invert.push(F::one()); - } - continue; - } - - /* Append all elements to invert, (xg^{-k} - h) */ - for k in 0..coset_size { - elements_to_invert.push(shifted_x_elements[k] - cur_h); - } - - cur_h *= h_inc; - /* coset constant = x^|coset| * h^{1 - |coset|} - h - So we can efficiently increment x^|coset| * h^{1 - |coset|} */ - cur_coset_constant_plus_h *= h_inc_to_coset_inv_plus_one; - } - /* Technically not lagrange coefficients, its missing the constant for each coset */ - batch_inversion_and_mul(&mut elements_to_invert, &constant_for_all_cosets); - let lagrange_coefficients = elements_to_invert; - for j in 0..num_cosets { - let mut interpolation = F::zero(); - for k in 0..coset_size { - interpolation += evals_over_domain[k * num_cosets + j] - * lagrange_coefficients[j * coset_size + k]; - } - /* Multiply the constant for each coset, to get the correct interpolation */ - interpolation *= constant_for_each_coset[j]; - next_f_i.push(interpolation); - } - - /* if x ever in domain, correct that evaluation. 
*/ - if x_ever_in_domain { - next_f_i[x_coset_index] = evals_over_domain[x_index_in_domain]; - } - - // domain definition - let c = domain.fold(localization_param); - - (c, next_f_i) - } -} - -#[cfg(test)] -pub mod tests { - use crate::direct::DirectLDT; - use crate::domain::Radix2CosetDomain; - use crate::fri::prover::FRIProver; - use ark_poly::univariate::DensePolynomial; - use ark_poly::DenseUVPolynomial; - use ark_std::{test_rng, UniformRand}; - use ark_test_curves::bls12_381::Fr; - - #[test] - fn efficient_prover_consistency_test() { - let degree = 32; - - let mut rng = test_rng(); - let poly = DensePolynomial::::rand(degree, &mut rng); - let domain_coset = Radix2CosetDomain::new_radix2_coset(64, Fr::rand(&mut rng)); - let evaluations = domain_coset.evaluate(&poly); - - // fri prover should reduce its degree - let alpha = Fr::rand(&mut rng); - let localization = 2; - let (expected_domain_next_round, expected_eval_next_round) = - FRIProver::interactive_phase_single_round_naive( - domain_coset, - evaluations.to_vec(), - localization, - alpha, - ); - - let (actual_domain_next_round, actual_eval_next_round) = - FRIProver::interactive_phase_single_round( - domain_coset, - evaluations.to_vec(), - localization, - alpha, - ); - - assert_eq!(actual_domain_next_round, expected_domain_next_round); - assert_eq!(actual_eval_next_round, expected_eval_next_round); - } - - #[test] - fn degree_reduction_test() { - let degree = 32; - - let mut rng = test_rng(); - let poly = DensePolynomial::::rand(degree, &mut rng); - let domain_coset = Radix2CosetDomain::new_radix2_coset(64, Fr::rand(&mut rng)); - let evaluations = domain_coset.evaluate(&poly); - - // fri prover should reduce its degree - let alpha = Fr::rand(&mut rng); - let localization = 2; - let (domain_next_round, eval_next_round) = FRIProver::interactive_phase_single_round( - domain_coset.clone(), - evaluations.to_vec(), - localization, - alpha, - ); - - let low_degree_poly = DirectLDT::generate_low_degree_coefficients( - domain_next_round.clone(), - eval_next_round.to_vec(), - degree / (1 << localization), - ); - - let sampled_element = domain_next_round.element(15); - let sampled_evaluation = eval_next_round[15]; - - assert!(DirectLDT::verify( - sampled_element, - sampled_evaluation, - &low_degree_poly - )); - - // test `fold_domain` - let fold_domain = domain_coset.fold(localization); - assert_eq!(fold_domain, domain_next_round); - } -} diff --git a/src/fri/verifier.rs b/src/fri/verifier.rs deleted file mode 100644 index f62952b..0000000 --- a/src/fri/verifier.rs +++ /dev/null @@ -1,342 +0,0 @@ -use ark_std::marker::PhantomData; - -use crate::direct::DirectLDT; -use crate::domain::Radix2CosetDomain; -use crate::fri::FRIParameters; -use ark_ff::PrimeField; -use ark_poly::polynomial::univariate::DensePolynomial; -use ark_poly::Polynomial; -use ark_sponge::FieldBasedCryptographicSponge; -use ark_std::vec::Vec; - -/// Implements FRI verifier. -pub struct FRIVerifier { - _verifier: PhantomData, -} - -impl FRIVerifier { - /// ## Step 1: Interative Phase - /// Sample alpha in interactive phase. - pub fn interactive_phase_single_round>( - sponge: &mut S, - ) -> F { - sponge.squeeze_native_field_elements(1)[0] - } - - /// ## Step 2: Sample Queried Coset - /// Sample the coset to be queried. 
- pub fn sample_coset_index>( - sponge: &mut S, - fri_parameters: &FRIParameters, - ) -> usize { - let log_num_cosets = - fri_parameters.domain.dim() - fri_parameters.localization_parameters[0] as usize; - // we use the fact that number of cosets is always power of two - let rand_coset_index = le_bits_array_to_usize(&sponge.squeeze_bits(log_num_cosets)); - rand_coset_index - } - - /// ## Step 2: Query Phase (Prepare Query) - /// Prepare one query given the random coset index. The returned value `queries[i]` is the coset query - /// of the `ith` round polynomial (including codeword but does not include final polynomial). - /// Final polynomial is not queried. Instead, verifier will get - /// the whole final polynomial in evaluation form, and do direct LDT. - /// - /// Returns the all query domains, and query coset index, final polynomial domain - pub fn prepare_query( - rand_coset_index: usize, - fri_parameters: &FRIParameters, - ) -> (Vec>, Vec, Radix2CosetDomain) { - let num_fri_rounds = fri_parameters.localization_parameters.len(); - let mut coset_indices = Vec::new(); - let mut curr_coset_index = rand_coset_index; - let mut queries = Vec::with_capacity(num_fri_rounds); - let mut curr_round_domain = fri_parameters.domain; - // sample a coset index - for i in 0..num_fri_rounds { - // current coset index = last coset index % (distance between coset at current round) - // edge case: at first round, this still applies - - let dist_between_coset_elems = - curr_round_domain.size() / (1 << fri_parameters.localization_parameters[i]); - curr_coset_index = curr_coset_index % dist_between_coset_elems; - - coset_indices.push(curr_coset_index); - - let (_, query_coset) = curr_round_domain.query_position_to_coset( - curr_coset_index, - fri_parameters.localization_parameters[i] as usize, - ); - - queries.push(query_coset); - - // get next round coset size, and next round domain - curr_round_domain = curr_round_domain.fold(fri_parameters.localization_parameters[i]); - } - - (queries, coset_indices, curr_round_domain) - } - - /// ## Step 2: Query Phase (Prepare Query) - /// Prepare all queries given the sampled random coset indices. - /// - /// The first returned value `queries[i][j]` is the coset query - /// of the `j`th round polynomial (including codeword but does not include final polynomial) for `i`th query. - /// - /// The second returned value `indices[i][j]` is the coset index - /// of the `j`th round polynomial (including codeword but does not include final polynomial) for `i`th query. - /// - /// The last returned value `final[i]` is the final polynomial domain at round `i`. - pub fn batch_prepare_queries( - rand_coset_indices: &[usize], - fri_parameters: &FRIParameters, - ) -> ( - Vec>>, - Vec>, - Vec>, - ) { - let mut queries = Vec::with_capacity(rand_coset_indices.len()); - let mut indices = Vec::with_capacity(rand_coset_indices.len()); - let mut finals = Vec::with_capacity(rand_coset_indices.len()); - - rand_coset_indices - .iter() - .map(|&i| Self::prepare_query(i, fri_parameters)) - .for_each(|(query, index, fp)| { - queries.push(query); - indices.push(index); - finals.push(fp); - }); - - (queries, indices, finals) - } - - /// ## Step 3: Decision Phase (Check query) - /// After preparing the query, verifier get the evaluations of corresponding query. Those evaluations needs - /// to be checked by merkle tree. Then verifier calls this method to check if polynomial sent in each round - /// is consistent with each other, and the final polynomial is low-degree. 
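The index bookkeeping in `prepare_query` above is easy to trace by hand. Below is a standalone sketch (the function is illustrative, not part of the API) for the parameters used in the test at the end of this file: domain size 128, localization parameters [1, 2, 1], and query index 31.

```rust
// Illustrative only: mirrors the per-round index reduction in `prepare_query`.
fn trace_coset_indices(mut index: usize, mut domain_size: usize, locs: &[u64]) -> Vec<usize> {
    locs.iter()
        .map(|&loc| {
            // cosets of size 2^loc partition the domain, and the distance
            // between consecutive elements of one coset equals their count
            let dist_between_coset_elems = domain_size / (1 << loc);
            index %= dist_between_coset_elems;
            domain_size /= 1 << loc; // size of the folded domain next round
            index
        })
        .collect()
}

#[test]
fn index_trace_matches_hand_computation() {
    // domain size 128 with localization parameters [1, 2, 1]:
    // 31 % 64 = 31, then 31 % 16 = 15, then 15 % 8 = 7
    assert_eq!(trace_coset_indices(31, 128, &[1, 2, 1]), vec![31, 15, 7]);
}
```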
- /// - /// `queries[i]` is the coset query of the `ith` round polynomial, including the codeword polynomial. - /// `queried_evaluations` stores the result of corresponding query. - pub fn consistency_check( - fri_parameters: &FRIParameters, - queried_coset_indices: &[usize], - queries: &[Radix2CosetDomain], - queried_evaluations: &[Vec], - alphas: &[F], - final_polynomial_domain: &Radix2CosetDomain, - final_polynomial: &DensePolynomial, - ) -> bool { - let mut expected_next_round_eval = F::zero(); - - debug_assert_eq!(fri_parameters.localization_parameters.len(), queries.len()); - for i in 0..queries.len() { - expected_next_round_eval = FRIVerifier::expected_evaluation( - &queries[i], - queried_evaluations[i].clone(), - alphas[i], - ); - - // check if current round result is consistent with next round polynomial (if next round is not final) - if i < queries.len() - 1 { - let next_localization_param = - fri_parameters.localization_parameters[i + 1] as usize; - let log_next_dist_between_coset_elems = - fri_parameters.log_round_coset_sizes[i + 1] - next_localization_param; - let next_intra_coset_index = - queried_coset_indices[i] >> log_next_dist_between_coset_elems; - - let actual = queried_evaluations[i + 1][next_intra_coset_index]; - if expected_next_round_eval != actual { - return false; - } - } - } - - // check final polynomial (low degree & consistency check) - // We assume degree_bound is power of 2. - assert!(fri_parameters.tested_degree.is_power_of_two()); - let total_shrink_factor: u64 = fri_parameters.localization_parameters.iter().sum(); - let final_poly_degree_bound = fri_parameters.tested_degree >> total_shrink_factor; - - let final_element_index = *queried_coset_indices.last().unwrap(); - - assert!( - final_polynomial.degree() <= final_poly_degree_bound as usize, - "final polynomial degree is too large!" - ); - DirectLDT::verify( - final_polynomial_domain.element(final_element_index), - expected_next_round_eval, - &final_polynomial, - ) - } - - /// ## Step 3: Decision Phase (Check query) - /// After preparing all queries, verifier gets the evaluations of corresponding query. Those evaluations needs - /// to be checked by merkle tree. Then verifier calls this method to check if polynomial sent in each round - /// is consistent with each other, and the final polynomial is low-degree. - /// - /// * `all_queried_coset_indices[i][j]` is the `j`th round query coset index of `i`th query - /// * `all_queries_domains[i][j]` is the `j`th round query coset of `i`th query - /// * `all_queried_evaluations[i][j]` is a vector storing corresponding evaluations at `all_queries_domains[i][j]` - /// * `alphas[i]` is the randomness used by the polynomial - /// * `all_final_polynomial_domain[i]` is the final polynomial domain for `i`th query - /// * `all_final_polynomials` is the final polynomial for `i`th query - pub fn batch_consistency_check( - fri_parameters: &FRIParameters, - all_queried_coset_indices: &[Vec], - all_queries_domains: &[Vec>], - all_queried_evaluations: &[Vec>], - alphas: &[F], - all_final_polynomial_domain: &[Radix2CosetDomain], - all_final_polynomials: &[DensePolynomial], - ) -> bool { - for i in 0..all_queried_coset_indices.len() { - let result = Self::consistency_check( - fri_parameters, - &all_queried_coset_indices[i], - &all_queries_domains[i], - &all_queried_evaluations[i], - alphas, - &all_final_polynomial_domain[i], - &all_final_polynomials[i], - ); - if !result { - return false; - } - } - true - } - - /// Map coset in current round to a single point in next round. 
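For localization parameter 1, the coset-to-point mapping described here reduces to the classical even/odd FRI fold. A minimal sketch, assuming a two-element coset {x, -x} with claimed evaluations a = f(x) and b = f(-x):

```rust
use ark_ff::Field;

/// The degree-1 interpolant through (x, a) and (-x, b), evaluated at alpha:
/// ((a + b) + alpha * (a - b) / x) / 2.
/// `expected_evaluation` below computes the same quantity for general coset
/// sizes via interpolation. Assumes odd characteristic (2 is invertible) and
/// x != 0, which holds for elements of a multiplicative coset.
fn fold_pair<F: Field>(x: F, a: F, b: F, alpha: F) -> F {
    let two_inv = F::from(2u64).inverse().unwrap();
    ((a + b) + alpha * (a - b) * x.inverse().unwrap()) * two_inv
}
```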
- /// - /// Essentially, this function interpolates the polynomial and evaluate on `alpha`. - #[inline] - fn expected_evaluation( - coset: &Radix2CosetDomain, - queried_evaluations: Vec, - alpha: F, - ) -> F { - let poly = coset.interpolate(queried_evaluations); - poly.evaluate(&alpha) - } -} - -fn le_bits_array_to_usize(bits: &[bool]) -> usize { - let mut result = 0; - for &bit in bits { - result += bit as usize; - result *= 2; - } - result -} - -#[cfg(test)] -mod tests { - use ark_ff::UniformRand; - use ark_poly::univariate::DensePolynomial; - use ark_poly::{DenseUVPolynomial, Polynomial}; - use ark_std::test_rng; - use ark_test_curves::bls12_381::Fr; - - use crate::direct::DirectLDT; - use crate::domain::Radix2CosetDomain; - use crate::fri::prover::FRIProver; - use crate::fri::verifier::FRIVerifier; - use crate::fri::FRIParameters; - - #[test] - fn two_rounds_fri_test() { - // First, generate a low degree polynomial, and its evaluations. - let mut rng = test_rng(); - let poly = DensePolynomial::rand(32, &mut rng); - let offset = Fr::rand(&mut rng); - let domain_input = Radix2CosetDomain::new_radix2_coset(128, offset); - let evaluations_input = domain_input.evaluate(&poly); - - // Set up verifier parameter - let fri_parameters = FRIParameters::new(32, vec![1, 2, 1], domain_input); - let alphas: Vec<_> = (0..3).map(|_| Fr::rand(&mut rng)).collect(); - - // prover commits all round polynomial - let (domain_round_0, evaluations_round_0) = FRIProver::interactive_phase_single_round( - domain_input, - evaluations_input.clone(), - fri_parameters.localization_parameters[0], - alphas[0], - ); - - let (domain_round_1, evaluations_round_1) = FRIProver::interactive_phase_single_round( - domain_round_0, - evaluations_round_0.clone(), - fri_parameters.localization_parameters[1], - alphas[1], - ); - - let (expected_domain_final, evaluations_final) = FRIProver::interactive_phase_single_round( - domain_round_1, - evaluations_round_1.clone(), - fri_parameters.localization_parameters[2], - alphas[2], - ); - - // verifier prepare queries - let rand_coset_index = 31; - let (query_cosets, query_indices, domain_final) = - FRIVerifier::prepare_query(rand_coset_index, &fri_parameters); - assert_eq!(query_indices.len(), 3); - assert_eq!(domain_final, expected_domain_final); - - // prover generate answers to queries - let (indices, qi) = domain_input.query_position_to_coset( - query_indices[0], - fri_parameters.localization_parameters[0] as usize, - ); - let answer_input: Vec<_> = indices.iter().map(|&i| evaluations_input[i]).collect(); - assert_eq!(qi, query_cosets[0]); - - let (indices, q0) = domain_round_0.query_position_to_coset( - query_indices[1], - fri_parameters.localization_parameters[1] as usize, - ); - let answer_round_0: Vec<_> = indices.iter().map(|&i| evaluations_round_0[i]).collect(); - assert_eq!(q0, query_cosets[1]); - - let (indices, q1) = domain_round_1.query_position_to_coset( - query_indices[2], - fri_parameters.localization_parameters[2] as usize, - ); - let answer_round_1: Vec<_> = indices.iter().map(|&i| evaluations_round_1[i]).collect(); - assert_eq!(q1, query_cosets[2]); - - // sanity check: answer_round_i interpolate version contained in answer_round_i+1 - assert!(answer_round_0.contains(&qi.interpolate(answer_input.clone()).evaluate(&alphas[0]))); - assert!( - answer_round_1.contains(&q0.interpolate(answer_round_0.clone()).evaluate(&alphas[1])) - ); - assert!(evaluations_final - .contains(&q1.interpolate(answer_round_1.clone()).evaluate(&alphas[2]))); - - let total_shrink_factor: u64 = 
fri_parameters.localization_parameters.iter().sum(); - let final_poly_degree_bound = fri_parameters.tested_degree >> total_shrink_factor; - let final_polynomial = DirectLDT::generate_low_degree_coefficients( - domain_final, - evaluations_final, - final_poly_degree_bound as usize, - ); - - // verifier verifies consistency - let result = FRIVerifier::consistency_check( - &fri_parameters, - &query_indices, - &query_cosets, - &vec![answer_input, answer_round_0, answer_round_1], - &alphas, - &domain_final, - &final_polynomial, - ); - - assert!(result) - } -} diff --git a/src/ldt.rs b/src/ldt.rs new file mode 100644 index 0000000..9d1c814 --- /dev/null +++ b/src/ldt.rs @@ -0,0 +1,23 @@ +use ark_ff::FftField; + +pub trait Prover { + type Proof; + type ProverConfig; + type Witness; + fn new(prover_config: Self::ProverConfig) -> Self; + fn prove(&self, witness: &Self::Witness) -> Self::Proof; +} +pub trait Verifier { + type Statement; + type Proof; + type VerifierConfig; + fn new(verifier_config: Self::VerifierConfig) -> Self; + fn verify(&self, commitment: &Self::Statement, proof: &Self::Proof) -> bool; +} +pub trait LowDegreeTest { + type LDTConfig; + type Proof; + type Prover; + type Verifier; + fn new(ldt_config: Self::LDTConfig) -> (Self::Prover, Self::Verifier); +} diff --git a/src/lib.rs b/src/lib.rs index 2154ce1..81ea8f6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,25 +1,10 @@ #![cfg_attr(not(feature = "std"), no_std)] - -//! A crate for low-degree tests. -#![deny( - future_incompatible, - missing_docs, - non_shorthand_field_patterns, - renamed_and_removed_lints, - rust_2018_idioms, - stable_features, - trivial_casts, - trivial_numeric_casts, - unused, - variant_size_differences, - warnings -)] -#![forbid(unsafe_code)] - -/// Direct low-degree tests pub mod direct; - -/// Domain represented as coset. 
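The `ldt` traits introduced above fix the shape every low-degree test in the crate follows: one config, split by `LowDegreeTest::new` into a prover/verifier pair. A minimal toy instantiation of the traits as printed (all `Toy*` names are hypothetical, for illustration only; the real instance is the STIR triple later in this patch):

```rust
use crate::ldt::{LowDegreeTest, Prover, Verifier};

// Toy LDT: the "proof" is just a claimed degree, and verification compares
// it against the configured bound.
pub struct ToyProver;
pub struct ToyVerifier {
    max_degree: usize,
}
pub struct ToyLDT;

impl Prover for ToyProver {
    type Proof = usize;
    type ProverConfig = usize;
    type Witness = Vec<u64>; // stand-in for a coefficient vector
    fn new(_max_degree: usize) -> Self {
        ToyProver
    }
    fn prove(&self, witness: &Self::Witness) -> usize {
        witness.len().saturating_sub(1) // degree of the witness polynomial
    }
}

impl Verifier for ToyVerifier {
    type Statement = ();
    type Proof = usize;
    type VerifierConfig = usize;
    fn new(max_degree: usize) -> Self {
        ToyVerifier { max_degree }
    }
    fn verify(&self, _statement: &(), proof: &usize) -> bool {
        *proof <= self.max_degree
    }
}

impl LowDegreeTest for ToyLDT {
    type LDTConfig = usize;
    type Proof = usize;
    type Prover = ToyProver;
    type Verifier = ToyVerifier;
    fn new(max_degree: usize) -> (ToyProver, ToyVerifier) {
        (ToyProver::new(max_degree), ToyVerifier::new(max_degree))
    }
}
```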
pub mod domain; -/// Implementations for FRI Protocol -pub mod fri; +pub mod ldt; +pub mod poly_utils; +pub mod statement; +pub mod stir; +pub mod test_helpers; +pub mod utils; +pub mod witness; diff --git a/src/poly_utils/bs08.rs b/src/poly_utils/bs08.rs new file mode 100644 index 0000000..4711539 --- /dev/null +++ b/src/poly_utils/bs08.rs @@ -0,0 +1,154 @@ +use ark_ff::Field; +use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial, Polynomial}; + +#[cfg(not(feature = "std"))] +use ark_std::{vec, vec::Vec}; + +use crate::utils; + +pub struct BivariatePolynomial(pub Vec>); + +// Takes a polynomial and interprets it as a matrix of coefficients +// this exactly corresponds to computing the BS08 bivariate polynomial with +// q(X) = X^cols +pub fn to_coefficient_matrix( + f: &DensePolynomial, + rows: usize, + cols: usize, +) -> BivariatePolynomial { + if f.degree() + 1 > rows * cols { + panic!("Degree of polynomial is too large for matrix"); + } + + let mut matrix = vec![vec![F::ZERO; cols]; rows]; + + for (i, coeff) in f.coeffs.iter().enumerate() { + matrix[i / cols][i % cols] = *coeff; + } + + BivariatePolynomial(matrix) +} + +impl BivariatePolynomial +where + F: Field, +{ + pub fn degree_x(&self) -> usize { + self.rows() - 1 + } + + pub fn rows(&self) -> usize { + self.0.len() + } + + pub fn degree_y(&self) -> usize { + self.cols() - 1 + } + + pub fn cols(&self) -> usize { + self.0[0].len() + } + + pub fn evaluate(&self, x: F, y: F) -> F { + let mut res = F::zero(); + for row in 0..self.rows() { + for col in 0..self.cols() { + res += self.0[row][col] * x.pow([row as u64]) * y.pow([col as u64]); + } + } + res + } + + pub fn fold_by_col(&self, alpha: F) -> DensePolynomial { + let transposed = utils::transpose(self.0.clone()); + + let mut res = DensePolynomial::from_coefficients_vec(vec![]); + + let mut pow = F::ONE; + for c in transposed { + res += &DensePolynomial::from_coefficients_vec(c.iter().map(|f| pow * f).collect()); + pow *= alpha; + } + + res + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test_helpers::fields::Field64 as TestField; + use ark_ff::AdditiveGroup; + use ark_std::rand::Rng; + + fn test_bivariate( + poly: &DensePolynomial, + matrix: &BivariatePolynomial, + cols: usize, + ) { + let mut rng = ark_std::test_rng(); + + let point: TestField = rng.gen(); + assert_eq!( + poly.evaluate(&point), + matrix.evaluate(point.pow([cols as u64]), point) + ); + } + + #[test] + fn neat_example() { + let poly = DensePolynomial::from_coefficients_vec(vec![ + TestField::from(0), + TestField::from(1), + TestField::from(2), + TestField::from(3), + TestField::from(4), + TestField::from(5), + ]); + let matrix = to_coefficient_matrix(&poly, 3, 2); + + for r in 0..3 { + for c in 0..2 { + assert_eq!(matrix.0[r][c], TestField::from((2 * r + c) as u8)); + } + } + test_bivariate(&poly, &matrix, 2); + } + + #[test] + fn shorter_than_expected() { + let poly = DensePolynomial::from_coefficients_vec(vec![ + TestField::from(0), + TestField::from(1), + TestField::from(2), + TestField::from(3), + TestField::from(4), + TestField::from(5), + ]); + let matrix = to_coefficient_matrix(&poly, 4, 2); + + for r in 0..3 { + for c in 0..2 { + assert_eq!(matrix.0[r][c], TestField::from((2 * r + c) as u8)); + } + } + for c in 0..2 { + assert_eq!(matrix.0[3][c], TestField::ZERO); + } + test_bivariate(&poly, &matrix, 2); + } + + #[test] + #[should_panic] + fn longer_than_expected() { + let poly = DensePolynomial::from_coefficients_vec(vec![ + TestField::from(0), + TestField::from(1), + 
TestField::from(2),
+            TestField::from(3),
+            TestField::from(4),
+            TestField::from(5),
+        ]);
+        let _matrix = to_coefficient_matrix(&poly, 2, 2);
+    }
+}
diff --git a/src/poly_utils/folding.rs b/src/poly_utils/folding.rs
new file mode 100644
index 0000000..75c506e
--- /dev/null
+++ b/src/poly_utils/folding.rs
@@ -0,0 +1,65 @@
+use ark_ff::Field;
+use ark_poly::{univariate::DensePolynomial, Polynomial};
+
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
+
+use crate::poly_utils::interpolation;
+
+use super::bs08;
+
+pub fn poly_fold<F: Field>(
+    f: &DensePolynomial<F>,
+    folding_factor: usize,
+    folding_randomness: F,
+) -> DensePolynomial<F> {
+    let degree = f.degree() + 1;
+    let q_poly = bs08::to_coefficient_matrix(f, degree.div_ceil(folding_factor), folding_factor);
+    q_poly.fold_by_col(folding_randomness)
+}
+
+// f_answers is a vector containing B_l and f(B_l), where l is the evaluation point
+// Recall that B_l has x \in B_l \iff x^k = l
+pub fn fold<F: Field>(f_answers: Vec<(F, F)>, folding_factor: usize, folding_randomness: F) -> F {
+    assert_eq!(f_answers.len(), folding_factor);
+    interpolation::evaluate_interpolation(f_answers.iter(), folding_randomness)
+}
+
+#[cfg(test)]
+mod tests {
+    use ark_ff::FftField;
+    use ark_poly::DenseUVPolynomial;
+
+    use super::*;
+    use crate::test_helpers::fields::Field64 as TestField;
+
+    #[test]
+    fn test_folding() {
+        let mut rng = ark_std::test_rng();
+        let poly = DensePolynomial::rand(16, &mut rng);
+
+        let folding_factor = 2;
+        let folding_randomness = TestField::from(5);
+
+        let poly_fold = poly_fold(&poly, folding_factor, folding_randomness);
+
+        let root_of_unity = TestField::get_root_of_unity(256).unwrap();
+
+        let evalpoint = root_of_unity.pow([folding_factor as u64]);
+        let beta_l = &[root_of_unity, root_of_unity.pow([1 + 128])];
+
+        for beta in beta_l {
+            assert_eq!(beta.pow([folding_factor as u64]), evalpoint);
+        }
+
+        let f_answers = beta_l
+            .iter()
+            .map(|x| (*x, poly.evaluate(x)))
+            .collect::<Vec<_>>();
+
+        assert_eq!(
+            poly_fold.evaluate(&evalpoint),
+            fold(f_answers, folding_factor, folding_randomness)
+        );
+    }
+}
diff --git a/src/poly_utils/interpolation.rs b/src/poly_utils/interpolation.rs
new file mode 100644
index 0000000..028e693
--- /dev/null
+++ b/src/poly_utils/interpolation.rs
@@ -0,0 +1,268 @@
+use ark_ff::{batch_inversion, FftField, Field};
+use ark_poly::{
+    univariate::DensePolynomial, DenseUVPolynomial, Evaluations, Polynomial, Radix2EvaluationDomain,
+};
+
+#[cfg(not(feature = "std"))]
+use ark_std::{vec, vec::Vec};
+
+use crate::utils;
+
+// Computes a polynomial that vanishes on the given points
+pub fn vanishing_poly<'a, F: Field>(points: impl IntoIterator<Item = &'a F>) -> DensePolynomial<F> {
+    // Compute the product \prod_a (x - a)
+    let mut vanishing_poly: DensePolynomial<_> =
+        DensePolynomial::from_coefficients_slice(&[F::ONE]);
+    for a in points {
+        vanishing_poly =
+            vanishing_poly.naive_mul(&DensePolynomial::from_coefficients_slice(&[-*a, F::ONE]));
+    }
+    vanishing_poly
+}
+
+// Computes a polynomial that interpolates the given points with the given answers
+pub fn naive_interpolation<'a, F: Field>(
+    points: impl IntoIterator<Item = &'a (F, F)>,
+) -> DensePolynomial<F> {
+    let points: Vec<_> = points.into_iter().collect();
+    let vanishing_poly = vanishing_poly(points.iter().map(|(a, _)| a));
+
+    // Compute the ans polynomial (this is just a naive interpolation)
+    let mut ans_polynomial = DensePolynomial::from_coefficients_slice(&[]);
+    for (a, eval) in points.iter() {
+        // Compute the vanishing polynomial with the (x - a) factor removed
+        let vanishing_adjusted =
+            &vanishing_poly / &DensePolynomial::from_coefficients_slice(&[-*a, F::ONE]);
+
+        // Now, we can scale to get the right weight
+        let scale_factor = *eval / vanishing_adjusted.evaluate(a);
+        ans_polynomial = ans_polynomial
+            + DensePolynomial::from_coefficients_vec(
+                vanishing_adjusted
+                    .iter()
+                    .map(|x| *x * scale_factor)
+                    .collect(),
+            );
+    }
+    ans_polynomial
+}
+
+// Given a generator and a coset offset, computes the interpolating polynomial
+pub fn fft_interpolate_naive<'a, F: FftField>(
+    generator: F,
+    coset_offset: F,
+    points: impl IntoIterator<Item = &'a F>,
+) -> DensePolynomial<F> {
+    let points: Vec<_> = points.into_iter().cloned().collect();
+    let folding_factor = points.len();
+    assert!(utils::is_power_of_two(folding_factor));
+
+    let size_as_field_element = F::from(folding_factor as u64);
+
+    // Do some batch inversion
+    let mut to_invert = vec![size_as_field_element, coset_offset, generator];
+    batch_inversion(&mut to_invert);
+    let size_inv = to_invert[0];
+    let coset_offset_inv = to_invert[1];
+    let generator_inv = to_invert[2];
+
+    let domain = Radix2EvaluationDomain {
+        size: folding_factor as u64,
+        log_size_of_group: folding_factor.ilog2(),
+        size_as_field_element,
+        size_inv,
+        group_gen: generator,
+        group_gen_inv: generator_inv,
+        offset: coset_offset,
+        offset_inv: coset_offset_inv,
+        offset_pow_size: coset_offset.pow([folding_factor as u64]),
+    };
+
+    let evaluations = Evaluations::from_vec_and_domain(points, domain);
+
+    evaluations.interpolate()
+}
+
+// Given a generator and a coset offset, computes the interpolating polynomial
+// Requires the inverses of the generator and coset offset to be supplied (and can
+// thus be more efficient)
+pub fn fft_interpolate<'a, F: FftField>(
+    generator: F,
+    coset_offset: F,
+    generator_inv: F,
+    coset_offset_inv: F,
+    size_inv: F,
+    points: impl IntoIterator<Item = &'a F>,
+) -> DensePolynomial<F> {
+    let points: Vec<_> = points.into_iter().cloned().collect();
+    let folding_factor = points.len();
+    assert!(utils::is_power_of_two(folding_factor));
+
+    let size_as_field_element = F::from(folding_factor as u64);
+
+    let domain = Radix2EvaluationDomain {
+        size: folding_factor as u64,
+        log_size_of_group: folding_factor.ilog2(),
+        size_as_field_element,
+        size_inv,
+        group_gen: generator,
+        group_gen_inv: generator_inv,
+        offset: coset_offset,
+        offset_inv: coset_offset_inv,
+        offset_pow_size: coset_offset.pow([folding_factor as u64]),
+    };
+
+    let evaluations = Evaluations::from_vec_and_domain(points, domain);
+
+    evaluations.interpolate()
+}
+
+// Evaluates, at `point`, the polynomial that interpolates the given points with
+// the given answers
+pub fn evaluate_interpolation<'a, F: Field>(
+    points: impl IntoIterator<Item = &'a (F, F)>,
+    point: F,
+) -> F {
+    let points = points.into_iter().collect::<Vec<_>>();
+
+    for (p, a) in points.iter() {
+        if p == &point {
+            return *a;
+        }
+    }
+
+    let denominators: Vec<_> = points
+        .iter()
+        .map(|(p, _)| p)
+        .enumerate()
+        .map(|(i, xi)| {
+            points
+                .iter()
+                .map(|(p, _)| p)
+                .enumerate()
+                .filter(|(j, _)| &i != j)
+                .map(|(_, xj)| *xi - *xj)
+                .product::<F>()
+        })
+        .collect();
+
+    // Do a batch inversion
+    let mut denominators = points
+        .iter()
+        .zip(denominators)
+        .map(|((xi, _), d)| d * (point - xi))
+        .collect::<Vec<_>>();
+    batch_inversion(&mut denominators);
+
+    let res: F = points
+        .iter()
+        .zip(denominators)
+        .map(|((_, a), d)| *a * d)
+        .sum();
+
+    res * points.iter().map(|(xi, _)| point - xi).product::<F>()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::{domain::Domain, test_helpers::fields::Field64 as TestField};
+    use ark_ff::AdditiveGroup;
+    use ark_poly::domain::EvaluationDomain;
+
+    #[test]
+    fn test_ans_polynomial() {
+        let points = vec![
+            (TestField::from(5), TestField::from(10)),
+            (TestField::from(9), TestField::from(7)),
+        ];
+        let ans_poly = naive_interpolation(&points);
+        for (x, y) in points.clone() {
+            assert_eq!(ans_poly.evaluate(&x), y);
+            assert_eq!(evaluate_interpolation(&points, x), y);
+        }
+
+        let ood_point = TestField::from(4999);
+
+        assert_eq!(
+            ans_poly.evaluate(&ood_point),
+            evaluate_interpolation(&points, ood_point)
+        );
+    }
+
+    #[test]
+    fn test_fft_interpolate() {
+        let degree = 16;
+        let folding_factor = 8;
+        let polynomial = DensePolynomial::from_coefficients_vec(vec![TestField::from(1); degree]);
+        let domain = Domain::<TestField>::new(degree, 3).unwrap();
+        let evals = polynomial
+            .evaluate_over_domain_by_ref(domain.backing_domain)
+            .evals;
+        let elements = domain.backing_domain.elements().collect::<Vec<_>>();
+        let reshaped_elements = utils::stack_evaluations(elements, folding_factor);
+        let reshaped_evaluations = utils::stack_evaluations(evals, folding_factor);
+
+        // Computed using the naive interpolation
+        let g_evaluations: Vec<_> = reshaped_evaluations
+            .iter()
+            .enumerate()
+            .map(|(i, evals)| {
+                let interpol = evals
+                    .iter()
+                    .enumerate()
+                    .map(|(j, &e)| (reshaped_elements[i][j], e))
+                    .collect::<Vec<_>>();
+
+                naive_interpolation(&interpol)
+            })
+            .collect();
+
+        let generator = domain
+            .backing_domain
+            .element(domain.size() / folding_factor);
+        let g_fft_evaluations: Vec<_> = reshaped_evaluations
+            .iter()
+            .enumerate()
+            .map(|(i, evals)| {
+                let coset_offset = domain.backing_domain.element(i);
+                fft_interpolate_naive(generator, coset_offset, evals)
+            })
+            .collect();
+
+        let generator_inv = generator.inverse().unwrap();
+        let size_inv = TestField::from(folding_factor as u64).inverse().unwrap();
+        let g_fft_fast_evaluations: Vec<_> = reshaped_evaluations
+            .iter()
+            .enumerate()
+            .map(|(i, evals)| {
+                let coset_offset = domain.backing_domain.element(i);
+                let coset_offset_inv = domain.backing_domain.element(domain.size() - i);
+                fft_interpolate(
+                    generator,
+                    coset_offset,
+                    generator_inv,
+                    coset_offset_inv,
+                    size_inv,
+                    evals,
+                )
+            })
+            .collect();
+
+        assert_eq!(g_evaluations, g_fft_evaluations);
+        assert_eq!(g_evaluations, g_fft_fast_evaluations);
+    }
+
+    #[test]
+    fn test_vanishing_poly() {
+        let points = vec![
+            TestField::from(5),
+            TestField::from(10),
+            TestField::from(9),
+            TestField::from(7),
+        ];
+        let vanishing_poly = vanishing_poly(&points);
+        for x in points {
+            assert_eq!(vanishing_poly.evaluate(&x), TestField::ZERO);
+        }
+    }
+}
diff --git a/src/poly_utils/mod.rs b/src/poly_utils/mod.rs
new file mode 100644
index 0000000..3f808e8
--- /dev/null
+++ b/src/poly_utils/mod.rs
@@ -0,0 +1,4 @@
+pub mod bs08;
+pub mod folding;
+pub mod interpolation;
+pub mod quotient;
diff --git a/src/poly_utils/quotient.rs b/src/poly_utils/quotient.rs
new file mode 100644
index 0000000..7101563
--- /dev/null
+++ b/src/poly_utils/quotient.rs
@@ -0,0 +1,98 @@
+use ark_ff::{FftField, Field};
+use ark_poly::{univariate::DensePolynomial, Polynomial};
+
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
+
+use super::interpolation;
+
+// Compute the quotient polynomial (f - Ans_S) / V_S
+pub fn poly_quotient<F: FftField>(poly: &DensePolynomial<F>, points: &[F]) -> DensePolynomial<F> {
+    let evaluations: Vec<_> = points.iter().map(|x| (*x, poly.evaluate(x))).collect();
+    let ans_polynomial = interpolation::naive_interpolation(evaluations.iter());
+    let vanishing_poly = interpolation::vanishing_poly(points);
+    let numerator = poly - &ans_polynomial;
+
+    //
TODO: Is this efficient or should FFT? + &numerator / &vanishing_poly +} + +// This is Quotient(f, S, Ans, Fill) in the paper +pub fn quotient<'a, F: Field>( + claimed_eval: F, + evaluation_point: F, + answers: impl IntoIterator, +) -> F { + let answers: Vec<_> = answers.into_iter().copied().collect(); + + // Check if the evaluation point is in the domain + for (dom, _) in answers.iter() { + if evaluation_point == *dom { + panic!("Evaluation point is in the domain"); + } + } + // Now, compute the ans polynomial + let ans_polynomial = interpolation::naive_interpolation(&answers); + let ans_eval = ans_polynomial.evaluate(&evaluation_point); + + let num = claimed_eval - ans_eval; + let denom = answers + .iter() + .map(|x| evaluation_point - x.0) + .product::(); + + num * denom.inverse().unwrap() +} + +// Allows to amortize the evaluation of the quotient polynomial +pub fn quotient_with_hint<'a, F: Field>( + claimed_eval: F, + evaluation_point: F, + quotient_set: impl IntoIterator, + //ans_polynomial: &DensePolynomial, + denom_hint: F, + ans_eval: F, +) -> F { + let quotient_set: Vec<_> = quotient_set.into_iter().copied().collect(); + + // Check if the evaluation point is in the domain + for dom in quotient_set.iter() { + if evaluation_point == *dom { + panic!("Evaluation point is in the domain"); + } + } + + let num = claimed_eval - ans_eval; + + num * denom_hint +} + +#[cfg(test)] +mod tests { + use ark_poly::DenseUVPolynomial; + use ark_std::rand::Rng; + use ark_std::vec; + + use super::*; + use crate::test_helpers::fields::Field64 as TestField; + + #[test] + fn test_quotient() { + let mut rng = ark_std::test_rng(); + + let poly = DensePolynomial::rand(10, &mut rng); + let points = vec![TestField::from(0), TestField::from(1)]; + + let quotient_poly = poly_quotient(&poly, &points); + let ans = points + .iter() + .map(|x| (*x, poly.evaluate(x))) + .collect::>(); + + let test_point = rng.gen(); // Test at random point + assert_eq!( + quotient(poly.evaluate(&test_point), test_point, &ans,), + quotient_poly.evaluate(&test_point) + ); + } +} diff --git a/src/statement/mod.rs b/src/statement/mod.rs new file mode 100644 index 0000000..4f34dfd --- /dev/null +++ b/src/statement/mod.rs @@ -0,0 +1,9 @@ +use ark_crypto_primitives::merkle_tree::Config as MerkleConfig; + +pub mod single; + +pub trait Statement { + type Argument; + fn new(argument: Self::Argument) -> Self; + fn commitment_digest(&self) -> M::InnerDigest; +} diff --git a/src/statement/single.rs b/src/statement/single.rs new file mode 100644 index 0000000..dad87eb --- /dev/null +++ b/src/statement/single.rs @@ -0,0 +1,20 @@ +use ark_crypto_primitives::merkle_tree::Config as MerkleConfig; + +pub struct SingleStatement +where + M: MerkleConfig, +{ + commitment_digest: M::InnerDigest, +} + +impl SingleStatement +where + M: MerkleConfig, +{ + pub fn new(commitment_digest: M::InnerDigest) -> Self { + Self { commitment_digest } + } + pub fn commitment_digest(&self) -> M::InnerDigest { + self.commitment_digest.clone() + } +} diff --git a/src/stir/config.rs b/src/stir/config.rs new file mode 100644 index 0000000..db50b7f --- /dev/null +++ b/src/stir/config.rs @@ -0,0 +1,52 @@ +use ark_crypto_primitives::{ + merkle_tree::{Config as MerkleConfig, LeafParam, TwoToOneParam}, + sponge::CryptographicSponge, +}; + +#[cfg(not(feature = "std"))] +use ark_std::vec::Vec; + +#[derive(Clone)] +pub struct STIRConfig { + pub folding_factor: usize, + pub num_rounds: usize, + pub merkle_leaf_hash_param: LeafParam, + pub merkle_two_to_one_param: TwoToOneParam, + pub 
num_out_of_domain_samples: usize, + pub num_proof_of_work_bits: Vec, + pub num_repetitions: Vec, + pub sponge_config: S::Config, + pub starting_degree: usize, + pub starting_rate: usize, + pub stopping_degree: usize, +} + +impl STIRConfig { + pub fn new( + folding_factor: usize, + num_rounds: usize, + merkle_leaf_hash_param: LeafParam, + merkle_two_to_one_param: TwoToOneParam, + num_out_of_domain_samples: usize, + num_proof_of_work_bits: Vec, + num_repetitions: Vec, + sponge_config: S::Config, + starting_degree: usize, + starting_rate: usize, + stopping_degree: usize, + ) -> Self { + Self { + folding_factor, + num_rounds, + merkle_leaf_hash_param, + merkle_two_to_one_param, + num_out_of_domain_samples, + num_proof_of_work_bits, + num_repetitions, + sponge_config, + starting_degree, + starting_rate, + stopping_degree, + } + } +} diff --git a/src/stir/ldt.rs b/src/stir/ldt.rs new file mode 100644 index 0000000..ca52d27 --- /dev/null +++ b/src/stir/ldt.rs @@ -0,0 +1,51 @@ +use crate::{ + ldt::{LowDegreeTest, Prover, Verifier}, + stir::{config::STIRConfig, proof::STIRProof, prover::STIRProver, verifier::STIRVerifier}, + witness::Witness, +}; +use ark_crypto_primitives::{ + merkle_tree::{Config as MerkleConfig, MerkleTree}, + sponge::{Absorb, CryptographicSponge}, +}; +use ark_ff::{FftField, PrimeField}; +use ark_std::marker::PhantomData; + +#[cfg(not(feature = "std"))] +use ark_std::vec::Vec; + +pub struct STIR +where + F: FftField, + M: MerkleConfig, + S: CryptographicSponge, + W: Witness, +{ + _field: PhantomData, + _merkle_config: PhantomData, + _sponge: PhantomData, + _witness: PhantomData, +} + +impl LowDegreeTest for STIR +where + F: FftField + PrimeField + Absorb, + M: MerkleConfig> + Clone, + M::InnerDigest: Absorb, + S: CryptographicSponge, + S::Config: Clone, + W: Witness, CommittedValues = Vec>> + + Clone, + W::ChallengeAnswers: Clone, +{ + type LDTConfig = STIRConfig; + type Proof = STIRProof; + type Prover = STIRProver; + type Verifier = STIRVerifier; + + fn new(config: Self::LDTConfig) -> (Self::Prover, Self::Verifier) { + ( + Self::Prover::new(config.clone()), + Self::Verifier::new(config), + ) + } +} diff --git a/src/stir/mod.rs b/src/stir/mod.rs new file mode 100644 index 0000000..7edfccb --- /dev/null +++ b/src/stir/mod.rs @@ -0,0 +1,77 @@ +pub mod config; +pub mod ldt; +pub mod proof; +pub mod prover; +pub mod prover_state; +pub mod verifier; +pub mod verifier_state; + +#[cfg(test)] +mod tests { + use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; + use ark_poly::univariate::DensePolynomial; + use ark_poly::DenseUVPolynomial; + use ark_std::test_rng; + + #[cfg(not(feature = "std"))] + use ark_std::vec; + + use crate::{ + domain::Domain, + ldt::{LowDegreeTest, Prover, Verifier}, + stir::{config::STIRConfig, ldt::STIR}, + test_helpers::{fields::Field256, fs, merkle_tree}, + witness::{ + single::{SingleWitness, SingleWitnessArgument}, + Witness, + }, + }; + + type TestField = Field256; + type TestMerkleConfig = merkle_tree::poseidon::MerkleTreeParams; + type TestSpongeConfig = PoseidonSponge; + type TestWitness = SingleWitness; + + #[test] + fn test_stir_ldt() { + // config + let mut rng = test_rng(); + let (merkle_leaf_hash_param, merkle_two_to_one_param) = + merkle_tree::poseidon::default_config::(&mut rng, 2); + let config: STIRConfig = STIRConfig { + folding_factor: 16, + num_rounds: 4, + merkle_leaf_hash_param: merkle_leaf_hash_param.clone(), + merkle_two_to_one_param: merkle_two_to_one_param.clone(), + num_out_of_domain_samples: 2, + num_proof_of_work_bits: 
vec![2, 2, 2, 2, 2], + num_repetitions: vec![2, 2, 2, 2, 2], + sponge_config: fs::poseidon::poseidon_test_config::(), + starting_degree: 16, + starting_rate: 8, + stopping_degree: 8, + }; + + // initialize + let (prover, verifier) = + STIR::::new(config.clone()); + + // random witness + let witness: SingleWitness = + SingleWitness::new(SingleWitnessArgument { + coeff: DensePolynomial::::rand(config.starting_degree - 1, &mut rng), + domain: Domain::::new(config.starting_degree, config.starting_rate) + .unwrap(), + folding_factor: 16, + merkle_leaf_hash_param, + merkle_two_to_one_param, + sponge_config: config.sponge_config, + }); + + // prove + let stir_proof = prover.prove(&witness); + + // verify + assert_eq!(verifier.verify(&witness.statement(), &stir_proof), true); + } +} diff --git a/src/stir/proof.rs b/src/stir/proof.rs new file mode 100644 index 0000000..e7e9077 --- /dev/null +++ b/src/stir/proof.rs @@ -0,0 +1,126 @@ +use ark_crypto_primitives::{ + merkle_tree::{Config as MerkleConfig, MultiPath}, + sponge::CryptographicSponge, +}; +use ark_ff::{batch_inversion, Field}; +use ark_poly::{univariate::DensePolynomial, Polynomial}; + +#[cfg(not(feature = "std"))] +use ark_std::vec::Vec; + +use super::config::STIRConfig; + +pub struct STIRProofRound { + pub coeff: DensePolynomial, + pub challenge_answers: MultiPath, + pub challenge_values: Vec>, + pub commitment_digest: M::InnerDigest, + pub config: STIRConfig, + pub is_final_round: bool, + pub last_round_commitment_digest: M::InnerDigest, + pub out_of_domain_evaluations: Vec, + pub proof_of_work_nonce: Option, + pub shake_coeff: DensePolynomial, +} + +impl>, S: CryptographicSponge> STIRProofRound { + pub fn new( + coeff: DensePolynomial, + challenge_answers: MultiPath, + challenge_values: Vec>, + commitment_digest: M::InnerDigest, + config: STIRConfig, + is_final_round: bool, + last_round_commitment_digest: M::InnerDigest, + out_of_domain_evaluations: Vec, + proof_of_work_nonce: Option, + shake_coeff: DensePolynomial, + ) -> Self { + STIRProofRound { + coeff, + challenge_answers, + challenge_values, + commitment_digest, + config, + is_final_round, + last_round_commitment_digest, + out_of_domain_evaluations, + proof_of_work_nonce, + shake_coeff, + } + } + pub fn verify_challenge_answers(&self) -> bool { + self.challenge_answers + .verify( + &self.config.merkle_leaf_hash_param, + &self.config.merkle_two_to_one_param, + &self.last_round_commitment_digest, + self.challenge_values.clone(), + ) + .unwrap() + } + pub fn verify_quotient_answers( + &self, + quotient_answers: &Vec<(F, F)>, + shake_randomness: &F, + ) -> bool { + let ans_eval = self.coeff.evaluate(&shake_randomness); + let mut denominators: Vec = quotient_answers + .iter() + .map(|(x, _)| *shake_randomness - x) + .collect(); + batch_inversion(&mut denominators); + let shake_eval = self.shake_coeff.evaluate(&shake_randomness); + if shake_eval + != quotient_answers + .iter() + .zip(denominators) + .map(|((_, y), d)| (ans_eval - y) * d) + .sum() + { + return false; + } + true + } +} + +impl Clone for STIRProofRound +where + F: Field, + M: MerkleConfig + Clone, + S: CryptographicSponge, + S::Config: Clone, +{ + fn clone(&self) -> Self { + STIRProofRound { + coeff: self.coeff.clone(), + challenge_answers: self.challenge_answers.clone(), + challenge_values: self.challenge_values.clone(), + commitment_digest: self.commitment_digest.clone(), + config: self.config.clone(), + is_final_round: self.is_final_round, + last_round_commitment_digest: self.last_round_commitment_digest.clone(), + 
out_of_domain_evaluations: self.out_of_domain_evaluations.clone(), + proof_of_work_nonce: self.proof_of_work_nonce.clone(), + shake_coeff: self.shake_coeff.clone(), + } + } +} + +pub struct STIRProof { + pub rounds: Vec>, +} + +impl Clone for STIRProof +where + F: Field, + M: MerkleConfig + Clone, + S: CryptographicSponge, + S::Config: Clone, +{ + fn clone(&self) -> Self { + STIRProof { + rounds: self.rounds.clone(), + } + } +} diff --git a/src/stir/prover.rs b/src/stir/prover.rs new file mode 100644 index 0000000..68c9f8e --- /dev/null +++ b/src/stir/prover.rs @@ -0,0 +1,74 @@ +use ark_crypto_primitives::{ + merkle_tree::{Config as MerkleConfig, MerkleTree}, + sponge::{Absorb, CryptographicSponge}, +}; +use ark_ff::{FftField, PrimeField}; +use ark_poly::Polynomial; +use ark_std::marker::PhantomData; + +#[cfg(not(feature = "std"))] +use ark_std::vec::Vec; + +use crate::{ + ldt::Prover, + stir::{ + config::STIRConfig, + proof::{STIRProof, STIRProofRound}, + prover_state::STIRProverState, + }, + witness::Witness, +}; + +pub struct STIRProver +where + F: FftField, + M: MerkleConfig, + M::InnerDigest: Absorb, + S: CryptographicSponge, + W: Witness, +{ + config: STIRConfig, + _field: PhantomData, + _merkle_config: PhantomData, + _sponge: PhantomData, +} + +impl Prover for STIRProver +where + F: FftField + PrimeField + Absorb, + M: MerkleConfig> + Clone, + M::InnerDigest: Absorb, + S: CryptographicSponge, + S::Config: Clone, + W: Witness, CommittedValues = Vec>> + + Clone, + W::ChallengeAnswers: Clone, +{ + type Witness = W; + type ProverConfig = STIRConfig; + type Proof = STIRProof; + + fn new(config: STIRConfig) -> Self { + Self { + config, + _field: PhantomData::, + _merkle_config: PhantomData::, + _sponge: PhantomData::, + } + } + fn prove(&self, witness: &W) -> Self::Proof { + assert!(witness.coeff().degree() < self.config.starting_degree); + + let rounds: Vec> = STIRProverState::::new( + witness.domain(), + witness.commitment(), + witness.committed_values(), + self.config.clone(), + witness.coeff(), + ) + .map(|round| round.proof()) + .collect(); + + STIRProof:: { rounds } + } +} diff --git a/src/stir/prover_state.rs b/src/stir/prover_state.rs new file mode 100644 index 0000000..69d8bed --- /dev/null +++ b/src/stir/prover_state.rs @@ -0,0 +1,349 @@ +use ark_crypto_primitives::{ + merkle_tree::{Config as MerkleConfig, MerkleTree, MultiPath}, + sponge::{Absorb, CryptographicSponge}, +}; +use ark_ff::{FftField, PrimeField}; +use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial, EvaluationDomain, Polynomial}; + +#[cfg(not(feature = "std"))] +use ark_std::{vec, vec::Vec}; + +use crate::{ + domain::Domain, + poly_utils, + utils::{dedup, proof_of_work, squeeze_integer, stack_evaluations}, +}; + +use super::{config::STIRConfig, proof::STIRProofRound}; + +pub struct STIRProverState +where + F: FftField, + M: MerkleConfig, + S: CryptographicSponge, +{ + answer_coeff: DensePolynomial, + domain: Domain, + challenge_answers: MultiPath, + challenge_values: Vec>, + challenges: Vec, + commitment: MerkleTree, + committed_values: Vec>, + config: STIRConfig, + folding_randomness: F, + last_round_commitment: MerkleTree, + last_round_committed_values: Vec>, + last_round_domain_size: usize, + out_of_domain_samples: Vec, + out_of_domain_evaluations: Vec, + proof_of_work_nonce: Option, + proximity_generator_randomness: F, + quotient_answers: Vec, + quotient_set: Vec, + round_num: usize, + shake_coeff: DensePolynomial, + sponge: S, + witness_coeff: DensePolynomial, +} + +impl STIRProverState +where + F: 
FftField + PrimeField + Absorb, + M: MerkleConfig> + Clone, + M::InnerDigest: Absorb, + S: CryptographicSponge, + S::Config: Clone, +{ + pub fn new( + domain: Domain, + commitment: MerkleTree, + committed_values: Vec>, + config: STIRConfig, + witness_coeff: DensePolynomial, + ) -> Self { + let mut sponge = S::new(&config.sponge_config); + sponge.absorb(&commitment.root()); + Self { + answer_coeff: DensePolynomial::from_coefficients_vec(vec![]), + domain: domain.clone(), + challenge_answers: MerkleTree::::new( + // this is never checked, it's meant to be the equivalent of vec![] + &config.merkle_leaf_hash_param, + &config.merkle_two_to_one_param, + &vec![vec![F::ZERO], vec![F::ZERO]], + ) + .unwrap() + .generate_multi_proof(vec![]) + .unwrap(), + challenge_values: vec![], + challenges: vec![], + commitment, + committed_values: committed_values.clone(), + config: config.clone(), + folding_randomness: sponge.squeeze_field_elements(1)[0], + last_round_domain_size: domain.size(), + last_round_commitment: MerkleTree::::new( + &config.merkle_leaf_hash_param, + &config.merkle_two_to_one_param, + &committed_values, + ) + .unwrap(), + last_round_committed_values: committed_values, + out_of_domain_samples: vec![], + out_of_domain_evaluations: vec![], + proof_of_work_nonce: None, + proximity_generator_randomness: F::one(), + quotient_answers: vec![], + quotient_set: vec![], + round_num: 0, + shake_coeff: DensePolynomial::from_coefficients_vec(vec![]), + sponge, + witness_coeff, + } + } + fn fold(&mut self) { + let folded_coeff = poly_utils::folding::poly_fold( + &self.witness_coeff.clone(), + self.config.folding_factor, + self.folding_randomness, + ); + if !self.is_final_round() { + self.last_round_domain_size = self.domain.size(); + let scaled_domain = self.domain.clone().scale_offset(2); + let evals = folded_coeff + .evaluate_over_domain_by_ref(scaled_domain.backing_domain) + .evals; + let folded_committed_values = stack_evaluations(evals, self.config.folding_factor); + self.witness_coeff = folded_coeff; + self.domain = scaled_domain; + self.last_round_committed_values = self.committed_values.clone(); + self.committed_values = folded_committed_values; + } + } + fn is_final_round(&self) -> bool { + self.round_num == self.config.num_rounds + } + pub fn proof(&self) -> STIRProofRound { + STIRProofRound { + coeff: self.answer_coeff.clone(), + challenge_values: self.challenge_values.clone(), + challenge_answers: self.challenge_answers.clone(), + commitment_digest: self.commitment.root(), + config: self.config.clone(), + is_final_round: self.is_final_round(), + last_round_commitment_digest: self.last_round_commitment.root(), + out_of_domain_evaluations: self.out_of_domain_evaluations.clone(), + proof_of_work_nonce: self.proof_of_work_nonce, + shake_coeff: self.shake_coeff.clone(), + } + } + fn sponge_absorb(&mut self, element: impl Absorb) { + self.sponge.absorb(&element); + } + fn sponge_squeeze(&mut self) -> F { + self.sponge.squeeze_field_elements(1)[0] + } + fn sponge_squeeze_multiple(&mut self, num_elements: usize) -> Vec { + self.sponge.squeeze_field_elements(num_elements) + } + fn update_challenges(&mut self) { + let (domain_size, committed_values, commitment) = match self.is_final_round() { + true => (self.domain.size(), &self.committed_values, &self.commitment), + false => ( + self.last_round_domain_size, + &self.last_round_committed_values, + &self.last_round_commitment, + ), + }; + self.challenges = + dedup((0..self.config.num_repetitions[self.round_num]).map(|_| { + squeeze_integer(&mut 
self.sponge, domain_size / self.config.folding_factor) + })); + self.challenge_values = self + .challenges + .iter() + .map(|index| committed_values[*index].clone()) + .collect(); + + self.challenge_answers = commitment + .generate_multi_proof(self.challenges.clone()) + .unwrap(); + } + fn update_coeffs(&mut self) { + // zip set and answers into Vec<(F, F)> + let zipped: Vec<(F, F)> = self + .quotient_set + .clone() + .into_iter() + .zip(self.quotient_answers.clone().into_iter()) + .collect(); + // answer_coeff + self.answer_coeff = poly_utils::interpolation::naive_interpolation(&zipped); + // shake_coeff + let mut shake_coeff = DensePolynomial::from_coefficients_vec(vec![]); + for (x, y) in &zipped { + let num_coeff = &self.answer_coeff - &DensePolynomial::from_coefficients_vec(vec![*y]); + let den_coeff = DensePolynomial::from_coefficients_vec(vec![-*x, F::ONE]); + shake_coeff = shake_coeff + (&num_coeff / &den_coeff); + } + self.shake_coeff = shake_coeff; + // quotient_coeff + let quotient_coeff = + poly_utils::quotient::poly_quotient(&self.witness_coeff, &self.quotient_set); + // scaling_coeff: 1 + r * x + r^2 * x^2 + ... + r^n * x^n + let scaling_coeff = DensePolynomial::from_coefficients_vec( + (0..=self.quotient_set.len()) + .map(|i| self.proximity_generator_randomness.pow([i as u64])) + .collect(), + ); + // witness_coeff + self.witness_coeff = "ient_coeff * &scaling_coeff; + } + fn update_commitment(&mut self) { + self.last_round_commitment = self.commitment.clone(); + self.commitment = MerkleTree::::new( + &self.config.merkle_leaf_hash_param, + &self.config.merkle_two_to_one_param, + &self.committed_values, + ) + .unwrap(); + // put it in the sponge + self.sponge_absorb(&self.commitment.root()); + } + fn update_folding_randomness(&mut self) { + self.folding_randomness = self.sponge_squeeze(); + } + fn update_out_of_domain_samples(&mut self) { + self.out_of_domain_samples = + self.sponge_squeeze_multiple(self.config.num_out_of_domain_samples); + self.out_of_domain_evaluations = self + .out_of_domain_samples + .iter() + .map(|point| self.witness_coeff.evaluate(point)) + .collect(); + self.sponge_absorb(&self.out_of_domain_evaluations.clone()); + } + fn update_proof_of_work(&mut self) { + self.proof_of_work_nonce = proof_of_work( + &mut self.sponge, + self.config.num_proof_of_work_bits[self.round_num], + ); + } + fn update_proximity_generator_randomness(&mut self) { + self.proximity_generator_randomness = self.sponge_squeeze(); + } + fn update_quotient_answers(&mut self) { + let stir_randomness: Vec = self + .challenges + .iter() + .map(|index| { + self.domain + .scale(self.config.folding_factor) + .element(*index) + }) + .collect(); + self.quotient_set = self + .out_of_domain_samples + .clone() + .into_iter() + .chain(stir_randomness.iter().cloned()) + .collect(); + self.quotient_answers = self + .quotient_set + .iter() + .map(|x| self.witness_coeff.evaluate(x)) + .collect(); + } + fn update_round_num(&mut self) { + self.round_num = self.round_num + 1; + } +} + +impl Iterator for STIRProverState +where + F: FftField + PrimeField + Absorb, + M: MerkleConfig> + Clone, + M::InnerDigest: Absorb, + S: CryptographicSponge, + S::Config: Clone, +{ + type Item = Self; + + fn next(&mut self) -> Option { + if self.round_num < self.config.num_rounds { + // Step 1: Perform fold/scale operation + self.fold(); + + if !self.is_final_round() { + // Step 2: Generate commitment on the folded stuff + self.update_commitment(); + + // Step 3: Out of domain samples + self.update_out_of_domain_samples(); 
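+                // Transcript note: the sponge has already absorbed the new
+                // Merkle root (Step 2) and the out-of-domain evaluations
+                // (Step 3), so the randomness squeezed in Step 4 is bound to
+                // both commitments.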
+
+                // Step 4: Squeeze some randomness
+                self.update_proximity_generator_randomness();
+                self.update_folding_randomness();
+            }
+
+            // Step 5: Generate challenges and answers
+            self.update_challenges();
+
+            // Step 6: Proof of work
+            self.update_proof_of_work();
+
+            if !self.is_final_round() {
+                // Step 7: Squeeze more randomness (used only by the verifier)
+                let _shake_randomness: F = self.sponge_squeeze();
+
+                // Step 8: Generate quotient set and answers
+                self.update_quotient_answers();
+
+                // Step 9: Compute coeffs
+                self.update_coeffs();
+
+                // Step 10: Increment the round counter
+                self.update_round_num();
+            }
+            Some(self.clone())
+        } else {
+            None
+        }
+    }
+}
+
+impl<F, M, S> Clone for STIRProverState<F, M, S>
+where
+    F: FftField + PrimeField + Absorb,
+    M: MerkleConfig<Leaf = Vec<F>> + Clone,
+    M::InnerDigest: Absorb,
+    S: CryptographicSponge,
+    S::Config: Clone,
+{
+    fn clone(&self) -> Self {
+        STIRProverState {
+            answer_coeff: self.answer_coeff.clone(),
+            domain: self.domain.clone(),
+            challenge_answers: self.challenge_answers.clone(),
+            challenge_values: self.challenge_values.clone(),
+            challenges: self.challenges.clone(),
+            commitment: self.commitment.clone(),
+            committed_values: self.committed_values.clone(),
+            config: self.config.clone(),
+            folding_randomness: self.folding_randomness,
+            last_round_commitment: self.last_round_commitment.clone(),
+            last_round_committed_values: self.last_round_committed_values.clone(),
+            last_round_domain_size: self.last_round_domain_size,
+            out_of_domain_samples: self.out_of_domain_samples.clone(),
+            out_of_domain_evaluations: self.out_of_domain_evaluations.clone(),
+            proof_of_work_nonce: self.proof_of_work_nonce,
+            proximity_generator_randomness: self.proximity_generator_randomness,
+            quotient_answers: self.quotient_answers.clone(),
+            quotient_set: self.quotient_set.clone(),
+            round_num: self.round_num,
+            shake_coeff: self.shake_coeff.clone(),
+            sponge: self.sponge.clone(),
+            witness_coeff: self.witness_coeff.clone(),
+        }
+    }
+}
diff --git a/src/stir/verifier.rs b/src/stir/verifier.rs
new file mode 100644
index 0000000..3440e9c
--- /dev/null
+++ b/src/stir/verifier.rs
@@ -0,0 +1,68 @@
+use ark_crypto_primitives::{
+    merkle_tree::Config as MerkleConfig,
+    sponge::{Absorb, CryptographicSponge},
+};
+use ark_ff::{FftField, PrimeField};
+use ark_poly::Polynomial;
+use ark_std::marker::PhantomData;
+
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
+
+use crate::{
+    ldt::Verifier,
+    statement::single::SingleStatement,
+    stir::{config::STIRConfig, proof::STIRProof, verifier_state::STIRVerifierState},
+    witness::Witness,
+};
+
+pub struct STIRVerifier<F, M, S, W>
+where
+    F: FftField,
+    M: MerkleConfig,
+    S: CryptographicSponge,
+    W: Witness<F, M, S>,
+{
+    config: STIRConfig<M, S>,
+    _field: PhantomData<F>,
+    _merkle_config: PhantomData<M>,
+    _sponge: PhantomData<S>,
+    _witness: PhantomData<W>,
+}
+impl<F, M, S, W> Verifier for STIRVerifier<F, M, S, W>
+where
+    F: FftField + PrimeField + Absorb,
+    M: MerkleConfig<Leaf = Vec<F>> + Clone,
+    M::InnerDigest: Absorb,
+    S: CryptographicSponge,
+    S::Config: Clone,
+    W: Witness<F, M, S> + Clone,
+    W::ChallengeAnswers: Clone,
+{
+    type Statement = SingleStatement<M>;
+    type VerifierConfig = STIRConfig<M, S>;
+    type Proof = STIRProof<F, M, S>;
+
+    fn new(config: STIRConfig<M, S>) -> Self {
+        Self {
+            config,
+            _field: PhantomData::<F>,
+            _merkle_config: PhantomData::<M>,
+            _sponge: PhantomData::<S>,
+            _witness: PhantomData::<W>,
+        }
+    }
+    fn verify(&self, claim: &Self::Statement, proof: &Self::Proof) -> bool {
+        if proof.rounds.last().unwrap().coeff.degree() + 1 > self.config.stopping_degree {
+            return false;
+        }
+
+        STIRVerifierState::new(
+            self.config.clone(),
claim.commitment_digest(), + proof.clone(), + ) + .into_iter() + .all(|state| state.is_verified() == true) + } +} diff --git a/src/stir/verifier_state.rs b/src/stir/verifier_state.rs new file mode 100644 index 0000000..d662a25 --- /dev/null +++ b/src/stir/verifier_state.rs @@ -0,0 +1,551 @@ +use ark_crypto_primitives::{ + merkle_tree::Config as MerkleConfig, + sponge::{Absorb, CryptographicSponge}, +}; +use ark_ff::{batch_inversion, FftField, PrimeField}; +use ark_poly::{ + univariate::DensePolynomial, DenseUVPolynomial, EvaluationDomain, Polynomial, + Radix2EvaluationDomain, +}; + +#[cfg(not(feature = "std"))] +use ark_std::{vec, vec::Vec}; + +use itertools::izip; + +use crate::{ + domain::Domain, + poly_utils, + utils::{dedup, proof_of_work_verify, squeeze_integer}, +}; + +use super::{config::STIRConfig, proof::STIRProof}; + +pub struct STIRVerifierState +where + F: FftField, + M: MerkleConfig, + S: CryptographicSponge, +{ + comb_randomness: F, + config: STIRConfig, + domain_gen: F, + domain_offset: F, + domain_size: usize, + folding_randomness: F, + interpolating_coeff: DensePolynomial, + is_verified: bool, // NOTE: corresponds to successful "transition" from state n - 1 --> n, hence for n = 0 set true + proof: STIRProof, + quotient_set: Vec, + root_of_unity: F, + round_num: usize, + sponge: S, +} + +impl STIRVerifierState +where + F: FftField + PrimeField + Absorb, + M: MerkleConfig> + Clone, + M::InnerDigest: Absorb, + S: CryptographicSponge, + S::Config: Clone, +{ + pub fn new( + config: STIRConfig, + commitment_digest: M::InnerDigest, + proof: STIRProof, + ) -> Self { + let mut sponge = S::new(&config.sponge_config); + sponge.absorb(&commitment_digest); + let folding_randomness = sponge.squeeze_field_elements(1)[0]; + + let domain = Domain::::new(config.starting_degree, config.starting_rate).unwrap(); + + let domain_gen = domain.element(1); + let domain_size = domain.size(); + Self { + comb_randomness: F::zero(), + config, + domain_gen, + domain_offset: F::one(), + domain_size, + folding_randomness, + proof, + interpolating_coeff: DensePolynomial::from_coefficients_vec(vec![]), + is_verified: true, + quotient_set: vec![], + root_of_unity: domain_gen, + round_num: 0, + sponge, + } + } + fn answer_evaluations( + &self, + coset_offsets: Vec, + coset_offsets_inv: Vec, + generator: F, + generator_inv: F, + interpolating_coeff: DensePolynomial, + size: F, + size_inv: F, + ) -> Vec> { + coset_offsets + .iter() + .zip(&coset_offsets_inv) + .map(|(coset_offset, coset_offset_inv)| match self.round_num { + 0 => vec![F::ONE; self.config.folding_factor], + _ => { + let domain = Radix2EvaluationDomain { + size: self.config.folding_factor as u64, + log_size_of_group: self.config.folding_factor.ilog2(), + size_as_field_element: size, + size_inv, + group_gen: generator, + group_gen_inv: generator_inv, + offset: *coset_offset, + offset_inv: *coset_offset_inv, + offset_pow_size: coset_offset.pow([self.config.folding_factor as u64]), + }; + interpolating_coeff + .clone() + .evaluate_over_domain(domain) + .evals + } + }) + .collect() + } + fn common_factors(&self, query_sets: Vec>) -> Vec> { + let common_factor_scale = self.comb_randomness; + query_sets + .into_iter() + .map(|query_set| { + query_set + .into_iter() + .map(|entry| F::ONE - common_factor_scale * entry) + .collect() + }) + .collect() + } + fn coset_offsets(&self, randomness_indices: Vec) -> Vec { + randomness_indices + .iter() + .map(|stir_randomness_index| { + self.domain_offset * self.domain_gen.pow([*stir_randomness_index as u64]) + }) 
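+            // i.e. the queried coset has offset h * g^i, where h is the
+            // current domain offset, g its generator, and i the queried index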
+ .collect() + } + fn denominators(&self, query_sets: Vec>, quotient_set: Vec) -> Vec> { + query_sets + .iter() + .map(|query_set| match self.round_num { + 0 => vec![F::ONE; query_set.len()], + _ => query_set + .iter() + .map(|eval_point| quotient_set.iter().map(|x| *eval_point - x).product::()) + .collect(), + }) + .collect() + } + fn folded_answers( + &self, + answer_evaluations: &Vec>, + common_factors_inv: &Vec>, + coset_offsets: &Vec, + coset_offsets_inv: &Vec, + denominators_inv: &Vec>, + domain_gen: F, + domain_offset: F, + generator: F, + generator_inv: F, + oracle_answers: Vec>, + query_sets: &Vec>, + randomness_indices: &Vec, + size_inv: F, + ) -> Vec<(F, F)> { + let scaled_offset = domain_offset.pow([self.config.folding_factor as u64]); + let lil_map = izip!( + 0.., + randomness_indices, + coset_offsets, + coset_offsets_inv, + query_sets, + common_factors_inv, + denominators_inv, + answer_evaluations + ); + lil_map + .map( + |( + index, + randomness_index, + coset_offset, + coset_offset_inv, + query_set, + common_factors_inv, + denominators_inv, + evaluation_of_ans, + )| { + // This is the point that we are querying at + let stir_randomness = scaled_offset + * domain_gen.pow([(self.config.folding_factor * randomness_index) as u64]); + let f_answers: Vec<_> = query_set + .into_iter() + .enumerate() + .map(|(j, x)| { + self.query( + *x, + oracle_answers[index][j], + common_factors_inv[j], + denominators_inv[j], + evaluation_of_ans[j], + ) + }) + .collect(); + // This is the folding + let folded_answer = poly_utils::interpolation::fft_interpolate( + generator, + *coset_offset, + generator_inv, + *coset_offset_inv, + size_inv, + &f_answers, + ) + .evaluate(&self.folding_randomness); + + // Return the folded answer + (stir_randomness, folded_answer) + }, + ) + .collect() + } + fn folded_evaluations( + &self, + randomness_indices: Vec, + oracle_answers: Vec>, + ) -> Vec<(F, F)> { + // Step 1: Generator + let generator: F = self.generator(); + + // Step 2: Coset offsets + let coset_offsets: Vec = self.coset_offsets(randomness_indices.clone()); + + // Step 3: Query sets + let query_sets: Vec> = self.query_sets(coset_offsets.clone(), generator); + + // Step 4: Common Factors + let common_factors: Vec> = self.common_factors(query_sets.clone()); + + // Step 5: Denominators + let denominators = self.denominators(query_sets.clone(), self.quotient_set.clone()); + + // Step 6:Invert + let ( + common_factors_inv, + coset_offsets_inv, + denominators_inv, + generator_inv, + size, + size_inv, + ) = self.invert( + common_factors.clone(), + coset_offsets.clone(), + denominators, + generator, + ); + + // Step 7: Answer evaluations + let answer_evaluations = self.answer_evaluations( + coset_offsets.clone(), + coset_offsets_inv.clone(), + generator, + generator_inv, + self.interpolating_coeff.clone(), + size, + size_inv, + ); + + // Step 8: Folded answer + self.folded_answers( + &answer_evaluations, + &common_factors_inv, + &coset_offsets, + &coset_offsets_inv, + &denominators_inv, + self.domain_gen, + self.domain_offset, + generator, + generator_inv, + oracle_answers, + &query_sets, + &randomness_indices, + size_inv, + ) + } + fn generator(&self) -> F { + let scaling_factor = self.domain_size / self.config.folding_factor; + self.domain_gen.pow([scaling_factor as u64]) + } + fn invert( + &self, + common_factors: Vec>, + coset_offsets: Vec, + denominators: Vec>, + generator: F, + ) -> (Vec>, Vec, Vec>, F, F, F) { + let mut to_invert: Vec = common_factors + .iter() + .flatten() + 
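+            // pack everything into one vector for a single batch inversion;
+            // the order (common factors, then denominators, then coset
+            // offsets, then the generator and the coset size) must match the
+            // unpacking order below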
.chain(denominators.iter().flatten()) + .chain(coset_offsets.iter()) + .cloned() + .collect(); + to_invert.push(generator); + let size = F::from(self.config.folding_factor as u64); + to_invert.push(size); + batch_inversion(&mut to_invert); + let size_inv = to_invert.pop().unwrap(); + let generator_inv = to_invert.pop().unwrap(); + let coset_offsets_inv = to_invert.split_off(to_invert.len() - coset_offsets.len()); + let common_factors_len = common_factors.len(); + let chunked: Vec> = to_invert + .chunks(self.config.folding_factor) + .map(|x| x.to_vec()) + .collect(); + let common_factors_inv = chunked[..common_factors_len].to_vec(); + let denominators_inv = chunked[common_factors_len..].to_vec(); + ( + common_factors_inv, + coset_offsets_inv, + denominators_inv, + generator_inv, + size, + size_inv, + ) + } + pub fn is_verified(&self) -> bool { + self.is_verified + } + // TODO: Nuke this + fn query( + &self, + evaluation_point: F, + value_of_prev_oracle: F, + common_factors_inverse: F, + denom_hint: F, + ans_eval: F, + ) -> F { + match &self.round_num { + 0 => value_of_prev_oracle, // In case this is the initial function, we just return the value of the previous oracle + _ => { + let num_terms = self.quotient_set.len(); + let quotient_evaluation = poly_utils::quotient::quotient_with_hint( + value_of_prev_oracle, + evaluation_point, + &self.quotient_set, + denom_hint, + ans_eval, + ); + + let common_factor = evaluation_point * self.comb_randomness; + + let scale_factor = if common_factor != F::ONE { + (F::ONE - common_factor.pow([(num_terms + 1) as u64])) * common_factors_inverse + } else { + F::from((num_terms + 1) as u64) + }; + + quotient_evaluation * scale_factor + } + } + } + fn query_sets(&self, coset_offsets: Vec, generator: F) -> Vec> { + let scales: Vec = self.scales(generator); + coset_offsets + .iter() + .map(|coset_offset| { + (0..self.config.folding_factor) + .map(|j| *coset_offset * scales[j]) + .collect::>() + }) + .collect() + } + fn quotient_answers( + &self, + challenge_values: &Vec>, + out_of_domain_randomness: &Vec, + out_of_domain_evaluations: &Vec, + randomness_indices: &Vec, + ) -> Vec<(F, F)> { + // Step 1: for random indices compute folding of previous oracle TODO: check indices? 
+ let folded_answers: Vec<(F, F)> = + self.folded_evaluations(randomness_indices.clone(), challenge_values.clone()); + + // Step 2: + out_of_domain_randomness + .into_iter() + .zip(out_of_domain_evaluations) + .map(|(alpha, beta)| (*alpha, *beta)) + .chain(folded_answers) + .collect() + } + fn randomness( + &mut self, + commitment_digest: M::InnerDigest, + out_of_domain_evaluations: Vec, + ) -> (Vec, F, F, Vec) { + self.sponge_absorb(&commitment_digest); + let out_of_domain = self.sponge_squeeze_multiple(self.config.num_out_of_domain_samples); + self.sponge_absorb(&out_of_domain_evaluations); + let comb = self.sponge_squeeze(); + let folding = self.sponge_squeeze(); + let scaling_factor = self.domain_size / self.config.folding_factor; + let num_repetitions = self.config.num_repetitions[self.round_num]; + let indices = + dedup((0..num_repetitions).map(|_| squeeze_integer(&mut self.sponge, scaling_factor))); + (out_of_domain, comb, folding, indices) + } + fn scales(&self, generator: F) -> Vec { + let scale = generator; + let mut temp = F::ONE; + let mut scales = vec![]; + for _ in 0..self.config.folding_factor { + scales.push(temp); + temp *= scale; + } + scales + } + fn sponge_absorb(&mut self, element: impl Absorb) { + self.sponge.absorb(&element); + } + fn sponge_squeeze(&mut self) -> F { + self.sponge.squeeze_field_elements(1)[0] + } + fn sponge_squeeze_multiple(&mut self, num_elements: usize) -> Vec { + self.sponge.squeeze_field_elements(num_elements) + } + fn verify_folded_answers(&self, randomness_indices: Vec) -> bool { + let oracle_answers = self.proof.rounds.last().unwrap().challenge_values.clone(); + let folded_answers = self.folded_evaluations(randomness_indices, oracle_answers); + folded_answers + .into_iter() + .all(|(point, value)| self.proof.rounds.last().unwrap().coeff.evaluate(&point) == value) + } + fn verify_proof_of_work(&mut self, proof: &STIRProof) -> bool { + proof_of_work_verify( + &mut self.sponge, + self.config.num_proof_of_work_bits[self.config.num_rounds], + proof + .rounds + .get(self.round_num) + .unwrap() + .proof_of_work_nonce, + ) + } + fn verify_quotient_answers( + &mut self, + out_of_domain_randomness: &Vec, + randomness_indices: &Vec, + ) -> bool { + let shake_randomness = self.sponge_squeeze(); + let quotient_answers: Vec<(F, F)> = self.quotient_answers( + &self.proof.rounds[self.round_num].challenge_values, + &out_of_domain_randomness, + &self.proof.rounds[self.round_num].out_of_domain_evaluations, + &randomness_indices, + ); + self.quotient_set = quotient_answers + .clone() + .into_iter() + .map(|(x, _)| x) + .collect(); + self.proof.rounds[self.round_num] + .verify_quotient_answers("ient_answers, &shake_randomness) + } +} + +impl Iterator for STIRVerifierState +where + F: FftField + PrimeField + Absorb, + M: MerkleConfig> + Clone, + M::InnerDigest: Absorb, + S: CryptographicSponge, + S::Config: Clone, +{ + type Item = Self; + + fn next(&mut self) -> Option { + if self.round_num < self.config.num_rounds { + // Step 1: verify challenges + self.proof.rounds[self.round_num].verify_challenge_answers(); + + // Step 2: handle some randomness + let (out_of_domain_randomness, comb_randomness, folding_randomness, randomness_indices) = + self.randomness( + self.proof.rounds[self.round_num].commitment_digest.clone(), + self.proof.rounds[self.round_num] + .out_of_domain_evaluations + .clone(), + ); + + // Step 3: proof of work + if !self.verify_proof_of_work(&self.proof.clone()) { + self.is_verified = false; + return Some(self.clone()); + } + + if 
!self.proof.rounds[self.round_num].is_final_round { + // Step 4: verify quotient answers + if !self.verify_quotient_answers(&out_of_domain_randomness, &randomness_indices) { + self.is_verified = false; + return Some(self.clone()); + } + } else { + // Step 5: Folded answers + if !self.verify_folded_answers(randomness_indices) { + self.is_verified = false; + return Some(self.clone()); + } + } + + // Step 6: update some state + self.comb_randomness = comb_randomness; + self.domain_gen = self.domain_gen * self.domain_gen; + self.domain_offset = self.domain_offset * self.domain_offset * self.root_of_unity; + self.domain_size = self.domain_size / 2; + self.folding_randomness = folding_randomness; + self.interpolating_coeff = self.proof.rounds[self.round_num].coeff.clone(); + self.round_num = self.round_num + 1; + + // Step 7: done + Some(self.clone()) + } else { + None + } + } +} + +impl Clone for STIRVerifierState +where + F: FftField + Clone, + M: MerkleConfig + Clone, + S: CryptographicSponge + Clone, + S::Config: Clone, +{ + fn clone(&self) -> Self { + Self { + comb_randomness: self.comb_randomness.clone(), + config: self.config.clone(), + domain_gen: self.domain_gen.clone(), + domain_offset: self.domain_offset.clone(), + domain_size: self.domain_size, + folding_randomness: self.folding_randomness.clone(), + interpolating_coeff: self.interpolating_coeff.clone(), + is_verified: self.is_verified, + proof: self.proof.clone(), + quotient_set: self.quotient_set.clone(), + root_of_unity: self.root_of_unity.clone(), + round_num: self.round_num, + sponge: self.sponge.clone(), + } + } +} diff --git a/src/test_helpers/fields.rs b/src/test_helpers/fields.rs new file mode 100644 index 0000000..5f2b219 --- /dev/null +++ b/src/test_helpers/fields.rs @@ -0,0 +1,25 @@ +use ark_ff::{Field, Fp128, Fp192, Fp64, MontBackend, MontConfig, PrimeField}; + +pub type Field256 = ark_test_curves::bls12_381::Fr; + +#[derive(MontConfig)] +#[modulus = "18446744069414584321"] +#[generator = "7"] +pub struct FrConfig64; +pub type Field64 = Fp64>; + +#[derive(MontConfig)] +#[modulus = "340282366920938463463374557953744961537"] +#[generator = "3"] +pub struct FrConfig128; +pub type Field128 = Fp128>; + +#[derive(MontConfig)] +#[modulus = "4787605948707450321761805915146316350821882368518086721537"] +#[generator = "3"] +pub struct FrConfig192; +pub type Field192 = Fp192>; + +pub fn field_size_bits() -> usize { + F::BasePrimeField::MODULUS_BIT_SIZE as usize * F::extension_degree() as usize +} diff --git a/src/test_helpers/fs/mod.rs b/src/test_helpers/fs/mod.rs new file mode 100644 index 0000000..4395ce1 --- /dev/null +++ b/src/test_helpers/fs/mod.rs @@ -0,0 +1 @@ +pub mod poseidon; diff --git a/src/test_helpers/fs/poseidon.rs b/src/test_helpers/fs/poseidon.rs new file mode 100644 index 0000000..e786f06 --- /dev/null +++ b/src/test_helpers/fs/poseidon.rs @@ -0,0 +1,1171 @@ +use ark_crypto_primitives::sponge::poseidon::{PoseidonConfig, PoseidonSponge}; +use ark_ff::PrimeField; +// use poseidon_paramgen::v1::generate; + +#[cfg(not(feature = "std"))] +use ark_std::vec; + +pub type Sponge = PoseidonSponge; + +// PoseidonSponge for testing +pub fn poseidon_test_config() -> PoseidonConfig { + // initialize params + // let security_level_bits: usize = 128; + // let width_hash_function: usize = 3; // In this case, we do 2-to-1 + // let allow_inverse: bool = false; + // let poseidon_parameters = generate::( + // security_level_bits, + // width_hash_function, + // F::MODULUS, + // allow_inverse, + // ); + let alpha = 5; + let full_rounds 
= 8; + let partial_rounds = 56; + let mds = vec![ + vec![ + F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123009", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "39326906381344642859585805381139474378267914375395728366952744024953935888385", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "31461525105075714287668644304911579502614331500316582693562195219963148710708", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "39326906381344642859585805381139474378267914375395728366952744024953935888385", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "31461525105075714287668644304911579502614331500316582693562195219963148710708", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "43696562645938492066206450423488304864742127083773031518836382249948817653761", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "31461525105075714287668644304911579502614331500316582693562195219963148710708", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "43696562645938492066206450423488304864742127083773031518836382249948817653761", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "14981678621464625851270783002338847382197300714436467949315331057125308909861", + ) + .map_err(|_| ()) + .unwrap(), + ], + ]; + let ark = vec![ + vec![ + F::from_str( + "22845428001683035001757566886883286202794971213257405898653937619342210809967", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "9417748754940633550207934065289371845381781919953741119572192314859326756938", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "26689524918965143061772476650114353366218076390810271464955542964810918690462", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "23667059179080355346057115978894489941696461722584637607820962465827837825273", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "46040731510256722444481170188451497748934948859158655271594666645009954130420", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "38113870242218339764104342092524703512067945072549749734867314604234805824264", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "32611384898299796592751439904458085938192833715280734492851042712787358716294", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "33324362596726879582132023009108655879122993156535518227166123425490127359165", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "31233555617069547075173148270170739854954869805099073326535758453459148394049", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "49586904012565111968216908241229472951281535032044747164567307267295875715026", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "2127315429566462795785821689020248617345433219758728404787310279338768885895", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "6529018108583238830114188423456393413649137858781161494022316162010318640950", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "23649163789043242678776939693475922815241122333107994583151017148206730700267", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "23982229504468820398618649637438150346315963308296786781329337326290795690104", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "591156623969703606931660477957796564969105236556604782683460292150992424027", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "30084821438110117176679719381277976601442389474785273476376177505312413978818", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"8191445513196243205449886391313722723661952074636811757767516173019594024447", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "7395469702730664138851034971541308544559423038002849895231878040178489451243", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "6341026417807630730297613183042003952592308254703287399431399858814665410034", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "20652945640892549037566441607019982429364028401857035181703584312655393699470", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "34824482815556921067370021614278815836412588641486739976853046658332582052879", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "48976251116034708464354192855627142326430307437063612591022448910603652475844", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "33973886383203962364416371082456886713311059913098568681589079598311290341730", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "23178695901093722025363352164825380417033560143406423973483056057902806658538", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "37639985941451408425998780104098889280755425755378537999685550579014673118383", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "26775243484593836205445491219466322530651352314052529708867746286247232484014", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "45192808452249683828194325900235208250060616352273105247249793679536128618560", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "20071905668670782245953495435146197909174054068927952772615658727538296929513", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "29406444079001843296331241892729732794993064809431324386692390118024299913033", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "27838323777556259931728341767911143308623943462495399158698183979133847887998", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "37966516969555117666235778718758211780248673468521118136255691264235287331101", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "38967514381541394472121804114513934243293763636056980334619885239009397430989", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40028341421872977135388961875125188765067392991342155013375654821131464031936", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "1001613543114792173050023076800478067685833458234763459693802494435332553026", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "35311269311571980681347601071503185346173815932615188417499353359668184483756", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "34462018568703773567209528024867849348825919032173701573186920278058007201948", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "10262313581383491791096554537171596499065114438052122965745631817632928267916", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "27992105705964906687339251318982892339131668293855140653976808119275969963868", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "42462965018262714629670524480631638355909533906921865587801092326698629415353", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "731862653799218940119653655008077067672754736101148769466023549852112342198", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "4990007337841733858417996170512765349002981269633789630458334995176400108943", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40084733361186863392667730682383810731143560279010011121513673913355226479536", + ) + .map_err(|_| ()) + 
.unwrap(), + ], + vec![ + F::from_str( + "13472436169748051397435195704947919623141237809210825830289534653879222770162", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "50265394623175139483913971841917882707100028857396846929106208750084545654680", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "30949636244369656784898042441618620789717441754208684137133141585230651202314", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "39349527001767045643298791216991158309532217712981217526969063621356866755244", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "10201123598983809630230278570888438897128042351935922235849565841871811914458", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "23172475118582244440190512123295348171804989798503475437607943201606731741328", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "14539407523908390695568439980336876821578107531746451022879738674020080696447", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40663157753728471683236602461375503173626639949161575886034585669523632164258", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "41478194862632007866456615972070216026646814855913562507497145373244784230963", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "8622071781902469999213511033556562060366946974247448298500382057461783727407", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "10830466122360977082234691006699380271235478376684567138914651532981394980126", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "7232867760693339528219049901552217783200496118772426979320627096878605711592", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "35079031576108189052612014501875323358302221902936394805957983077486222384523", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "9167584557822184508030612984628817005319158702740560176792366793373157325196", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "18074610613332847447681900241221546049326446100125268088064210259992236150854", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "16517331293849824763364413300554893951203379531056781643365275271457171439509", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "1089195116745468118265857623837494042159140798941646088987006397242383772274", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "17610774827548103443018122640740452953487672960426564938071840133447084592273", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "16560559120122945159055643110747776066049028486182219761206720512074050208959", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "27307859881734247797242923704101058112359845830743800995003842091962029823992", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "12574849717458401184175214146191580664064712008876201865632459615475659992210", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "41543024786871098911868759261087187686116402204621994511525841195606772065593", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "32756776979566599709291671539531904499663794908005108228877494335137125655012", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "13532075261688893387142549255064455454261052957758568678924958037395894977520", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "27998635034936574887381376947571205212530065197443449727964597035616638471847", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"46844950058534215635661018375636671603134206482544045614009211691474640545183", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "45526729914336495893163690991383148465568398910057609608759474521512027209057", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "33872763155222545113791047234953158863665411185433215920322248059422557784040", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "29460460133995562592349301799618260583016525430602287818866026649784680483523", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "20989735902099832593496529105916714634367556088781730822973410813751407497879", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "45520909994106166183648093095035256424865964915398087578040671480508057709894", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "51725468395149610367573509613712677267589764836449813856852867186555661819360", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "15651425738690741503176348763612585457187791238335345329322432112360560461704", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "24945611636441666054318407598715861234462523499499595448517505049488110615214", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "16301539278215369687299504108802130276435745980769657328958446248303035955176", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "44301912086845175690540373083352281317454905399747377547078556917865150832989", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "49778396049625358995132756791358478390102266869360717476676437985801446091889", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "39153290849454135141271361494475662688916570729187492943387960313673041446148", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "26991647174508986366084735074783734164113574105838673377039994017089952182486", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "2403186673978540311545640594843853543643227950077142548745613485294780855141", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "10329119407814891230092277094164085309383530588695756714355495404374556885786", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "702874774050464869184650423477652304207160836554060769812462487419268335441", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "50074752288575507441174064290633923705846137616935811121806601719875594753019", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "29342013954649824046585779860716199589577998264314693554503526645430239741709", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "30361262010467679813283989386832242969994684207282862420161955708780601098704", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "30351766991733436072768691856661481615952834268179175645404913610213825680401", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "1664790785025469129434526735675056241660553790656417175792384823268029808499", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "46322613396380948619628922350351042103212945902543833810047505753710422764619", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "19201041989805354003325674917539369487037360530331018121215975522940072895718", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40713015437441300486859919171751834181664365406786230895978883504408178991598", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "33165732938688300275094516949956529791738898002532702022139088595193131778075", + ) + .map_err(|_| ()) 
+ .unwrap(), + ], + vec![ + F::from_str( + "14061271190311903391893296597687971683409755901603758686994336529066151361543", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "30017115602362133544428282226988281665898062715638283950670461587514995528337", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "51753869079004912869790737342975337426219894984640345689956073613279132042928", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "33726414746454922641356272061208629386217736066083215066025761465516292002524", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "35210911960966295213959369947626866692609088740419141627680657893370017484508", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "4141660360379187486674487904202902153994931348181391058833499399706861611970", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "40875507602384303877352581938122059128419080856403401999804217089730126842394", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40087000777143021841754444382736050591105162668380193612093257295149366899295", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "360624956582718333774625848521820613496494801229735030900568999941102114884", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "11477457618572262344538985253904058228123098918969349239608807377007383139313", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "45254065959701514055715124638217794757142405925270580133395538824914155859795", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "7358301974444199529119918346570045958313961795808719967710329208237031192970", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "12699242192147355636134649376980967434530726534868950427468890401069053166328", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "21574666344082745249505505003959311248488352938383685650327775178001000819481", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "26884614465157200359785958766234836206045908165626281395333341075981676550453", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "24852179396523634535353419259325074241075443941784111871159513114358632326939", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "18211374049926652874841112832356976374552310648534087414193782959262371639828", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "35375087468679890118148541757069905069645283903997802919905971779989387935312", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "10990565882246234777729800926231517373372767769410158316318619229150139473531", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "24032051352715262242401866721229748797811123047937790057119251127169973152881", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "28438207938166443253621581664511800652351092231061919031828588532652227156596", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "45687415910148401846425146916404001259507215012315057946595070426660110660811", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "45110274946146660516282617054500711927725036481516113151944675392925576760894", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "34137223191115457752400978731943408868071059969283171338528389319055436889307", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "32713597945226548966370311170658233211393249637894135199446742658128239554646", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"33192735799627684320353855891675583234013662286026882194028603892523478740123", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "3776516710765546122697723171053195964096694338618710275820928005602928573368", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "7944265310164359722118605517046022331921673922658843007239349330762185482392", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "31715470560273718011671907461103911969141721217361313081393607676239225390364", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "42597200531215255918594108726444759128389740499081765757576512781903070927748", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "38622022004327168167649998385610411001269295886008008390410138141853113112315", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "41639697610221403852836714442228562101456287247472949217445027773398868160711", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "13150428932552364822098333559189689694536801235647059449631307475679127673867", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "34373201154776600875146892219437926734069726168942010866640582754750620448511", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "35279388813336423141682178743103346803927226873412975071332881859187650721328", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "27774333846963600481511257662914321673673373955022726430096988470105398209714", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "35483098310244161758040046647575507323602964525852265859901382236130852700062", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "29940267014590605840700764600461238078000060065790052366740674868616463896153", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "25739729541991097592540752609074245193891720936529356517885412505653561292306", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "48592083802088089137279496160575479196512409111032554810647441613023130859714", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "11785199761165312817707782850381637428548166564182162215870947169062065050309", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "37737752799213701220107891469991549645342013287264825965243870418885646558575", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "42185188402563931515779080170268528213759004550497190339895558255998756484305", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "29762891826290037177744093996573528328489719757759674546874936845480876153625", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "41048820922442053658536094708659956071999957589299249055158529124710069314949", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "23522761739531401519638478389533837643267162088884579912861987842834590059205", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "1577795469784700584387088254598570414875555853652586256618632377936727476028", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "44736559789102463530519848387460288056502543753479209858246163243414900222987", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "26124083702829644397351772362411115259535338884150178345524352910165101961197", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "44068951341630081435585895581791390859877371169408252908545483578489725374863", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "17361638690026756605596598636834592222533091153902430487355356691164428488413", + ) + .map_err(|_| ()) 
+ .unwrap(), + ], + vec![ + F::from_str( + "11647749325942062475038392890134560282358764855423971504181109950516197620555", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40500824589487208102914420357982839590679741935577095819591537825040671597495", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40785454914612292708630091724060381548396170012626005122221974905540756399207", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "20356565648078510551533920457384489131486812605779651499082832980671484016522", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "29112015816272783569379820912763801267400393662021206741257278682896986065788", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "44733525682678568645165250261910618247576685489408569315352106466147253727345", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "10565089313326829099491655443012602039407136842910355472790748905051752334527", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "17636725232687840901682379138469361259504197357613401374231593442081994684058", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "49455315856290883709107366844714705621304302274078267246639667945964681049589", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "9394008262967145993303699385193109865242274266934320660589155431516724290078", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "43900030624416944756689868922581338041945556634263488704748136303189975871142", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "48191060987937123985909229203502109068831786295580349499493566153386798933792", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "43361003342366732200083697919780436439559769824804240685090356123985970020322", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "51781017630032437459807770951906705163397801283934820333816755077832398052644", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "48709885480080800062931271139518814968675476404080001820094094562394087333146", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "517790256320125875817609575412307930532409010548566690252780460066482113077", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "44162810324319332227049701762799226779884107012295764844978771561514939267347", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "18329654636987314320181933412859769661035966797908420315450106106967427683830", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "19615817294297319883822402917727657184715513281307347906216027590033902491654", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "41253529578447529272966952303200110041594696386420191733502268974386805490133", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "44551831777999888557799092846226179853573739860014862778154450411343402505480", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "7559320320412538022925238701423240040386861903490421501028835005166431445991", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "37703931107359229792370407986681862000136875665994615587307408387674509893202", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "36981740507092948379101268207755436295900732477477035639426425826451873208221", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "32220980229047213196553181881899738269582355015949570895340236724112466067644", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"50293059130849255052300726917626827619838259588242288225714397253126361265854", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "11461804647277285059017568254865894963059032745983570268950239637841369012659", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "18013977259539479149208518875602054515978941464362999137382072720864300137360", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "24262624423049731894299705457348106409867388962133042917690439456364005006686", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "52144014954916185528618956479620036725707233503135302723071827906176892132960", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "1184581588830389264718635894468521582491343657766431963764898469843010069554", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40610686652907253784724857559452211268043806809440823202590722974294091011876", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "914146278018379374974950713477496078415253310809588124235733809681562817725", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "25097904996332603354919889504928778253424502922841324137944667824221109148627", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "17243512742922204136836426230051681478081152509764057560629791259889066934988", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "4081759897772210684578749284259535944860111230995927338347978237888465764094", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "36765065937060082811140519464443523737174704483573873520674596392742252800514", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "24321691704536604443532250865415818572294386696143821782936373725880267878369", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "2395267786872226534851212329566983083100169355414401082132646655651751509972", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "3950196104600672123800974772107750971315804388021439464175836038546084353494", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "25263626252513390011901163434994070547778323834423264534286975919312362778567", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "29896651629929954914209712564022862418500548724265113861327208457712342872872", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "9539335037415025952662358277279804491504129471492873246424615506400771650468", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "27193798077315487970531850693078048672125444244557439962378947858653645205697", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "21395546066883486929277886107956208113265529474198702733513032754482401541525", + ) + .map_err(|_| ()) + .unwrap(), + ], + vec![ + F::from_str( + "30032609953800770008376919053783279995466549980411699863594692563988339280340", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "42581228581701308758620568691477079823478678724481681987270661843757181526448", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "48272706010725470294995350043440475847552808087172142257372185646688911955162", + ) + .map_err(|_| ()) + .unwrap(), + ], + ]; + let rate = 1; + let capacity = 2; + PoseidonConfig::new(full_rounds, partial_rounds, alpha, mds, ark, rate, capacity) +} diff --git a/src/test_helpers/merkle_tree/mock.rs b/src/test_helpers/merkle_tree/mock.rs new file mode 100644 index 0000000..071f19e --- /dev/null +++ b/src/test_helpers/merkle_tree/mock.rs @@ -0,0 +1,71 @@ +use ark_std::{borrow::Borrow, marker::PhantomData}; + +use 
ark_crypto_primitives::{ + crh::{CRHScheme, TwoToOneCRHScheme}, + merkle_tree::{ByteDigestConverter, Config}, +}; +use ark_serialize::CanonicalSerialize; +use ark_std::rand::RngCore; + +#[cfg(not(feature = "std"))] +use ark_std::{vec, vec::Vec}; + +pub struct Mock; + +impl TwoToOneCRHScheme for Mock { + type Input = [u8]; + type Output = Vec; + type Parameters = (); + + fn setup(_: &mut R) -> Result { + Ok(()) + } + + fn evaluate>( + _: &Self::Parameters, + _: T, + _: T, + ) -> Result { + Ok(vec![0u8; 32]) + } + + fn compress>( + _: &Self::Parameters, + _: T, + _: T, + ) -> Result { + Ok(vec![0u8; 32]) + } +} + +pub type LeafH = super::LeafIdentityHasher; +pub type CompressH = Mock; + +#[derive(Debug, Default)] +pub struct MerkleTreeParams(PhantomData); + +impl Config for MerkleTreeParams { + type Leaf = F; + + type LeafDigest = as CRHScheme>::Output; + type LeafInnerDigestConverter = ByteDigestConverter; + type InnerDigest = ::Output; + + type LeafHash = LeafH; + type TwoToOneHash = CompressH; +} + +pub fn default_config( + rng: &mut impl RngCore, + _leaf_arity: usize, +) -> ( + as CRHScheme>::Parameters, + ::Parameters, +) { + let leaf_hash_params = as CRHScheme>::setup(rng).unwrap(); + let two_to_one_params = ::setup(rng) + .unwrap() + .clone(); + + (leaf_hash_params, two_to_one_params) +} diff --git a/src/test_helpers/merkle_tree/mod.rs b/src/test_helpers/merkle_tree/mod.rs new file mode 100644 index 0000000..7cd00fe --- /dev/null +++ b/src/test_helpers/merkle_tree/mod.rs @@ -0,0 +1,70 @@ +use core::sync::atomic::{AtomicUsize, Ordering}; +use spin::{Mutex, Once}; + +pub mod mock; +pub mod poseidon; + +use ark_std::{borrow::Borrow, marker::PhantomData}; + +use ark_crypto_primitives::crh::CRHScheme; +use ark_serialize::CanonicalSerialize; +use ark_std::{rand::RngCore, vec}; + +#[cfg(not(feature = "std"))] +use ark_std::vec::Vec; + +#[derive(Debug, Default)] +pub struct HashCounter { + counter: AtomicUsize, +} + +static INIT: Once = Once::new(); +static mut HASH_COUNTER: Option> = None; + +impl HashCounter { + fn get_instance() -> &'static Mutex { + unsafe { + INIT.call_once(|| { + HASH_COUNTER = Some(Mutex::new(HashCounter::default())); + }); + HASH_COUNTER.as_ref().unwrap() + } + } + + pub(crate) fn add() -> usize { + let counter = Self::get_instance().lock(); + counter.counter.fetch_add(1, Ordering::SeqCst) + } + + pub fn reset() { + let counter = Self::get_instance().lock(); + counter.counter.store(0, Ordering::SeqCst) + } + + pub fn get() -> usize { + let counter = Self::get_instance().lock(); + counter.counter.load(Ordering::SeqCst) + } +} + +#[derive(Debug, Default)] +pub struct LeafIdentityHasher(PhantomData); + +impl CRHScheme for LeafIdentityHasher { + type Input = F; + type Output = Vec; + type Parameters = (); + + fn setup(_: &mut R) -> Result { + Ok(()) + } + + fn evaluate>( + _: &Self::Parameters, + input: T, + ) -> Result { + let mut buf = vec![]; + CanonicalSerialize::serialize_compressed(input.borrow(), &mut buf)?; + Ok(buf) + } +} diff --git a/src/test_helpers/merkle_tree/poseidon.rs b/src/test_helpers/merkle_tree/poseidon.rs new file mode 100644 index 0000000..5f0d97e --- /dev/null +++ b/src/test_helpers/merkle_tree/poseidon.rs @@ -0,0 +1,127 @@ +use ark_std::{borrow::Borrow, marker::PhantomData, vec::Vec}; + +use ark_crypto_primitives::crh::poseidon; +use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; +use ark_crypto_primitives::sponge::CryptographicSponge; +use ark_crypto_primitives::{ + crh::{CRHScheme, TwoToOneCRHScheme}, + merkle_tree::{Config, 
IdentityDigestConverter}, + sponge::Absorb, +}; +use ark_ff::PrimeField; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::rand::RngCore; + +use crate::test_helpers::fs; + +use super::HashCounter; + +// We need 2 field elements for security +#[derive( + Clone, Copy, Debug, Eq, PartialEq, Hash, Default, CanonicalSerialize, CanonicalDeserialize, +)] +pub struct PoseidonDigest([F; 2]); + +impl Absorb for PoseidonDigest { + fn to_sponge_bytes(&self, dest: &mut Vec) { + self.0[0].to_sponge_bytes(dest); + self.0[1].to_sponge_bytes(dest); + } + + fn to_sponge_field_elements(&self, dest: &mut Vec) { + self.0[0].to_sponge_field_elements(dest); + self.0[1].to_sponge_field_elements(dest); + } +} + +pub struct PoseidonCRH(PhantomData); + +impl CRHScheme for PoseidonCRH { + type Input = Vec; + type Output = PoseidonDigest; + type Parameters = as CRHScheme>::Parameters; + + fn setup(_rng: &mut R) -> Result { + Ok(fs::poseidon::poseidon_test_config::()) + } + + fn evaluate>( + parameters: &Self::Parameters, + input: T, + ) -> Result { + HashCounter::add(); + let mut sponge = PoseidonSponge::new(parameters); + for el in input.borrow() { + sponge.absorb(el); + } + let res = sponge.squeeze_field_elements::(2); + Ok(PoseidonDigest([res[0], res[1]])) + } +} + +pub struct PoseidonTwoToOneCRH(PhantomData); + +impl TwoToOneCRHScheme for PoseidonTwoToOneCRH { + type Input = PoseidonDigest; + type Output = PoseidonDigest; + type Parameters = as TwoToOneCRHScheme>::Parameters; + + fn setup(_rng: &mut R) -> Result { + Ok(fs::poseidon::poseidon_test_config::()) + } + + fn evaluate>( + parameters: &Self::Parameters, + left_input: T, + right_input: T, + ) -> Result { + Self::compress(parameters, left_input, right_input) + } + + fn compress>( + parameters: &Self::Parameters, + left_input: T, + right_input: T, + ) -> Result { + HashCounter::add(); + let left_input = left_input.borrow(); + let right_input = right_input.borrow(); + let mut sponge = PoseidonSponge::new(parameters); + sponge.absorb(&left_input.0[0]); + sponge.absorb(&left_input.0[1]); + sponge.absorb(&right_input.0[0]); + sponge.absorb(&right_input.0[1]); + let res = sponge.squeeze_field_elements::(2); + Ok(PoseidonDigest([res[0], res[1]])) + } +} + +pub type LeafH = PoseidonCRH; +pub type CompressH = PoseidonTwoToOneCRH; + +#[derive(Debug, Default, Clone)] +pub struct MerkleTreeParams(PhantomData); + +impl Config for MerkleTreeParams { + type Leaf = Vec; + + type LeafDigest = as CRHScheme>::Output; + type LeafInnerDigestConverter = IdentityDigestConverter>; + type InnerDigest = as TwoToOneCRHScheme>::Output; + + type LeafHash = LeafH; + type TwoToOneHash = CompressH; +} + +pub fn default_config( + rng: &mut impl RngCore, + _leaf_arity: usize, +) -> ( + as CRHScheme>::Parameters, + as TwoToOneCRHScheme>::Parameters, +) { + let leaf_hash_params = as CRHScheme>::setup(rng).unwrap(); + let two_to_one_params = as TwoToOneCRHScheme>::setup(rng).unwrap(); + + (leaf_hash_params, two_to_one_params) +} diff --git a/src/test_helpers/mod.rs b/src/test_helpers/mod.rs new file mode 100644 index 0000000..f773e69 --- /dev/null +++ b/src/test_helpers/mod.rs @@ -0,0 +1,3 @@ +pub mod fields; +pub mod fs; +pub mod merkle_tree; diff --git a/src/utils.rs b/src/utils.rs new file mode 100644 index 0000000..01fb200 --- /dev/null +++ b/src/utils.rs @@ -0,0 +1,106 @@ +use ark_std::collections::BTreeSet; + +#[cfg(not(feature = "std"))] +use ark_std::{vec, vec::Vec}; + +use ark_crypto_primitives::sponge::CryptographicSponge; + +pub fn is_power_of_two(n: 
usize) -> bool {
+    // `0` is not a power of two; the short-circuit also keeps `n - 1` from
+    // underflowing when `n == 0`.
+    n != 0 && n & (n - 1) == 0
+}
+
+pub fn transpose<T>(v: Vec<Vec<T>>) -> Vec<Vec<T>> {
+    assert!(!v.is_empty());
+    let len = v[0].len();
+    let mut iters: Vec<_> = v.into_iter().map(|n| n.into_iter()).collect();
+    (0..len)
+        .map(|_| {
+            iters
+                .iter_mut()
+                .map(|n| n.next().unwrap())
+                .collect::<Vec<T>>()
+        })
+        .collect()
+}
+
+pub fn proof_of_work(
+    sponge: &mut impl CryptographicSponge,
+    proof_of_work_bits: usize,
+) -> Option<usize> {
+    assert!(proof_of_work_bits <= 32);
+    if proof_of_work_bits == 0 {
+        return None;
+    }
+
+    let mut buf = [0; 4];
+    let mut nonce: usize = 0;
+    loop {
+        let mut new_sponge = sponge.clone();
+        let nonce_bytes = nonce.to_le_bytes();
+        new_sponge.absorb(&nonce_bytes.as_slice());
+        let pow_bytes = new_sponge.squeeze_bytes(4);
+        buf.copy_from_slice(&pow_bytes[..]);
+        let pow = u32::from_le_bytes(buf);
+        if pow.trailing_zeros() as usize >= proof_of_work_bits {
+            // Replay the winning nonce on the caller's sponge so the prover's
+            // and verifier's transcripts stay in sync.
+            sponge.absorb(&nonce_bytes.as_slice());
+            sponge.squeeze_bytes(4);
+            return Some(nonce);
+        }
+        nonce += 1;
+    }
+}
+
+pub fn proof_of_work_verify(
+    sponge: &mut impl CryptographicSponge,
+    proof_of_work_bits: usize,
+    pow_nonce: Option<usize>,
+) -> bool {
+    assert!(proof_of_work_bits <= 32);
+    if proof_of_work_bits == 0 {
+        return true;
+    }
+
+    let nonce = match pow_nonce {
+        Some(nonce) => nonce,
+        None => return false,
+    };
+    sponge.absorb(&nonce.to_le_bytes().as_slice());
+    let pow_bytes = sponge.squeeze_bytes(4);
+    let mut buf = [0; 4];
+    buf.copy_from_slice(&pow_bytes[..]);
+    let pow = u32::from_le_bytes(buf);
+    pow.trailing_zeros() as usize >= proof_of_work_bits
+}
+
+pub fn squeeze_integer(sponge: &mut impl CryptographicSponge, range: usize) -> usize {
+    assert!(is_power_of_two(range));
+    let mut bytes_array = [0; 8];
+    let bytes = sponge.squeeze_bytes(8);
+    bytes_array.copy_from_slice(&bytes);
+    let candidate = usize::from_le_bytes(bytes_array);
+    // Reducing modulo the range is uniform because the range is a power of two
+    candidate % range
+}
+
+// Deduplicates AND sorts (ascending) the given values
+pub fn dedup<T: Ord>(v: impl IntoIterator<Item = T>) -> Vec<T> {
+    Vec::from_iter(BTreeSet::from_iter(v))
+}
+
+// Takes the vector of evaluations (assuming evals[i] = f(omega^i)) and stacks
+// them by coset of the subdomain of size k = evals.len() / folding_factor, so
+// that stacked[i] = [f(omega^(i + k * j)) for j in 0..folding_factor]
+pub fn stack_evaluations<F: Copy>(evals: Vec<F>, folding_factor: usize) -> Vec<Vec<F>> {
+    assert!(evals.len() % folding_factor == 0);
+    let size_of_new_domain = evals.len() / folding_factor;
+
+    let mut stacked_evaluations = vec![];
+    for i in 0..size_of_new_domain {
+        let mut new_evals = vec![];
+        for j in 0..folding_factor {
+            new_evals.push(evals[i + j * size_of_new_domain]);
+        }
+        stacked_evaluations.push(new_evals);
+    }
+
+    stacked_evaluations
+}
diff --git a/src/witness/mod.rs b/src/witness/mod.rs
new file mode 100644
index 0000000..12e7785
--- /dev/null
+++ b/src/witness/mod.rs
@@ -0,0 +1,33 @@
+use ark_crypto_primitives::merkle_tree::Config as MerkleConfig;
+use ark_ff::FftField;
+use ark_poly::univariate::DensePolynomial;
+
+use crate::domain::Domain;
+
+pub mod single;
+
+pub trait Witness<F: FftField, M: MerkleConfig> {
+    type Argument;
+    type Commitment;
+    type Challenges;
+    type ChallengeAnswers;
+    type Statement;
+    type CommittedValues;
+    type MerkleConfig;
+
+    fn new(argument: Self::Argument) -> Self;
+    fn coeff(&self) -> DensePolynomial<F>;
+    fn coeff_degree(&self) -> usize;
+    fn commitment(&self) -> Self::Commitment;
+    fn commitment_digest(&self) -> M::InnerDigest;
+    fn committed_values(&self) -> Self::CommittedValues;
+    fn challenges(&self, num_challenges: usize) -> Self::Challenges;
+    fn challenge_answers(&self, challenges: Self::Challenges) -> Self::ChallengeAnswers;
+    fn domain(&self) -> Domain<F>;
+    fn statement(&self) -> Self::Statement;
+    fn verify(
+        &self,
+        challenges: Self::Challenges,
+        challenge_answers: Self::ChallengeAnswers,
+    ) -> bool;
+}
diff --git a/src/witness/single.rs b/src/witness/single.rs
new file mode 100644
index 0000000..d42014f
--- /dev/null
+++ b/src/witness/single.rs
@@ -0,0 +1,155 @@
+use ark_crypto_primitives::{
+    merkle_tree::{Config as MerkleConfig, LeafParam, MerkleTree, MultiPath, TwoToOneParam},
+    sponge::{Absorb, CryptographicSponge},
+};
+use ark_ff::FftField;
+use ark_poly::{univariate::DensePolynomial, Polynomial};
+
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
+
+use crate::{
+    domain::Domain,
+    statement::single::SingleStatement,
+    utils::{squeeze_integer, stack_evaluations},
+    witness::Witness,
+};
+
+pub struct SingleWitness<F, M, S>
+where
+    F: FftField,
+    M: MerkleConfig,
+    S: CryptographicSponge,
+{
+    argument: SingleWitnessArgument<F, M, S>,
+    coeff: DensePolynomial<F>,
+    domain: Domain<F>,
+    commitment: MerkleTree<M>,
+    committed_values: Vec<Vec<F>>,
+}
+
+impl<F, M, S> Witness<F, M> for SingleWitness<F, M, S>
+where
+    F: FftField,
+    M: MerkleConfig<Leaf = Vec<F>> + Clone,
+    M::InnerDigest: Absorb,
+    S: CryptographicSponge,
+    S::Config: Clone,
+{
+    type Argument = SingleWitnessArgument<F, M, S>;
+    type Commitment = MerkleTree<M>;
+    type CommittedValues = Vec<Vec<F>>;
+    type Challenges = Vec<usize>;
+    type ChallengeAnswers = MultiPath<M>;
+    type Statement = SingleStatement<F, M, S>;
+    type MerkleConfig = M;
+
+    fn new(argument: Self::Argument) -> Self {
+        // 1) Generate a commitment for the argument
+        let evals: Vec<F> = argument
+            .coeff
+            .evaluate_over_domain_by_ref(argument.domain.backing_domain)
+            .evals;
+        let committed_values = stack_evaluations(evals, argument.folding_factor);
+        let commitment = MerkleTree::<M>::new(
+            &argument.merkle_leaf_hash_param,
+            &argument.merkle_two_to_one_param,
+            &committed_values,
+        )
+        .unwrap();
+
+        // 2) Keep everything the prover will need
+        Self {
+            argument: argument.clone(),
+            coeff: argument.coeff,
+            domain: argument.domain,
+            commitment,
+            committed_values,
+        }
+    }
+    fn coeff(&self) -> DensePolynomial<F> {
+        self.coeff.clone()
+    }
+    fn coeff_degree(&self) -> usize {
+        self.coeff.degree()
+    }
+    fn commitment_digest(&self) -> M::InnerDigest {
+        self.commitment.root()
+    }
+    fn commitment(&self) -> MerkleTree<M> {
+        self.commitment.clone()
+    }
+    fn committed_values(&self) -> Self::CommittedValues {
+        self.committed_values.clone()
+    }
+    fn challenges(&self, num_challenges: usize) -> Vec<usize> {
+        // absorb the commitment digest
+        let mut sponge = S::new(&self.argument.sponge_config);
+        sponge.absorb(&self.commitment.root());
+        // squeeze out the challenges as indices
+        let mut challenges: Self::Challenges = Vec::with_capacity(num_challenges);
+        for _ in 0..num_challenges {
+            challenges.push(squeeze_integer(&mut sponge, self.committed_values.len()));
+        }
+        challenges
+    }
+    fn challenge_answers(&self, challenges: Self::Challenges) -> Self::ChallengeAnswers {
+        self.commitment.generate_multi_proof(challenges).unwrap()
+    }
+    fn domain(&self) -> Domain<F> {
+        self.domain.clone()
+    }
+    fn statement(&self) -> Self::Statement {
+        SingleStatement::<F, M, S>::new(self.commitment_digest())
+    }
+    fn verify(
+        &self,
+        challenges: Self::Challenges,
+        challenge_answers: Self::ChallengeAnswers,
+    ) -> bool {
+        if challenge_answers.leaf_indexes != challenges {
+            return false;
+        }
+        // Hand the multi-path check only the opened leaves, in challenge order
+        let opened_leaves: Vec<Vec<F>> = challenges
+            .iter()
+            .map(|i| self.committed_values[*i].clone())
+            .collect();
+        challenge_answers
+            .verify(
+                &self.argument.merkle_leaf_hash_param,
+                &self.argument.merkle_two_to_one_param,
+                &self.commitment.root(),
+                opened_leaves,
+            )
+            .unwrap()
+    }
+}
+impl<F, M, S> Clone for SingleWitness<F, M, S>
+where
+    F: FftField,
+    M: MerkleConfig + Clone,
+    S: CryptographicSponge,
+    S::Config: Clone,
+{
+    fn clone(&self) -> Self {
+        Self {
+            argument: self.argument.clone(),
+            coeff: self.coeff.clone(),
+            domain: self.domain.clone(),
+            commitment: self.commitment.clone(),
+            committed_values: self.committed_values.clone(),
+        }
+    }
+}
+
+// Use a SingleWitnessArgument to instantiate a SingleWitness
+#[derive(Clone)]
+pub struct SingleWitnessArgument<F, M, S>
+where
+    F: FftField,
+    M: MerkleConfig,
+    S: CryptographicSponge,
+{
+    pub coeff: DensePolynomial<F>,
+    pub domain: Domain<F>,
+    pub folding_factor: usize,
+    pub merkle_leaf_hash_param: LeafParam<M>,
+    pub merkle_two_to_one_param: TwoToOneParam<M>,
+    pub sponge_config: S::Config,
+}
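The `Iterator` implementation on `STIRVerifierState` advances the verifier one round per `next()` call, with `is_verified` recording whether the transition into the current round succeeded (which is why it starts out `true` for round 0). A sketch of the driving loop, mirroring the `verify` method at the top of this patch; `config`, `commitment_digest`, and `proof` are assumed to already be in scope with the types used in `src/stir/verifier_state.rs`:

```rust
// Hypothetical driver: seed the transcript from the commitment digest, then
// replay every round and require each transition to have succeeded.
let accepted = STIRVerifierState::new(config, commitment_digest, proof)
    .into_iter()
    .all(|state| state.is_verified());
assert!(accepted, "some STIR round failed to verify");
```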
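The `invert` method above amortizes field inversions: it concatenates everything that needs inverting into one buffer, runs `ark_ff::batch_inversion` (one inversion plus O(n) multiplications), then splits the buffer back apart in reverse push order. A minimal, self-contained sketch of that packing trick with illustrative values:

```rust
use ark_ff::batch_inversion;
use ark_test_curves::bls12_381::Fr;

#[test]
fn batch_inversion_packing() {
    let a = vec![Fr::from(2u64), Fr::from(3u64)];
    let b = vec![Fr::from(5u64)];
    // Pack: a ++ b ++ [7], then invert everything in one pass.
    let mut flat: Vec<Fr> = a.iter().chain(b.iter()).cloned().collect();
    flat.push(Fr::from(7u64));
    batch_inversion(&mut flat);
    // Unpack in reverse order of packing.
    let seven_inv = flat.pop().unwrap();
    let b_inv = flat.split_off(a.len());
    let a_inv = flat;
    assert_eq!(seven_inv * Fr::from(7u64), Fr::from(1u64));
    assert_eq!(a_inv[0] * Fr::from(2u64), Fr::from(1u64));
    assert_eq!(b_inv[0] * Fr::from(5u64), Fr::from(1u64));
}
```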
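`field_size_bits` in `src/test_helpers/fields.rs` multiplies the base-prime-field bit size by the extension degree (its stripped generic parameter is presumably `<F: Field>`). For the prime fields defined there the extension degree is 1:

```rust
use crate::test_helpers::fields::{field_size_bits, Field128, Field64};

#[test]
fn prime_field_bit_sizes() {
    // Field64's modulus 2^64 - 2^32 + 1 occupies exactly 64 bits.
    assert_eq!(field_size_bits::<Field64>(), 64);
    // Field128's modulus sits just below 2^128.
    assert_eq!(field_size_bits::<Field128>(), 128);
}
```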
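The Poseidon CRH wrappers in `src/test_helpers/merkle_tree/poseidon.rs` call `HashCounter::add()` on every evaluation and compression, so hash complexity can be measured around any prover or verifier run. A hypothetical measurement harness:

```rust
use crate::test_helpers::merkle_tree::HashCounter;

// Bracket the code under measurement with reset()/get().
HashCounter::reset();
// ... commit, prove, or verify here ...
let poseidon_calls = HashCounter::get();
```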
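With the zero guard added to `is_power_of_two` above, the function now rejects the edge case that the unguarded `n & (n - 1) == 0` gets wrong:

```rust
use crate::utils::is_power_of_two;

#[test]
fn power_of_two_edge_cases() {
    assert!(is_power_of_two(1));
    assert!(is_power_of_two(1 << 20));
    assert!(!is_power_of_two(6));
    // The unguarded `n & (n - 1) == 0` would underflow on (or wrongly accept) 0.
    assert!(!is_power_of_two(0));
}
```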
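`proof_of_work` grinds for a nonce whose absorb-then-squeeze output has enough trailing zero bits, and `proof_of_work_verify` replays that nonce; the check only passes if both sides apply the nonce to identically positioned sponges. A round-trip sketch using the Poseidon test config (whose stripped generic parameter is presumably `<F: PrimeField>`):

```rust
use ark_crypto_primitives::sponge::{poseidon::PoseidonSponge, CryptographicSponge};
use ark_test_curves::bls12_381::Fr;

use crate::test_helpers::fs::poseidon::poseidon_test_config;
use crate::utils::{proof_of_work, proof_of_work_verify};

#[test]
fn proof_of_work_round_trip() {
    let config = poseidon_test_config::<Fr>();
    let mut prover_sponge = PoseidonSponge::new(&config);
    // The verifier's sponge must match the prover's *before* grinding starts.
    let mut verifier_sponge = prover_sponge.clone();
    let nonce = proof_of_work(&mut prover_sponge, 4); // ~2^4 attempts expected
    assert!(proof_of_work_verify(&mut verifier_sponge, 4, nonce));
}
```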
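`squeeze_integer` reduces eight squeezed bytes modulo a power-of-two range (which keeps the sample uniform), and `dedup` sorts and merges duplicates; together they produce the per-round query indices exactly as `randomness` does in the verifier state above. A small sketch:

```rust
use ark_crypto_primitives::sponge::{poseidon::PoseidonSponge, CryptographicSponge};
use ark_test_curves::bls12_381::Fr;

use crate::test_helpers::fs::poseidon::poseidon_test_config;
use crate::utils::{dedup, squeeze_integer};

#[test]
fn query_index_sampling() {
    let mut sponge = PoseidonSponge::<Fr>::new(&poseidon_test_config::<Fr>());
    // 32 draws from [0, 1024); duplicates are merged, so at most 32 distinct
    // indices come back, sorted in increasing order.
    let indices = dedup((0..32).map(|_| squeeze_integer(&mut sponge, 1 << 10)));
    assert!(indices.len() <= 32);
    assert!(indices.windows(2).all(|w| w[0] < w[1]));
}
```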
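`stack_evaluations` regroups evaluations over a size-`n` domain into rows indexed by the size-`n/folding_factor` subdomain, one coset per row. Under the reconstruction above (generic over `Copy`), plain integers are enough to illustrate the layout:

```rust
use crate::utils::stack_evaluations;

#[test]
fn coset_stacking_layout() {
    // evals[i] stands in for f(omega^i) on a domain of size 8.
    let evals: Vec<u32> = (0..8).collect();
    let stacked = stack_evaluations(evals, 2);
    // Row i holds [f(omega^i), f(omega^(i + 4))]: the coset of index i in
    // the half-size subdomain.
    assert_eq!(stacked.len(), 4);
    assert_eq!(stacked[0], vec![0, 4]);
    assert_eq!(stacked[3], vec![3, 7]);
}
```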
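Finally, a sketch of the commit/challenge/answer/check cycle that `SingleWitness` implements, built on the Poseidon test helpers. The degree, rate, and folding factor are illustrative; `Domain::new(degree, rate)` is assumed to have the signature used by `STIRVerifierState::new`, and `default_config`'s stripped generic parameter is assumed to be the field type:

```rust
use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial};

use crate::domain::Domain;
use crate::test_helpers::fields::Field256;
use crate::test_helpers::fs::poseidon::poseidon_test_config;
use crate::test_helpers::merkle_tree::poseidon as poseidon_mt;
use crate::witness::single::{SingleWitness, SingleWitnessArgument};
use crate::witness::Witness;

type F = Field256;
type M = poseidon_mt::MerkleTreeParams<F>;
type S = PoseidonSponge<F>;

#[test]
fn single_witness_round_trip() {
    let mut rng = ark_std::test_rng();
    let (leaf_params, two_to_one_params) = poseidon_mt::default_config::<F>(&mut rng, 2);
    let argument = SingleWitnessArgument::<F, M, S> {
        coeff: DensePolynomial::rand(15, &mut rng), // a degree-15 polynomial
        domain: Domain::new(16, 2).unwrap(),        // illustrative degree/rate
        folding_factor: 2,
        merkle_leaf_hash_param: leaf_params,
        merkle_two_to_one_param: two_to_one_params,
        sponge_config: poseidon_test_config::<F>(),
    };
    // Commit, derive Fiat-Shamir query indices, open them, and self-check.
    let witness = SingleWitness::new(argument);
    let challenges = witness.challenges(4);
    let answers = witness.challenge_answers(challenges.clone());
    assert!(witness.verify(challenges, answers));
}
```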