diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 7f68372..c35100f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -79,6 +79,11 @@ jobs:
check_no_std:
name: Check no_std
runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ rust:
+ - stable
+ - nightly
steps:
- name: Checkout
uses: actions/checkout@v2
diff --git a/.gitignore b/.gitignore
index 6a0375b..c01bed6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,4 +9,9 @@ Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
-.idea
\ No newline at end of file
+# Editors
+.idea
+.vscode/**
+
+# Mac OSX
+.DS_Store
\ No newline at end of file
diff --git a/Cargo.toml b/Cargo.toml
index fefd556..c3eeec3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,60 +1,53 @@
[package]
name = "ark-ldt"
-version = "0.1.0"
+version = "1.1.0"
authors = ["arkworks contributors"]
-edition = "2018"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+edition = "2021"
[dependencies]
-ark-ff = { version = "^0.3.0", default-features = false }
-ark-std = { version = "^0.3.0", default-features = false }
-ark-r1cs-std = { version = "^0.3.0", default-features = false}
-ark-sponge = { version = "^0.3.0", default-features = false }
-ark-poly = { version = "0.3.0", default-features = false }
-ark-relations = { version = "^0.3.0", default-features = false, optional = true}
-tracing = { version = "0.1", default-features = false, features = [ "attributes" ], optional = true}
-
-[dev-dependencies]
-ark-test-curves = { version = "^0.3.0", default-features = false, features = ["bls12_381_scalar_field", "mnt4_753_scalar_field"] }
+ark-crypto-primitives = { version = "^0.4.0", default-features = false, features = ["merkle_tree", "sponge"] }
+ark-ff = { version = "0.4", default-features = false }
+ark-poly = { version = "0.4.2", default-features = false }
+ark-std = { version = "^0.4.0", default-features = false }
+ark-serialize = { version = "0.4", default-features = false }
+ark-test-curves = { version = "0.4", default-features = false, features = ["bls12_381_curve"] }
+itertools = { version = "0.13.0", default-features = false }
+spin = { version = "0.9.8", default-features = false, features = ["once", "mutex", "spin_mutex"] }
+hashbrown = { version = "0.14.5" }
[patch.crates-io]
-ark-sponge = {git = "https://github.com/arkworks-rs/sponge"}
-ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std" }
+ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" }
ark-ec = { git = "https://github.com/arkworks-rs/algebra" }
ark-ff = { git = "https://github.com/arkworks-rs/algebra" }
-ark-poly = { git = "https://github.com/arkworks-rs/algebra" }
ark-serialize = { git = "https://github.com/arkworks-rs/algebra" }
ark-std = { git = "https://github.com/arkworks-rs/std" }
ark-test-curves = { git = "https://github.com/arkworks-rs/algebra" }
-[profile.release]
-opt-level = 3
-lto = "thin"
-incremental = true
-panic = 'abort'
-
-[profile.bench]
-opt-level = 3
-debug = false
-rpath = false
-lto = "thin"
-incremental = true
-debug-assertions = false
-
-[profile.dev]
-opt-level = 0
-panic = 'abort'
-
-[profile.test]
-opt-level = 3
-lto = "thin"
-incremental = true
-debug-assertions = true
-debug = true
-
[features]
default = ["std"]
-std = ["ark-ff/std", "ark-std/std", "ark-relations/std", "ark-r1cs-std/std", "ark-sponge/std", "ark-poly/std"]
-r1cs = ["ark-sponge/r1cs", "tracing", "ark-relations"]
+std = [ "ark-crypto-primitives/std", "ark-ff/std", "ark-poly/std", "ark-serialize/std" ]
+[profile.release]
+ opt-level = 3
+ lto = "thin"
+ incremental = true
+ panic = 'abort'
+
+ [profile.bench]
+ opt-level = 3
+ debug = false
+ rpath = false
+ lto = "thin"
+ incremental = true
+ debug-assertions = false
+
+ [profile.dev]
+ opt-level = 0
+ panic = 'abort'
+
+ [profile.test]
+ opt-level = 3
+ lto = "thin"
+ incremental = true
+ debug-assertions = true
+ debug = true
diff --git a/README.md b/README.md
index d5fa55e..09bdc6f 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@ This implementation is NOT ready for production use.
## Overview
-A (univariate) low-degree test is an IOP that checks that a given function is close to a (univariate) polynomial of low degree. This library provides two LDTs: the **direct low-degree test** and the **FRI Protocol**. The library also comes with R1CS constraints for the LDT verifiers. Enable `r1cs` feature to use those constraints.
+A (univariate) low-degree test is an IOP that checks that a given function is close to a (univariate) polynomial of low degree. This library provides three LDTs: **Direct**, **FRI**, and **STIR**.
## Build Guide
@@ -44,8 +44,11 @@ To use this library, you need to add the following to your `Cargo.toml`. Note th
ark-ldt = {git = "https://github.com/arkworks-rs/ldt", branch="main", default-features = false}
[patch.crates-io]
-ark-sponge = {git = "https://github.com/arkworks-rs/sponge"}
-ark-r1cs-std = {git = "https://github.com/arkworks-rs/r1cs-std", branch = "master"}
+ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" }
+ark-ec = { git = "https://github.com/arkworks-rs/algebra/" }
+ark-ff = { git = "https://github.com/arkworks-rs/algebra" }
+ark-serialize = { git = "https://github.com/arkworks-rs/algebra" }
+ark-test-curves = { git = "https://github.com/arkworks-rs/algebra" }
```
## License
@@ -62,10 +65,14 @@ conditions.
## Reference papers
[Fractal: Post-Quantum and Transparent Recursive Proofs from Holography][cos20]
-Alessandro Chiesa, Dev Ojha, Nicholas Spooner
+Alessandro Chiesa, Dev Ojha, Nicholas Spooner
-[Fast Reed-Solomon Interactive Oracle Proofs of Proximity][bbhr17]
+[FRI: Fast Reed-Solomon Interactive Oracle Proofs of Proximity][bbhr17]
Eli Ben-Sasson, Iddo Bentov, Ynon Horesh, Michael Riabzev
+[STIR: Reed–Solomon Proximity Testing with Fewer Queries][acfy24]
+Gal Arnon, Alessandro Chiesa, Giacomo Fenzi, Eylon Yogev
+
[cos20]: https://eprint.iacr.org/2019/1076
[bbhr17]: https://eccc.weizmann.ac.il/report/2017/134/
+[acfy24]: https://eprint.iacr.org/2024/390
diff --git a/src/direct/config.rs b/src/direct/config.rs
new file mode 100644
index 0000000..8c8c393
--- /dev/null
+++ b/src/direct/config.rs
@@ -0,0 +1,31 @@
+use ark_crypto_primitives::{
+ merkle_tree::{Config as MerkleConfig, LeafParam, TwoToOneParam},
+ sponge::CryptographicSponge,
+};
+
+#[derive(Clone)]
+pub struct DirectConfig {
+ pub degree: usize,
+ pub num_challenges: usize,
+ pub merkle_leaf_hash_param: LeafParam,
+ pub merkle_two_to_one_param: TwoToOneParam,
+ pub sponge_config: S::Config,
+}
+
+impl DirectConfig {
+ pub fn new(
+ degree: usize,
+ num_challenges: usize,
+ merkle_leaf_hash_param: LeafParam,
+ merkle_two_to_one_param: TwoToOneParam,
+ sponge_config: S::Config,
+ ) -> Self {
+ DirectConfig {
+ degree,
+ num_challenges,
+ merkle_leaf_hash_param,
+ merkle_two_to_one_param,
+ sponge_config,
+ }
+ }
+}
diff --git a/src/direct/constraints.rs b/src/direct/constraints.rs
deleted file mode 100644
index cb4d51b..0000000
--- a/src/direct/constraints.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-use ark_ff::PrimeField;
-use ark_r1cs_std::boolean::Boolean;
-use ark_r1cs_std::eq::EqGadget;
-use ark_r1cs_std::fields::fp::FpVar;
-use ark_r1cs_std::poly::polynomial::univariate::dense::DensePolynomialVar;
-use ark_relations::r1cs::SynthesisError;
-use ark_std::marker::PhantomData;
-
-/// Constraints for direct ldt.
-pub struct DirectLDTGadget {
- _marker: PhantomData,
-}
-
-impl DirectLDTGadget {
- /// ### Verifier Side
- ///
- /// The Direct LDT Verify function tests that given a list of coefficients `a_0, a_1, ..., a_{d-1}`
- /// an evaluation point `x`, and claimed evaluation `y`, that `y = \sum_{i =0}^{d} a_i x^i`.
- /// This proves that the provided coefficients of a degree `d` polynomial agree with the claimed
- /// `(evaluation_point, claimed_evaluation)` pair.
- /// This is used to construct a low degree test for an oracle to a claimed polynomials evaluations over a domain.
- /// By sampling enough (domain_element, claimed_evaluation) pairs from the oracle, and testing them
- /// via this method, you become convinced w.h.p. that the oracle is sufficiently close to the claimed coefficients list.
- pub fn verify(
- evaluation_point: FpVar,
- claimed_evaluation: FpVar,
- coefficients: &DensePolynomialVar,
- degree_bound: usize,
- ) -> Result, SynthesisError> {
- // make sure the degree is within degree_bound. No need to include degree_bound check
- // in constraints because the verifier can just verify the size of circuit.
- assert!(
- coefficients.coeffs.len() <= degree_bound + 1,
- "polynomial degree out of bound"
- );
- coefficients
- .evaluate(&evaluation_point)?
- .is_eq(&claimed_evaluation)
- }
-}
diff --git a/src/direct/ldt.rs b/src/direct/ldt.rs
new file mode 100644
index 0000000..8183018
--- /dev/null
+++ b/src/direct/ldt.rs
@@ -0,0 +1,50 @@
+use ark_crypto_primitives::{
+ merkle_tree::{Config as MerkleConfig, MultiPath},
+ sponge::{Absorb, CryptographicSponge},
+};
+use ark_ff::FftField;
+use ark_std::{marker::PhantomData, vec::Vec};
+
+use crate::{
+ direct::{config::DirectConfig, prover::DirectProver, verifier::DirectVerifier},
+ ldt::{LowDegreeTest, Prover, Verifier},
+ witness::Witness,
+};
+
+use super::proof::DirectProof;
+
+pub struct DirectLDT> {
+ _field: PhantomData,
+ _merkle_config: PhantomData,
+ _sponge: PhantomData,
+ _witness: PhantomData,
+}
+impl LowDegreeTest for DirectLDT
+where
+ F: FftField,
+ M: MerkleConfig> + Clone,
+ M::InnerDigest: Absorb,
+ S: CryptographicSponge,
+ S::Config: Clone,
+ W: Witness<
+ F,
+ M,
+ MerkleConfig = M,
+ ChallengeAnswers = MultiPath,
+ CommittedValues = Vec>,
+ Challenges = Vec,
+ > + Clone,
+ W::ChallengeAnswers: Clone,
+{
+ type LDTConfig = DirectConfig;
+ type Proof = DirectProof;
+ type Prover = DirectProver;
+ type Verifier = DirectVerifier;
+
+ fn new(config: Self::LDTConfig) -> (Self::Prover, Self::Verifier) {
+ (
+ Self::Prover::new(config.clone()),
+ Self::Verifier::new(config),
+ )
+ }
+}
diff --git a/src/direct/mod.rs b/src/direct/mod.rs
index c06d701..a28a596 100644
--- a/src/direct/mod.rs
+++ b/src/direct/mod.rs
@@ -1,88 +1,65 @@
-/// R1CS constraints for DirectLDT
-#[cfg(feature = "r1cs")]
-pub mod constraints;
-
-use crate::domain::Radix2CosetDomain;
-use ark_ff::PrimeField;
-use ark_poly::univariate::DensePolynomial;
-use ark_poly::Polynomial;
-use ark_std::marker::PhantomData;
-use ark_std::vec::Vec;
-/// Direct LDT by interpolating evaluations and truncating coefficients to low degree.
-/// /// This requires communication linear in the degree bound; use FRI for better communication complexity.
-pub struct DirectLDT {
- marker: PhantomData,
-}
-
-/// A linear-communication protocol for testing if a function is a polynomial of certain degree.
-/// Method is described in Aurora appendix C.1.
-///
-/// For now, the domain of the function needs to support IFFT.
-impl DirectLDT {
- /// ### Prover Side
- ///
- /// Generate the coefficient of the low-degree polynomial obtained by interpolating the domain evaluations.
- /// The polynomial is trimmed to `degree_bound` when necessary.
- pub fn generate_low_degree_coefficients(
- domain: Radix2CosetDomain,
- codewords: Vec,
- degree_bound: usize,
- ) -> DensePolynomial {
- let mut poly = domain.interpolate(codewords);
- // trim higher degree: if poly is higher degree, then the soundness should fail
- poly.coeffs.truncate(degree_bound + 1);
- poly
- }
-
- /// ### Verifier Side
- ///
- /// The Direct LDT Verify function tests that given a list of coefficients `a_0, a_1, ..., a_{d-1}`
- /// an evaluation point `x`, and claimed evaluation `y`, that `y = \sum_{i =0}^{d} a_i x^i`.
- /// This proves that the provided coefficients of a degree `d` polynomial agree with the claimed
- /// `(evaluation_point, claimed_evaluation)` pair.
- /// This is used to construct a low degree test for an oracle to a claimed polynomials evaluations over a domain.
- /// By sampling enough (domain_element, claimed_evaluation) pairs from the oracle, and testing them
- /// via this method, you become convinced w.h.p. that the oracle is sufficiently close to the claimed coefficients list.
- pub fn verify(
- evaluation_point: F,
- claimed_evaluation: F,
- bounded_coefficients: &DensePolynomial,
- ) -> bool {
- return bounded_coefficients.evaluate(&evaluation_point) == claimed_evaluation;
- }
-}
+pub mod config;
+pub mod ldt;
+pub mod proof;
+pub mod prover;
+pub mod verifier;
#[cfg(test)]
mod tests {
- use crate::direct::{DirectLDT, Radix2CosetDomain};
- use ark_ff::UniformRand;
+ use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
use ark_poly::univariate::DensePolynomial;
use ark_poly::DenseUVPolynomial;
use ark_std::test_rng;
- use ark_test_curves::bls12_381::Fr;
+
+ use crate::{
+ direct::{config::DirectConfig, ldt::DirectLDT},
+ domain::Domain,
+ ldt::{LowDegreeTest, Prover, Verifier},
+ test_helpers::{fields::Field256, fs, merkle_tree},
+ witness::{
+ single::{SingleWitness, SingleWitnessArgument},
+ Witness,
+ },
+ };
+
+ type TestField = Field256;
+ type TestMerkleConfig = merkle_tree::poseidon::MerkleTreeParams;
+ type TestSpongeConfig = PoseidonSponge;
+ type TestWitness = SingleWitness;
#[test]
fn test_direct_ldt() {
- let degree = 51;
-
+ // get ready
let mut rng = test_rng();
- let poly = DensePolynomial::::rand(degree, &mut rng);
- let domain_coset = Radix2CosetDomain::new_radix2_coset(52, Fr::rand(&mut rng));
- let evaluations = domain_coset.evaluate(&poly);
+ let (merkle_leaf_hash_param, merkle_two_to_one_param) =
+ merkle_tree::poseidon::default_config::(&mut rng, 2);
+ let config: DirectConfig = DirectConfig {
+ degree: 22,
+ num_challenges: 2,
+ merkle_leaf_hash_param: merkle_leaf_hash_param.clone(),
+ merkle_two_to_one_param: merkle_two_to_one_param.clone(),
+ sponge_config: fs::poseidon::poseidon_test_config::(),
+ };
+ let (prover, verifier) =
+ DirectLDT::::new(
+ config.clone(),
+ );
- let low_degree_poly = DirectLDT::generate_low_degree_coefficients(
- domain_coset.clone(),
- evaluations.to_vec(),
- degree,
- );
+ // generate witness
+ let witness: SingleWitness =
+ SingleWitness::new(SingleWitnessArgument {
+ coeff: DensePolynomial::::rand(config.degree, &mut rng),
+ domain: Domain::::new(config.degree, 0).unwrap(),
+ folding_factor: 1,
+ merkle_leaf_hash_param,
+ merkle_two_to_one_param,
+ sponge_config: config.sponge_config,
+ });
- let sampled_element = domain_coset.element(15);
- let sampled_evaluation = evaluations[15];
+ // prove
+ let direct_proof = prover.prove(&witness);
- assert!(DirectLDT::verify(
- sampled_element,
- sampled_evaluation,
- &low_degree_poly
- ))
+ // verify
+ assert_eq!(verifier.verify(&witness.statement(), &direct_proof), true);
}
}
diff --git a/src/direct/proof.rs b/src/direct/proof.rs
new file mode 100644
index 0000000..6c41d48
--- /dev/null
+++ b/src/direct/proof.rs
@@ -0,0 +1,85 @@
+use ark_crypto_primitives::{
+ merkle_tree::{Config as MerkleConfig, LeafParam, MultiPath, TwoToOneParam},
+ sponge::{Absorb, CryptographicSponge},
+};
+use ark_ff::FftField;
+
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
+
+use crate::utils::{dedup, squeeze_integer};
+
+pub struct DirectProof
+where
+ F: FftField,
+ M: MerkleConfig,
+ S: CryptographicSponge,
+{
+ challenge_answers: MultiPath,
+ committed_values: Vec>,
+ merkle_leaf_hash_param: LeafParam,
+ merkle_two_to_one_param: TwoToOneParam,
+ sponge_config: S::Config,
+}
+
+impl DirectProof
+where
+ F: FftField,
+ M: MerkleConfig>,
+ M::InnerDigest: Absorb,
+ S: CryptographicSponge,
+{
+ pub fn new(
+ challenge_answers: MultiPath,
+ committed_values: Vec>,
+ merkle_leaf_hash_param: LeafParam,
+ merkle_two_to_one_param: TwoToOneParam,
+ sponge_config: S::Config,
+ ) -> Self {
+ Self {
+ challenge_answers,
+ committed_values,
+ merkle_leaf_hash_param,
+ merkle_two_to_one_param,
+ sponge_config,
+ }
+ }
+ pub fn challenges(
+ &self,
+ commitment_digest: M::InnerDigest,
+ num_challenges: usize,
+ ) -> Vec {
+ // absorb commitment digest
+ let mut sponge = S::new(&self.sponge_config);
+ sponge.absorb(&commitment_digest);
+ // squeeze out the challenges as indices
+ let mut challenges = Vec::with_capacity(num_challenges);
+ for _ in 0..num_challenges {
+ challenges.push(squeeze_integer(&mut sponge, self.committed_values.len()));
+ }
+ dedup(challenges)
+ }
+ pub fn num_committed_values(&self) -> usize {
+ self.committed_values.len()
+ }
+ pub fn verify(&self, commitment_digest: M::InnerDigest, challenges: Vec) -> bool {
+ if self.challenge_answers.leaf_indexes != challenges {
+ return false;
+ }
+
+ let challenge_values: Vec> = self
+ .challenge_answers
+ .leaf_indexes
+ .iter()
+ .map(|&i| self.committed_values.get(i).unwrap().clone())
+ .collect();
+ self.challenge_answers
+ .verify(
+ &self.merkle_leaf_hash_param,
+ &self.merkle_two_to_one_param,
+ &commitment_digest,
+ challenge_values,
+ )
+ .unwrap()
+ }
+}
diff --git a/src/direct/prover.rs b/src/direct/prover.rs
new file mode 100644
index 0000000..1a327d8
--- /dev/null
+++ b/src/direct/prover.rs
@@ -0,0 +1,73 @@
+use ark_crypto_primitives::{
+ merkle_tree::{Config as MerkleConfig, MultiPath},
+ sponge::{Absorb, CryptographicSponge},
+};
+
+use ark_ff::FftField;
+use ark_std::marker::PhantomData;
+
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
+
+use crate::{
+ direct::{config::DirectConfig, proof::DirectProof},
+ ldt::Prover,
+ witness::Witness,
+};
+
+pub struct DirectProver
+where
+ F: FftField,
+ M: MerkleConfig,
+ S: CryptographicSponge,
+ W: Witness,
+{
+ config: DirectConfig,
+ _field: PhantomData,
+ _merkle_config: PhantomData,
+ _sponge: PhantomData,
+ _witness: PhantomData,
+}
+
+impl Prover for DirectProver
+where
+ F: FftField,
+ M: MerkleConfig>,
+ M::InnerDigest: Absorb,
+ S: CryptographicSponge,
+ S::Config: Clone,
+ W: Witness<
+ F,
+ M,
+ MerkleConfig = M,
+ CommittedValues = Vec>,
+ ChallengeAnswers = MultiPath,
+ Challenges = Vec,
+ > + Clone,
+ W::ChallengeAnswers: Clone,
+{
+ type Witness = W;
+ type ProverConfig = DirectConfig;
+ type Proof = DirectProof;
+
+ fn new(config: DirectConfig) -> Self {
+ Self {
+ config,
+ _field: PhantomData::,
+ _merkle_config: PhantomData::,
+ _sponge: PhantomData::,
+ _witness: PhantomData::,
+ }
+ }
+
+ fn prove(&self, witness: &W) -> Self::Proof {
+ let challenges = witness.challenges(self.config.num_challenges);
+ DirectProof::::new(
+ witness.challenge_answers(challenges),
+ witness.committed_values(),
+ self.config.merkle_leaf_hash_param.clone(),
+ self.config.merkle_two_to_one_param.clone(),
+ self.config.sponge_config.clone(),
+ )
+ }
+}
diff --git a/src/direct/verifier.rs b/src/direct/verifier.rs
new file mode 100644
index 0000000..e52eb01
--- /dev/null
+++ b/src/direct/verifier.rs
@@ -0,0 +1,67 @@
+use ark_crypto_primitives::{
+ merkle_tree::Config as MerkleConfig,
+ sponge::{Absorb, CryptographicSponge},
+};
+use ark_ff::FftField;
+use ark_std::marker::PhantomData;
+
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
+
+use crate::{
+ direct::{config::DirectConfig, proof::DirectProof},
+ ldt::Verifier,
+ statement::single::SingleStatement,
+ utils::{dedup, squeeze_integer},
+ witness::Witness,
+};
+
+pub struct DirectVerifier
+where
+ F: FftField,
+ M: MerkleConfig,
+ S: CryptographicSponge,
+ W: Witness,
+{
+ config: DirectConfig,
+ _field: PhantomData,
+ _merkle_config: PhantomData,
+ _sponge: PhantomData,
+ _witness: PhantomData,
+}
+impl Verifier for DirectVerifier
+where
+ F: FftField,
+ M: MerkleConfig>,
+ M::InnerDigest: Absorb,
+ S: CryptographicSponge,
+ W: Witness,
+ W::ChallengeAnswers: Clone,
+{
+ type Statement = SingleStatement;
+ type VerifierConfig = DirectConfig;
+ type Proof = DirectProof;
+
+ fn new(config: DirectConfig) -> Self {
+ Self {
+ config,
+ _field: PhantomData::,
+ _merkle_config: PhantomData::,
+ _sponge: PhantomData::,
+ _witness: PhantomData::,
+ }
+ }
+ fn verify(&self, statement: &Self::Statement, proof: &Self::Proof) -> bool {
+ // regenerate the challenges
+ let mut sponge = S::new(&self.config.sponge_config);
+ sponge.absorb(&statement.commitment_digest());
+ // squeeze out the challenges as indices
+ let mut challenges = Vec::with_capacity(self.config.num_challenges);
+ for _ in 0..self.config.num_challenges {
+ challenges.push(squeeze_integer(&mut sponge, proof.num_committed_values()));
+ }
+ challenges = dedup(challenges);
+ // verify the proof against the claim
+ proof.verify(statement.commitment_digest(), challenges)
+ }
+}
diff --git a/src/domain.rs b/src/domain.rs
new file mode 100644
index 0000000..42415af
--- /dev/null
+++ b/src/domain.rs
@@ -0,0 +1,190 @@
+use ark_ff::FftField;
+use ark_poly::{
+ EvaluationDomain, GeneralEvaluationDomain, MixedRadixEvaluationDomain, Radix2EvaluationDomain,
+};
+use ark_std::ops::Deref;
+
+#[derive(Debug, Clone)]
+pub struct Domain {
+ pub root_of_unity: F,
+ pub root_of_unity_inv: F,
+ pub backing_domain: GeneralEvaluationDomain,
+}
+
+impl Domain {
+ pub fn new(degree: usize, log_rho_inv: usize) -> Option {
+ let size = degree * (1 << log_rho_inv);
+ let backing_domain = GeneralEvaluationDomain::new(size)?;
+ let root_of_unity: F = match backing_domain {
+ GeneralEvaluationDomain::Radix2(r2) => r2.group_gen,
+ GeneralEvaluationDomain::MixedRadix(mr) => mr.group_gen,
+ };
+ let root_of_unity_inv = match backing_domain {
+ GeneralEvaluationDomain::Radix2(r2) => r2.group_gen_inv,
+ GeneralEvaluationDomain::MixedRadix(mr) => mr.group_gen_inv,
+ };
+ Some(Self {
+ backing_domain,
+ root_of_unity,
+ root_of_unity_inv,
+ })
+ }
+
+ pub fn size(&self) -> usize {
+ self.backing_domain.size()
+ }
+
+ // Takes the underlying backing_domain = <w>, and computes the new domain <w^power>
+ // (note this will have size |L| / power)
+ // NOTE: This should not be mixed with scale_offset
+ fn scale_generator_by(&self, power: usize) -> GeneralEvaluationDomain {
+ let starting_size = self.size();
+ assert_eq!(starting_size % power, 0);
+ let new_size = starting_size / power;
+ let log_size_of_group = new_size.trailing_zeros();
+ let size_as_field_element = F::from(new_size as u64);
+
+ match self.backing_domain {
+ GeneralEvaluationDomain::Radix2(r2) => {
+ let group_gen = r2.group_gen.pow([power as u64]);
+ let group_gen_inv = group_gen.inverse().unwrap();
+
+ let offset = r2.offset.pow([power as u64]);
+ let offset_inv = r2.offset_inv.pow([power as u64]);
+ let offset_pow_size = offset.pow([new_size as u64]);
+
+ GeneralEvaluationDomain::Radix2(Radix2EvaluationDomain {
+ size: new_size as u64,
+ log_size_of_group,
+ size_as_field_element,
+ size_inv: size_as_field_element.inverse().unwrap(),
+ group_gen,
+ group_gen_inv,
+ offset,
+ offset_inv,
+ offset_pow_size,
+ })
+ }
+ GeneralEvaluationDomain::MixedRadix(mr) => {
+ let group_gen = mr.group_gen.pow([power as u64]);
+ let group_gen_inv = mr.group_gen_inv.pow([power as u64]);
+
+ let offset = mr.offset.pow([power as u64]);
+ let offset_inv = mr.offset_inv.pow([power as u64]);
+ let offset_pow_size = offset.pow([new_size as u64]);
+
+ GeneralEvaluationDomain::MixedRadix(MixedRadixEvaluationDomain {
+ size: new_size as u64,
+ log_size_of_group,
+ size_as_field_element,
+ size_inv: size_as_field_element.inverse().unwrap(),
+ group_gen,
+ group_gen_inv,
+ offset,
+ offset_inv,
+ offset_pow_size,
+ })
+ }
+ }
+ }
+
+ // Take a domain L_0 = o * <w> and compute a new domain L_1 = w * o^power * <w^power>.
+ // Note that L_0^k \cap L_1 = \emptyset for k > power.
+ fn scale_with_offset(&self, power: usize) -> GeneralEvaluationDomain {
+ let starting_size = self.size();
+ assert_eq!(starting_size % power, 0);
+ let new_size = starting_size / power;
+ let log_size_of_group = new_size.trailing_zeros();
+ let size_as_field_element = F::from(new_size as u64);
+ match self.backing_domain {
+ GeneralEvaluationDomain::Radix2(r2) => {
+ let group_gen = r2.group_gen.pow([power as u64]);
+ let group_gen_inv = r2.group_gen_inv.pow([power as u64]);
+
+ let offset = r2.offset.pow([power as u64]) * self.root_of_unity;
+ let offset_inv = r2.offset_inv.pow([power as u64]) * self.root_of_unity_inv;
+
+ GeneralEvaluationDomain::Radix2(Radix2EvaluationDomain {
+ size: new_size as u64,
+ log_size_of_group,
+ size_as_field_element,
+ size_inv: size_as_field_element.inverse().unwrap(),
+ group_gen,
+ group_gen_inv,
+ offset,
+ offset_inv,
+ offset_pow_size: offset.pow([new_size as u64]),
+ })
+ }
+ GeneralEvaluationDomain::MixedRadix(mr) => {
+ let group_gen = mr.group_gen.pow([power as u64]);
+ let group_gen_inv = mr.group_gen_inv.pow([power as u64]);
+
+ let offset = mr.offset.pow([power as u64]) * self.root_of_unity;
+ let offset_inv = mr.offset_inv.pow([power as u64]) * self.root_of_unity_inv;
+
+ GeneralEvaluationDomain::MixedRadix(MixedRadixEvaluationDomain {
+ size: new_size as u64,
+ log_size_of_group,
+ size_as_field_element,
+ size_inv: size_as_field_element.inverse().unwrap(),
+ group_gen,
+ group_gen_inv,
+ offset,
+ offset_inv,
+ offset_pow_size: offset.pow([new_size as u64]),
+ })
+ }
+ }
+ }
+
+ pub fn scale(&self, power: usize) -> Self {
+ Self {
+ backing_domain: self.scale_generator_by(power),
+ ..*self
+ }
+ }
+
+ pub fn scale_offset(&self, power: usize) -> Self {
+ Self {
+ backing_domain: self.scale_with_offset(power),
+ ..*self
+ }
+ }
+}
+
+impl Deref for Domain {
+ type Target = GeneralEvaluationDomain;
+
+ fn deref(&self) -> &Self::Target {
+ &self.backing_domain
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use hashbrown::HashSet;
+
+ use super::*;
+ use crate::test_helpers::fields::Field64 as TestField;
+
+ #[test]
+ fn test_non_overlapping() {
+ let folding_factor = 16;
+
+ let l_0 = Domain::::new(64, 2).unwrap();
+
+ let l_0_k = l_0.scale(folding_factor);
+ let l_1 = l_0.scale_offset(2);
+ let l_1_k = l_1.scale_offset(folding_factor);
+ let l_2 = l_1.scale_offset(2);
+
+ let l_0_k_elements: HashSet<_> = l_0_k.elements().collect();
+ let l_1_elements: HashSet<_> = l_1.elements().collect();
+ let l_1_k_elements: HashSet<_> = l_1_k.elements().collect();
+ let l_2_elements: HashSet<_> = l_2.elements().collect();
+
+ assert_eq!(l_0_k_elements.intersection(&l_1_elements).count(), 0);
+ assert_eq!(l_1_k_elements.intersection(&l_2_elements).count(), 0);
+ }
+}
diff --git a/src/domain/mod.rs b/src/domain/mod.rs
deleted file mode 100644
index f70878e..0000000
--- a/src/domain/mod.rs
+++ /dev/null
@@ -1,312 +0,0 @@
-use ark_ff::PrimeField;
-use ark_poly::polynomial::univariate::DensePolynomial;
-use ark_poly::{
- DenseUVPolynomial, EvaluationDomain, Evaluations, Polynomial, Radix2EvaluationDomain,
-};
-#[cfg(feature = "r1cs")]
-use ark_r1cs_std::bits::boolean::Boolean;
-#[cfg(feature = "r1cs")]
-use ark_r1cs_std::fields::fp::FpVar;
-#[cfg(feature = "r1cs")]
-use ark_r1cs_std::fields::FieldVar;
-#[cfg(feature = "r1cs")]
-use ark_relations::r1cs::SynthesisError;
-use ark_std::vec::Vec;
-
-/// Given domain as `<g>`, `CosetOfDomain` represents `h<g>`
-///
-/// Constraint equivalent is in `r1cs_std::poly::domain`.
-#[derive(Clone, Copy, Eq, PartialEq, Debug)]
-pub struct Radix2CosetDomain {
- /// A non-coset radix 2 domain: ``
- pub base_domain: Radix2EvaluationDomain,
- /// offset `h`
- pub offset: F,
-}
-
-// TODO: Move this to algebra, per https://github.com/arkworks-rs/algebra/issues/88#issuecomment-734963835
-impl Radix2CosetDomain {
- /// Returns a new coset domain.
- pub fn new(base_domain: Radix2EvaluationDomain, offset: F) -> Self {
- Radix2CosetDomain {
- base_domain,
- offset,
- }
- }
-
- /// Returns a coset of size of power of two.
- pub fn new_radix2_coset(coset_size: usize, offset: F) -> Self {
- Self::new(Radix2EvaluationDomain::new(coset_size).unwrap(), offset)
- }
-
- /// Converts a query position to the elements of the unique coset of size `log_coset_size`
- /// within this domain that the query lies in.
- /// `query_position` is an index within this domain.
- /// Returns the positions of coset elements in `self`,
- /// and the coset represented as a Radix2CosetDomain.
- pub fn query_position_to_coset(
- &self,
- query_position: usize,
- log_coset_size: usize,
- ) -> (Vec, Self) {
- // make sure coset position is not out of range
- assert!(
- log_coset_size < self.base_domain.log_size_of_group as usize,
- "query coset size too large"
- );
- assert!(
- query_position < (1 << (self.base_domain.log_size_of_group - log_coset_size as u32)),
- "coset position out of range"
- );
-
- let dist_between_coset_elems =
- 1 << (self.base_domain.log_size_of_group as usize - log_coset_size);
-
- // generate coset
- let c = Self::new_radix2_coset(
- 1 << log_coset_size,
- self.offset * self.gen().pow(&[query_position as u64]),
- );
- // c.base_domain.group_gen = self.gen().pow(&[1 << (self.dim() - log_coset_size)]);
- // c.base_domain.group_gen_inv = c.base_domain.group_gen.inverse().unwrap(); // not necessary
-
- // generate positions
- let mut indices = Vec::with_capacity(1 << log_coset_size);
- for i in 0..(1 << log_coset_size) {
- indices.push(query_position + i * dist_between_coset_elems)
- }
-
- (indices, c)
- }
-
- /// returns the size of the domain
- pub fn size(&self) -> usize {
- self.base_domain.size()
- }
-
- /// return the log 2 size of domain
- pub fn dim(&self) -> usize {
- self.base_domain.log_size_of_group as usize
- }
-
- /// returns generator of the coset
- pub fn gen(&self) -> F {
- self.base_domain.group_gen
- }
-
- /// Given f(x) = \sum a_i x^i. Returns g(x) = \sum a_i h^i x^i
- ///
- /// Note that g(x) = f(hx)
- fn add_offset_to_coeffs(&self, poly: &DensePolynomial) -> DensePolynomial {
- let mut r = F::one();
- let mut transformed_coeff = Vec::with_capacity(poly.coeffs.len());
- for &coeff in poly.coeffs.iter() {
- transformed_coeff.push(coeff * r);
- r *= self.offset
- }
- DensePolynomial::from_coefficients_vec(transformed_coeff)
- }
-
- /// Given g(x) = \sum a_i h^i x^i. Returns f(x) = \sum a_i x^i
- ///
- /// Note that g(x) = f(hx)
- fn remove_offset_from_coeffs(&self, poly: &DensePolynomial) -> DensePolynomial {
- let mut r = F::one();
- let h_inv = self.offset.inverse().unwrap();
- let mut transformed_coeff = Vec::with_capacity(poly.coeffs.len());
- for &coeff in poly.coeffs.iter() {
- transformed_coeff.push(coeff * r);
- r *= h_inv
- }
- DensePolynomial::from_coefficients_vec(transformed_coeff)
- }
-
- /// Evaluate polynomial on this coset
- pub fn evaluate(&self, poly: &DensePolynomial) -> Vec {
- if self.size() < poly.degree() + 1 {
- // we use naive method for evaluating a polynomial larger than the domain size.
- // TODO: use a more efficient method using the fact that:
- // (hg)^{|base_domain|} = h^{|base_domain|},
- // so we can efficiently fold the polynomial's coefficients on itself,
- // into a single polynomial of degree `self.size() - 1`
- return self
- .base_domain
- .elements()
- .map(|g| poly.evaluate(&(self.offset * g)))
- .collect();
- }
- // g(x) = f(hx). So, f(coset) = g(base_domain)
- let gx = self.add_offset_to_coeffs(poly);
- gx.evaluate_over_domain(self.base_domain.clone()).evals
- }
-
- /// given evaluation over this coset. Interpolate and returns coefficients.
- pub fn interpolate(&self, evaluations: Vec) -> DensePolynomial {
- assert_eq!(evaluations.len(), self.base_domain.size());
- // first get g(x)
- let gx = Evaluations::from_vec_and_domain(evaluations, self.base_domain).interpolate();
- // g(x) = f(hx). Let g(x) = \sum a_i h^i x^i. Then f(x) = \sum a_i x^i
- let fx = self.remove_offset_from_coeffs(&gx);
- fx
- }
-
- /// Returns an element of the coset
- pub fn element(&self, i: usize) -> F {
- self.base_domain.element(i) * self.offset
- }
-
- #[cfg(feature = "r1cs")]
- /// Returns an element of the coset, given the index as a variable.
- pub fn element_var(&self, index: &[Boolean]) -> Result, SynthesisError> {
- Ok(FpVar::constant(self.offset) * FpVar::constant(self.gen()).pow_le(index)?)
- }
-
- /// Shrink the domain size such that new domain size = `self.size() / (1 << log_shrink_factor)`
- /// and has same offset.
- pub fn fold(&self, log_shrink_factor: u64) -> Radix2CosetDomain {
- let coset_size = 1 << log_shrink_factor;
- let domain_size = self.base_domain.size;
- let dist_between_coset_elems = domain_size / coset_size;
- Radix2CosetDomain::new_radix2_coset(dist_between_coset_elems as usize, self.offset)
- }
-}
-
-#[cfg(test)]
-mod tests {
- use ark_poly::univariate::DensePolynomial;
- use ark_poly::{DenseUVPolynomial, Polynomial};
- use ark_std::{test_rng, UniformRand};
- use ark_test_curves::bls12_381::Fr;
-
- use crate::domain::Radix2CosetDomain;
-
- #[cfg(feature = "r1cs")]
- mod consistency_with_constraints {
- use ark_poly::univariate::DensePolynomial;
- use ark_poly::Radix2EvaluationDomain;
- use ark_poly::{DenseUVPolynomial, EvaluationDomain, Polynomial};
- use ark_r1cs_std::alloc::AllocVar;
- use ark_r1cs_std::fields::fp::FpVar;
- use ark_r1cs_std::fields::FieldVar;
- use ark_r1cs_std::poly::domain::Radix2DomainVar;
- use ark_r1cs_std::poly::evaluations::univariate::EvaluationsVar;
- use ark_r1cs_std::R1CSVar;
- use ark_relations::r1cs::ConstraintSystem;
- use ark_std::{test_rng, UniformRand};
- use ark_test_curves::bls12_381::Fr;
-
- use crate::domain::Radix2CosetDomain;
-
- #[test]
- fn test_consistency_with_coset_constraints() {
- let mut rng = test_rng();
- let degree = 51;
- let poly = DensePolynomial::::rand(degree, &mut rng);
- let base_domain = Radix2EvaluationDomain::new(degree + 1).unwrap();
- let offset = Fr::rand(&mut rng);
- let coset = Radix2CosetDomain::new(base_domain, offset);
-
- // test evaluation
- let expected_eval: Vec<_> = coset
- .base_domain
- .elements()
- .map(|x| poly.evaluate(&(offset * x)))
- .collect();
- let actual_eval = coset.evaluate(&poly);
- assert_eq!(actual_eval, expected_eval);
-
- // test interpolation
- let interpolated_poly = coset.interpolate(expected_eval.to_vec());
- assert_eq!(interpolated_poly, poly);
-
- // test consistency with r1cs-std
- let cs = ConstraintSystem::new_ref();
- let eval_var: Vec<_> = expected_eval
- .iter()
- .map(|x| FpVar::new_witness(ark_relations::ns!(cs, "eval_var"), || Ok(*x)).unwrap())
- .collect();
-
- let r1cs_coset = Radix2DomainVar::new(
- base_domain.group_gen,
- ark_std::log2(degree.next_power_of_two()) as u64,
- FpVar::constant(offset),
- )
- .unwrap();
- let eval_var = EvaluationsVar::from_vec_and_domain(eval_var, r1cs_coset, true);
-
- let pt = Fr::rand(&mut rng);
- let pt_var =
- FpVar::new_witness(ark_relations::ns!(cs, "random point"), || Ok(pt)).unwrap();
-
- let expected = poly.evaluate(&pt);
- let actual = eval_var.interpolate_and_evaluate(&pt_var).unwrap();
-
- assert_eq!(actual.value().unwrap(), expected);
- assert!(cs.is_satisfied().unwrap());
- }
- }
-
- #[test]
- fn query_coset_test() {
- let mut rng = test_rng();
- let poly = DensePolynomial::rand(4, &mut rng);
-
- let offset = Fr::rand(&mut rng);
- let domain_coset = Radix2CosetDomain::new_radix2_coset(15, offset);
-
- let evals_on_domain_coset = domain_coset.evaluate(&poly);
- assert_eq!(
- poly.evaluate(&domain_coset.element(2)),
- evals_on_domain_coset[2]
- );
-
- let (query_coset_pos, query_coset) = domain_coset.query_position_to_coset(2, 2);
-
- assert_eq!(query_coset_pos, vec![2, 6, 10, 14]);
-
- assert_eq!(query_coset.element(0), domain_coset.element(2));
- assert_eq!(query_coset.element(1), domain_coset.element(6));
- assert_eq!(query_coset.element(2), domain_coset.element(10));
- assert_eq!(query_coset.element(3), domain_coset.element(14));
-
- assert_eq!(
- query_coset.evaluate(&poly),
- vec![
- evals_on_domain_coset[2],
- evals_on_domain_coset[6],
- evals_on_domain_coset[10],
- evals_on_domain_coset[14]
- ]
- )
- }
-
- #[test]
- #[cfg(feature = "r1cs")]
- fn element_var_test() {
- use ark_r1cs_std::alloc::AllocVar;
- use ark_r1cs_std::uint64::UInt64;
- use ark_r1cs_std::R1CSVar;
- use ark_relations::r1cs::ConstraintSystem;
- use ark_relations::*;
-
- let mut rng = test_rng();
- let offset = Fr::rand(&mut rng);
- let domain_coset = Radix2CosetDomain::new_radix2_coset(15, offset);
-
- let cs = ConstraintSystem::new_ref();
- let index = 11;
- let index_var = UInt64::new_witness(ns!(cs, "index"), || Ok(index))
- .unwrap()
- .to_bits_le();
-
- let expected = domain_coset.element(index as usize);
- let actual = domain_coset
- .element_var(&index_var)
- .unwrap()
- .value()
- .unwrap();
-
- assert_eq!(expected, actual);
- assert!(cs.is_satisfied().unwrap())
- }
-}
diff --git a/src/fri/constraints/mod.rs b/src/fri/constraints/mod.rs
deleted file mode 100644
index 1a3b48c..0000000
--- a/src/fri/constraints/mod.rs
+++ /dev/null
@@ -1,369 +0,0 @@
-#![allow(unused)] // temporary
-use crate::direct::constraints::DirectLDTGadget;
-use crate::domain::Radix2CosetDomain;
-use crate::fri::FRIParameters;
-use ark_ff::PrimeField;
-use ark_r1cs_std::bits::boolean::Boolean;
-use ark_r1cs_std::eq::EqGadget;
-use ark_r1cs_std::fields::fp::FpVar;
-use ark_r1cs_std::fields::FieldVar;
-use ark_r1cs_std::poly::domain::Radix2DomainVar;
-use ark_r1cs_std::poly::evaluations::univariate::EvaluationsVar;
-use ark_r1cs_std::poly::polynomial::univariate::dense::DensePolynomialVar;
-use ark_r1cs_std::prelude::CondSelectGadget;
-use ark_relations::r1cs::SynthesisError;
-use ark_sponge::constraints::CryptographicSpongeVar;
-use ark_sponge::FieldBasedCryptographicSponge;
-use ark_std::marker::PhantomData;
-use ark_std::vec::Vec;
-
-/// Constraints for FRI verifier.
-pub struct FRIVerifierGadget {
- _marker: PhantomData,
-}
-
-impl FRIVerifierGadget {
- /// ## Step 1: Interative Phase
- /// Sample alpha in interactive phase.
- pub fn interactive_phase_single_round<
- S: FieldBasedCryptographicSponge,
- SV: CryptographicSpongeVar,
- >(
- sponge_var: &mut SV,
- ) -> Result, SynthesisError> {
- Ok(sponge_var
- .squeeze_field_elements(1)?
- .first()
- .unwrap()
- .clone())
- }
-
- /// ## Step 2: Sample Queried Coset
- /// Sample the coset to be queried.
- pub fn sample_coset_index<
- S: FieldBasedCryptographicSponge,
- SV: CryptographicSpongeVar,
- >(
- sponge_var: &mut SV,
- fri_parameters: &FRIParameters,
- ) -> Result>, SynthesisError> {
- let log_num_cosets =
- fri_parameters.domain.dim() - fri_parameters.localization_parameters[0] as usize;
- sponge_var.squeeze_bits(log_num_cosets)
- }
-
- /// ## Step 2: Query Phase (Prepare Query)
- /// Prepare one query given the random coset index. The returned value `queries[i]` is the coset query
- /// of the `ith` round polynomial (including codeword but does not include final polynomial).
- /// Final polynomial is not queried. Instead, verifier will get
- /// the whole final polynomial in evaluation form, and do direct LDT.
- ///
- /// Returns the all query domains, and query coset index, final polynomial domain
- pub fn prepare_query(
- rand_coset_index: Vec>,
- fri_parameters: &FRIParameters,
- ) -> Result<
- (
- Vec>,
- Vec>>,
- Radix2CosetDomain,
- ),
- SynthesisError,
- > {
- let num_fri_rounds = fri_parameters.localization_parameters.len();
- let mut coset_indices = Vec::new();
- let mut curr_coset_index = rand_coset_index;
- let mut queries = Vec::with_capacity(num_fri_rounds);
- let mut curr_round_domain = fri_parameters.domain;
-
- // sample coset index
- for i in 0..num_fri_rounds {
- let log_dist_between_coset_elems =
- curr_round_domain.dim() - fri_parameters.localization_parameters[i] as usize;
- curr_coset_index = curr_coset_index[..log_dist_between_coset_elems].to_vec();
-
- coset_indices.push(curr_coset_index.clone());
-
- // get the query coset from coset index
- let query_gen = fri_parameters.domain.gen().pow(&[1
- << (fri_parameters.domain.dim()
- - fri_parameters.localization_parameters[i] as usize)]);
- debug_assert_eq!(
- query_gen.pow(&[1 << fri_parameters.localization_parameters[i]]),
- F::one()
- );
-
- let query_offset = &FpVar::constant(curr_round_domain.offset)
- * &(FpVar::constant(curr_round_domain.gen()).pow_le(&curr_coset_index)?);
-
- let query_coset = Radix2DomainVar::new(
- query_gen,
- fri_parameters.localization_parameters[i],
- query_offset,
- )?;
-
- queries.push(query_coset);
-
- curr_round_domain = curr_round_domain.fold(fri_parameters.localization_parameters[i])
- }
-
- Ok((queries, coset_indices, curr_round_domain))
- }
-
- /// Map coset in current round to a single point in next round.
- ///
- /// Essentially, this function interpolates the polynomial and evaluate on `alpha`.
- fn expected_evaluation(
- coset: &Radix2DomainVar,
- queried_evaluations: Vec>,
- alpha: FpVar,
- ) -> Result, SynthesisError> {
- let evaluations =
- EvaluationsVar::from_vec_and_domain(queried_evaluations, coset.clone(), true);
- evaluations.interpolate_and_evaluate(&alpha)
- }
-
- /// ## Step 3: Decision Phase (Check query)
- /// After preparing the query, verifier get the evaluations of corresponding query. Those evaluations needs
- /// to be checked by merkle tree. Then verifier calls this method to check if polynomial sent in each round
- /// is consistent with each other, and the final polynomial is low-degree.
- ///
- /// `queries[i]` is the coset query of the `ith` round polynomial, including the codeword polynomial.
- /// `queried_evaluations` stores the result of corresponding query.
- pub fn consistency_check(
- fri_parameters: &FRIParameters,
- queried_coset_indices: &[Vec>],
- queries: &[Radix2DomainVar],
- queried_evaluations: &[Vec>],
- alphas: &[FpVar],
- final_polynomial_domain: &Radix2CosetDomain,
- final_polynomial: &DensePolynomialVar,
- ) -> Result, SynthesisError> {
- let mut expected_next_round_eval = FpVar::zero();
-
- debug_assert_eq!(fri_parameters.localization_parameters.len(), queries.len());
- let mut check_result = Boolean::constant(true);
- for i in 0..queries.len() {
- expected_next_round_eval = FRIVerifierGadget::expected_evaluation(
- &queries[i],
- queried_evaluations[i].clone(),
- alphas[i].clone(),
- )?;
-
- // check if current round result is consistent with next round polynomial (if next round is not final)
- if i < queries.len() - 1 {
- let next_localization_param =
- fri_parameters.localization_parameters[i + 1] as usize;
- let log_next_dist_between_coset_elems =
- fri_parameters.log_round_coset_sizes[i + 1] - next_localization_param;
- // native code: queried_coset_indices[i] >> log_next_dist_between_coset_elems
- let next_intra_coset_index =
- &queried_coset_indices[i][log_next_dist_between_coset_elems..];
-
- let actual = FpVar::::conditionally_select_power_of_two_vector(
- next_intra_coset_index,
- &queried_evaluations[i + 1],
- )?;
-
- check_result = check_result.and(&expected_next_round_eval.is_eq(&actual)?)?;
- }
- }
-
- // check final polynomial (low degree & consistency check)
- // We assume degree_bound is power of 2.
- assert!(fri_parameters.tested_degree.is_power_of_two());
- let total_shrink_factor: u64 = fri_parameters.localization_parameters.iter().sum();
- let final_poly_degree_bound = fri_parameters.tested_degree >> total_shrink_factor;
-
- let final_element_index = queried_coset_indices.last().unwrap();
-
- DirectLDTGadget::verify(
- final_polynomial_domain.element_var(final_element_index)?,
- expected_next_round_eval,
- final_polynomial,
- final_poly_degree_bound as usize,
- )
- }
-}
-
-#[cfg(test)]
-mod tests {
- use crate::direct::DirectLDT;
- use crate::domain::Radix2CosetDomain;
- use crate::fri::constraints::FRIVerifierGadget;
- use crate::fri::prover::FRIProver;
- use crate::fri::verifier::FRIVerifier;
- use crate::fri::FRIParameters;
- use ark_poly::polynomial::univariate::DensePolynomial;
- use ark_poly::DenseUVPolynomial;
- use ark_r1cs_std::alloc::AllocVar;
- use ark_r1cs_std::bits::uint64::UInt64;
- use ark_r1cs_std::fields::fp::FpVar;
- use ark_r1cs_std::poly::polynomial::univariate::dense::DensePolynomialVar;
- use ark_r1cs_std::R1CSVar;
- use ark_relations::r1cs::ConstraintSystem;
- use ark_relations::*;
- use ark_std::{test_rng, UniformRand};
- use ark_test_curves::bls12_381::Fr;
-
- #[test]
- fn test_prepare_query() {
- let mut rng = test_rng();
- let offset = Fr::rand(&mut rng);
- let domain_input = Radix2CosetDomain::new_radix2_coset(1 << 7, offset);
-
- let fri_parameters = FRIParameters::new(32, vec![1, 2, 1], domain_input);
-
- let rand_coset_index = 31usize;
- let cs = ConstraintSystem::new_ref();
- let rand_coset_index_var =
- UInt64::new_witness(ns!(cs, "rand_coset_index"), || Ok(rand_coset_index as u64))
- .unwrap();
- let rand_coset_index_var_arr = rand_coset_index_var.to_bits_le()[..(1 << 6)].to_vec();
-
- let rand_coset_index = 31;
- let (query_cosets, query_indices, domain_final) =
- FRIVerifier::prepare_query(rand_coset_index, &fri_parameters);
- let (query_cosets_actual, query_indices_actual, domain_final_actual) =
- FRIVerifierGadget::prepare_query(rand_coset_index_var_arr, &fri_parameters).unwrap();
-
- for i in 0..query_cosets.len() {
- assert_eq!(
- query_cosets_actual[i].offset().value().unwrap(),
- query_cosets[i].offset
- );
- assert_eq!(query_cosets_actual[i].gen, query_cosets[i].gen());
- assert_eq!(query_cosets_actual[i].dim as usize, query_cosets[i].dim());
- }
-
- assert_eq!(domain_final, domain_final_actual)
- }
-
- #[test]
- fn two_rounds_fri_test() {
- let cs = ConstraintSystem::new_ref();
-
- let mut rng = test_rng();
- let poly = DensePolynomial::rand(64, &mut rng);
- let offset = Fr::rand(&mut rng);
- let domain_input = Radix2CosetDomain::new_radix2_coset(128, offset);
- let evaluations_input = domain_input.evaluate(&poly);
-
- // set up verifier parameters
- let fri_parameters = FRIParameters::new(64, vec![1, 2, 2], domain_input);
- let alphas: Vec<_> = (0..3).map(|_| Fr::rand(&mut rng)).collect();
- let alphas_var: Vec<_> = alphas
- .iter()
- .map(|x| FpVar::new_witness(ns!(cs, "alphas"), || Ok(x)).unwrap())
- .collect();
-
- // prover commits all round polynomial
- let (domain_round_0, evaluations_round_0) = FRIProver::interactive_phase_single_round(
- domain_input,
- evaluations_input.clone(),
- fri_parameters.localization_parameters[0],
- alphas[0],
- );
-
- let (domain_round_1, evaluations_round_1) = FRIProver::interactive_phase_single_round(
- domain_round_0,
- evaluations_round_0.clone(),
- fri_parameters.localization_parameters[1],
- alphas[1],
- );
-
- let (expected_domain_final, evaluations_final) = FRIProver::interactive_phase_single_round(
- domain_round_1,
- evaluations_round_1.clone(),
- fri_parameters.localization_parameters[2],
- alphas[2],
- );
-
- let rand_coset_index = 31;
- let rand_coset_index_var =
- UInt64::new_witness(ns!(cs, "rand_coset_index"), || Ok(rand_coset_index))
- .unwrap()
- .to_bits_le();
-
- let (query_cosets, query_indices, domain_final) =
- FRIVerifierGadget::prepare_query(rand_coset_index_var, &fri_parameters).unwrap();
- let (_, query_indices_native, _) =
- FRIVerifier::prepare_query(rand_coset_index as usize, &fri_parameters);
-
- assert_eq!(query_indices.len(), 3);
- assert_eq!(domain_final, expected_domain_final);
-
- let (indices, qi) = domain_input.query_position_to_coset(
- query_indices_native[0],
- fri_parameters.localization_parameters[0] as usize,
- );
- assert_eq!(qi.offset, query_cosets[0].offset().value().unwrap());
- let answer_input: Vec<_> = indices
- .iter()
- .map(|&i| {
- FpVar::new_witness(ns!(cs, "answer_input"), || Ok(evaluations_input[i])).unwrap()
- })
- .collect();
-
- let (indices, q0) = domain_round_0.query_position_to_coset(
- query_indices_native[1],
- fri_parameters.localization_parameters[1] as usize,
- );
- assert_eq!(q0.offset, query_cosets[1].offset().value().unwrap());
- let answer_round_0: Vec<_> = indices
- .iter()
- .map(|&i| {
- FpVar::new_witness(
- ns!(cs, "evaluations_round_0"),
- || Ok(evaluations_round_0[i]),
- )
- .unwrap()
- })
- .collect();
-
- let (indices, q1) = domain_round_1.query_position_to_coset(
- query_indices_native[2],
- fri_parameters.localization_parameters[2] as usize,
- );
- let answer_round_1: Vec<_> = indices
- .iter()
- .map(|&i| {
- FpVar::new_witness(
- ns!(cs, "evaluations_round_1"),
- || Ok(evaluations_round_1[i]),
- )
- .unwrap()
- })
- .collect();
- assert_eq!(q1.offset, query_cosets[2].offset().value().unwrap());
-
- let total_shrink_factor: u64 = fri_parameters.localization_parameters.iter().sum();
- let final_poly_degree_bound = fri_parameters.tested_degree >> total_shrink_factor;
- let final_polynomial = DirectLDT::generate_low_degree_coefficients(
- domain_final,
- evaluations_final,
- final_poly_degree_bound as usize,
- );
- let final_polynomial_coeffs: Vec<_> = final_polynomial
- .coeffs()
- .iter()
- .map(|x| FpVar::new_witness(ns!(cs, "final_poly_coeff"), || Ok(*x)).unwrap())
- .collect();
- let final_polynomial_var =
- DensePolynomialVar::from_coefficients_slice(&final_polynomial_coeffs);
-
- let result = FRIVerifierGadget::consistency_check(
- &fri_parameters,
- &query_indices,
- &query_cosets,
- &vec![answer_input, answer_round_0, answer_round_1],
- &alphas_var,
- &domain_final,
- &final_polynomial_var,
- )
- .unwrap();
-
- assert!(result.value().unwrap());
- assert!(cs.is_satisfied().unwrap());
- }
-}
diff --git a/src/fri/mod.rs b/src/fri/mod.rs
deleted file mode 100644
index 7d2aabf..0000000
--- a/src/fri/mod.rs
+++ /dev/null
@@ -1,56 +0,0 @@
-use crate::domain::Radix2CosetDomain;
-use ark_ff::PrimeField;
-use ark_std::marker::PhantomData;
-use ark_std::vec::Vec;
-/// R1CS constraints for FRI Verifier.
-#[cfg(feature = "r1cs")]
-pub mod constraints;
-/// Prover used by FRI protocol.
-pub mod prover;
-/// Verifier used by FRI protocol.
-pub mod verifier;
-
-/// Some parameters used by FRI verifiers.
-#[derive(Clone)]
-pub struct FRIParameters {
- /// The degree
- pub tested_degree: u64,
- /// At each round `i`, domain size will shrink to `last_round_domain_size` / `localization_parameters[i]`^2
- pub localization_parameters: Vec,
- /// Evaluation domain, which is represented as a coset.
- pub domain: Radix2CosetDomain,
- /// coset sizes in each round (first round is input coset)
- log_round_coset_sizes: Vec,
-}
-
-impl FRIParameters {
- /// Check parameter validity and returns new `FRIParameters`.
- pub fn new(
- tested_degree: u64,
- localization_parameters: Vec,
- domain: Radix2CosetDomain,
- ) -> Self {
- assert!(
- domain.size() >= tested_degree as usize + 1,
- "Evaluations is not low degree!\
- Domain size needs to be >= tested_degree + 1"
- );
- let mut log_round_coset_sizes = Vec::new();
- log_round_coset_sizes.push(domain.dim());
- for i in 0..localization_parameters.len() {
- log_round_coset_sizes
- .push(log_round_coset_sizes[i] - localization_parameters[i] as usize)
- }
- FRIParameters {
- tested_degree,
- localization_parameters,
- domain,
- log_round_coset_sizes,
- }
- }
-}
-
-/// Fast Reed-Solomon Interactive Oracle Proof of Proximity
-pub struct FRI {
- _protocol: PhantomData,
-}
diff --git a/src/fri/prover.rs b/src/fri/prover.rs
deleted file mode 100644
index b7a73bd..0000000
--- a/src/fri/prover.rs
+++ /dev/null
@@ -1,250 +0,0 @@
-use crate::domain::Radix2CosetDomain;
-use ark_ff::{batch_inversion_and_mul, PrimeField};
-use ark_r1cs_std::poly::evaluations::univariate::lagrange_interpolator::LagrangeInterpolator;
-use ark_std::marker::PhantomData;
-use ark_std::vec::Vec;
-/// FRI Prover
-pub struct FRIProver {
- _prover: PhantomData,
-}
-
-impl FRIProver {
- /// Single round prover in commit phase. Returns the evaluation oracles for next round.
- ///
- /// Returns domain for next round polynomial and evaluations over the domain.
- pub fn interactive_phase_single_round_naive(
- domain: Radix2CosetDomain,
- evaluation_oracles_over_domain: Vec,
- localization_param: u64,
- alpha: F,
- ) -> (Radix2CosetDomain, Vec) {
- let coset_size = 1 << localization_param;
- let domain_size = domain.base_domain.size;
- let dist_between_coset_elems = domain_size / coset_size;
- let mut new_evals = Vec::with_capacity(dist_between_coset_elems as usize);
- let coset_generator = domain
- .gen()
- .pow(&[1 << (domain.dim() as u64 - localization_param)]);
- let mut cur_coset_offset = domain.offset;
-
- for coset_index in 0..dist_between_coset_elems {
- let mut poly_evals = Vec::new();
- for intra_coset_index in 0..coset_size {
- poly_evals.push(
- evaluation_oracles_over_domain
- [(coset_index + intra_coset_index * dist_between_coset_elems) as usize],
- );
- }
-
- let interpolator = LagrangeInterpolator::new(
- cur_coset_offset,
- coset_generator,
- localization_param,
- poly_evals,
- );
- new_evals.push(interpolator.interpolate(alpha));
- cur_coset_offset *= domain.gen();
- }
-
- let c = Radix2CosetDomain::new_radix2_coset(new_evals.len(), domain.offset);
- // c.base_domain.group_gen = coset_generator;
- // c.base_domain.group_gen_inv = coset_generator.inverse().unwrap();
- debug_assert_eq!(coset_generator.pow(&[new_evals.len() as u64]), F::one());
- debug_assert_eq!(c.size(), new_evals.len());
- (c, new_evals)
- }
-
- /// Single round prover in commit phase. Returns the polynomial for next round
- /// represented by evaluations over domain in next round.
- ///
- /// Returns domain for next round polynomial and evaluations over the domain.
- pub fn interactive_phase_single_round(
- domain: Radix2CosetDomain,
- evals_over_domain: Vec,
- localization_param: u64,
- alpha: F,
- ) -> (Radix2CosetDomain, Vec) {
- let coset_size = 1 << localization_param;
- let num_cosets = domain.size() / coset_size;
- let mut next_f_i = Vec::with_capacity(num_cosets); // new_evals
-
- let h_inc = domain.gen();
- let h_inc_to_coset_inv_plus_one =
- h_inc.pow(&[coset_size as u64]).inverse().unwrap() * h_inc;
-
- let shiftless_coset = Radix2CosetDomain::new_radix2_coset(coset_size, F::one());
- let g = shiftless_coset.gen();
- let g_inv = g.inverse().unwrap();
- let x_to_order_coset = alpha.pow(&[coset_size as u64]);
-
- // x * g^{-k}
- let mut shifted_x_elements = Vec::with_capacity(coset_size);
- shifted_x_elements.push(alpha);
- for i in 1..coset_size {
- shifted_x_elements.push(shifted_x_elements[i - 1] * g_inv);
- }
-
- let mut cur_h = domain.offset;
- let first_h_to_coset_inv_plus_one =
- cur_h.pow(&[coset_size as u64]).inverse().unwrap() * cur_h;
- let mut cur_coset_constant_plus_h = x_to_order_coset * first_h_to_coset_inv_plus_one;
-
- /* x * g^{-k} - h, for all combinations of k, h. */
- let mut elements_to_invert = Vec::with_capacity(evals_over_domain.len());
-
- /* constant for each coset, equal to
- * vp_coset(x) / h^{|coset| - 1} = x^{|coset|} h^{-|coset| + 1} - h */
- let mut constant_for_each_coset = Vec::with_capacity(num_cosets);
-
- let constant_for_all_cosets = F::from(coset_size as u128).inverse().unwrap();
- let mut x_ever_in_domain = false;
- let mut x_coset_index = 0;
- let mut x_index_in_domain = 0;
-
- /* First we create all the constants for each coset,
- and the entire vector of elements to invert, xg^{-k} - h.
- */
-
- for j in 0..num_cosets {
- /* coset constant = x^|coset| * h^{1 - |coset|} - h */
- let coset_constant: F = cur_coset_constant_plus_h - cur_h;
- constant_for_each_coset.push(coset_constant);
- /* coset_constant = vp_coset(x) * h^{-|coset| + 1},
- since h is non-zero, coset_constant is zero iff vp_coset(x) is zero.
- If vp_coset(x) is zero, then x is in the coset. */
- let x_in_coset = coset_constant.is_zero();
- /* if x is in the coset, we mark which position x is within f_i_domain,
- and we pad elements to invert to simplify inversion later. */
- if x_in_coset {
- x_ever_in_domain = true;
- x_coset_index = j;
- // find which element in the coset x belongs to.
- // also pad elements_to_invert to simplify indexing
- let mut cur_elem = cur_h;
- for k in 0..coset_size {
- if cur_elem == alpha {
- x_index_in_domain = k * num_cosets + j;
- }
- cur_elem *= g;
- elements_to_invert.push(F::one());
- }
- continue;
- }
-
- /* Append all elements to invert, (xg^{-k} - h) */
- for k in 0..coset_size {
- elements_to_invert.push(shifted_x_elements[k] - cur_h);
- }
-
- cur_h *= h_inc;
- /* coset constant = x^|coset| * h^{1 - |coset|} - h
- So we can efficiently increment x^|coset| * h^{1 - |coset|} */
- cur_coset_constant_plus_h *= h_inc_to_coset_inv_plus_one;
- }
- /* Technically not lagrange coefficients, its missing the constant for each coset */
- batch_inversion_and_mul(&mut elements_to_invert, &constant_for_all_cosets);
- let lagrange_coefficients = elements_to_invert;
- for j in 0..num_cosets {
- let mut interpolation = F::zero();
- for k in 0..coset_size {
- interpolation += evals_over_domain[k * num_cosets + j]
- * lagrange_coefficients[j * coset_size + k];
- }
- /* Multiply the constant for each coset, to get the correct interpolation */
- interpolation *= constant_for_each_coset[j];
- next_f_i.push(interpolation);
- }
-
- /* if x ever in domain, correct that evaluation. */
- if x_ever_in_domain {
- next_f_i[x_coset_index] = evals_over_domain[x_index_in_domain];
- }
-
- // domain definition
- let c = domain.fold(localization_param);
-
- (c, next_f_i)
- }
-}
-
-#[cfg(test)]
-pub mod tests {
- use crate::direct::DirectLDT;
- use crate::domain::Radix2CosetDomain;
- use crate::fri::prover::FRIProver;
- use ark_poly::univariate::DensePolynomial;
- use ark_poly::DenseUVPolynomial;
- use ark_std::{test_rng, UniformRand};
- use ark_test_curves::bls12_381::Fr;
-
- #[test]
- fn efficient_prover_consistency_test() {
- let degree = 32;
-
- let mut rng = test_rng();
- let poly = DensePolynomial::::rand(degree, &mut rng);
- let domain_coset = Radix2CosetDomain::new_radix2_coset(64, Fr::rand(&mut rng));
- let evaluations = domain_coset.evaluate(&poly);
-
- // fri prover should reduce its degree
- let alpha = Fr::rand(&mut rng);
- let localization = 2;
- let (expected_domain_next_round, expected_eval_next_round) =
- FRIProver::interactive_phase_single_round_naive(
- domain_coset,
- evaluations.to_vec(),
- localization,
- alpha,
- );
-
- let (actual_domain_next_round, actual_eval_next_round) =
- FRIProver::interactive_phase_single_round(
- domain_coset,
- evaluations.to_vec(),
- localization,
- alpha,
- );
-
- assert_eq!(actual_domain_next_round, expected_domain_next_round);
- assert_eq!(actual_eval_next_round, expected_eval_next_round);
- }
-
- #[test]
- fn degree_reduction_test() {
- let degree = 32;
-
- let mut rng = test_rng();
- let poly = DensePolynomial::::rand(degree, &mut rng);
- let domain_coset = Radix2CosetDomain::new_radix2_coset(64, Fr::rand(&mut rng));
- let evaluations = domain_coset.evaluate(&poly);
-
- // fri prover should reduce its degree
- let alpha = Fr::rand(&mut rng);
- let localization = 2;
- let (domain_next_round, eval_next_round) = FRIProver::interactive_phase_single_round(
- domain_coset.clone(),
- evaluations.to_vec(),
- localization,
- alpha,
- );
-
- let low_degree_poly = DirectLDT::generate_low_degree_coefficients(
- domain_next_round.clone(),
- eval_next_round.to_vec(),
- degree / (1 << localization),
- );
-
- let sampled_element = domain_next_round.element(15);
- let sampled_evaluation = eval_next_round[15];
-
- assert!(DirectLDT::verify(
- sampled_element,
- sampled_evaluation,
- &low_degree_poly
- ));
-
- // test `fold_domain`
- let fold_domain = domain_coset.fold(localization);
- assert_eq!(fold_domain, domain_next_round);
- }
-}
diff --git a/src/fri/verifier.rs b/src/fri/verifier.rs
deleted file mode 100644
index f62952b..0000000
--- a/src/fri/verifier.rs
+++ /dev/null
@@ -1,342 +0,0 @@
-use ark_std::marker::PhantomData;
-
-use crate::direct::DirectLDT;
-use crate::domain::Radix2CosetDomain;
-use crate::fri::FRIParameters;
-use ark_ff::PrimeField;
-use ark_poly::polynomial::univariate::DensePolynomial;
-use ark_poly::Polynomial;
-use ark_sponge::FieldBasedCryptographicSponge;
-use ark_std::vec::Vec;
-
-/// Implements FRI verifier.
-pub struct FRIVerifier<F: PrimeField> {
- _verifier: PhantomData<F>,
-}
-
-impl<F: PrimeField> FRIVerifier<F> {
- /// ## Step 1: Interative Phase
- /// Sample alpha in interactive phase.
- pub fn interactive_phase_single_round<S: FieldBasedCryptographicSponge<F>>(
- sponge: &mut S,
- ) -> F {
- sponge.squeeze_native_field_elements(1)[0]
- }
-
- /// ## Step 2: Sample Queried Coset
- /// Sample the coset to be queried.
- pub fn sample_coset_index<S: FieldBasedCryptographicSponge<F>>(
- sponge: &mut S,
- fri_parameters: &FRIParameters<F>,
- ) -> usize {
- let log_num_cosets =
- fri_parameters.domain.dim() - fri_parameters.localization_parameters[0] as usize;
- // we use the fact that number of cosets is always power of two
- let rand_coset_index = le_bits_array_to_usize(&sponge.squeeze_bits(log_num_cosets));
- rand_coset_index
- }
-
- /// ## Step 2: Query Phase (Prepare Query)
- /// Prepare one query given the random coset index. The returned value `queries[i]` is the coset query
- /// of the `ith` round polynomial (including codeword but does not include final polynomial).
- /// Final polynomial is not queried. Instead, verifier will get
- /// the whole final polynomial in evaluation form, and do direct LDT.
- ///
- /// Returns the all query domains, and query coset index, final polynomial domain
- pub fn prepare_query(
- rand_coset_index: usize,
- fri_parameters: &FRIParameters<F>,
- ) -> (Vec<Radix2CosetDomain<F>>, Vec<usize>, Radix2CosetDomain<F>) {
- let num_fri_rounds = fri_parameters.localization_parameters.len();
- let mut coset_indices = Vec::new();
- let mut curr_coset_index = rand_coset_index;
- let mut queries = Vec::with_capacity(num_fri_rounds);
- let mut curr_round_domain = fri_parameters.domain;
- // sample a coset index
- for i in 0..num_fri_rounds {
- // current coset index = last coset index % (distance between coset at current round)
- // edge case: at first round, this still applies
-
- let dist_between_coset_elems =
- curr_round_domain.size() / (1 << fri_parameters.localization_parameters[i]);
- curr_coset_index = curr_coset_index % dist_between_coset_elems;
-
- coset_indices.push(curr_coset_index);
-
- let (_, query_coset) = curr_round_domain.query_position_to_coset(
- curr_coset_index,
- fri_parameters.localization_parameters[i] as usize,
- );
-
- queries.push(query_coset);
-
- // get next round coset size, and next round domain
- curr_round_domain = curr_round_domain.fold(fri_parameters.localization_parameters[i]);
- }
-
- (queries, coset_indices, curr_round_domain)
- }
-
- /// ## Step 2: Query Phase (Prepare Query)
- /// Prepare all queries given the sampled random coset indices.
- ///
- /// The first returned value `queries[i][j]` is the coset query
- /// of the `j`th round polynomial (including codeword but does not include final polynomial) for `i`th query.
- ///
- /// The second returned value `indices[i][j]` is the coset index
- /// of the `j`th round polynomial (including codeword but does not include final polynomial) for `i`th query.
- ///
- /// The last returned value `final[i]` is the final polynomial domain at round `i`.
- pub fn batch_prepare_queries(
- rand_coset_indices: &[usize],
- fri_parameters: &FRIParameters<F>,
- ) -> (
- Vec<Vec<Radix2CosetDomain<F>>>,
- Vec<Vec<usize>>,
- Vec<Radix2CosetDomain<F>>,
- ) {
- let mut queries = Vec::with_capacity(rand_coset_indices.len());
- let mut indices = Vec::with_capacity(rand_coset_indices.len());
- let mut finals = Vec::with_capacity(rand_coset_indices.len());
-
- rand_coset_indices
- .iter()
- .map(|&i| Self::prepare_query(i, fri_parameters))
- .for_each(|(query, index, fp)| {
- queries.push(query);
- indices.push(index);
- finals.push(fp);
- });
-
- (queries, indices, finals)
- }
-
- /// ## Step 3: Decision Phase (Check query)
- /// After preparing the query, verifier get the evaluations of corresponding query. Those evaluations needs
- /// to be checked by merkle tree. Then verifier calls this method to check if polynomial sent in each round
- /// is consistent with each other, and the final polynomial is low-degree.
- ///
- /// `queries[i]` is the coset query of the `ith` round polynomial, including the codeword polynomial.
- /// `queried_evaluations` stores the result of corresponding query.
- pub fn consistency_check(
- fri_parameters: &FRIParameters<F>,
- queried_coset_indices: &[usize],
- queries: &[Radix2CosetDomain<F>],
- queried_evaluations: &[Vec<F>],
- alphas: &[F],
- final_polynomial_domain: &Radix2CosetDomain<F>,
- final_polynomial: &DensePolynomial<F>,
- ) -> bool {
- let mut expected_next_round_eval = F::zero();
-
- debug_assert_eq!(fri_parameters.localization_parameters.len(), queries.len());
- for i in 0..queries.len() {
- expected_next_round_eval = FRIVerifier::expected_evaluation(
- &queries[i],
- queried_evaluations[i].clone(),
- alphas[i],
- );
-
- // check if current round result is consistent with next round polynomial (if next round is not final)
- if i < queries.len() - 1 {
- let next_localization_param =
- fri_parameters.localization_parameters[i + 1] as usize;
- let log_next_dist_between_coset_elems =
- fri_parameters.log_round_coset_sizes[i + 1] - next_localization_param;
- let next_intra_coset_index =
- queried_coset_indices[i] >> log_next_dist_between_coset_elems;
-
- let actual = queried_evaluations[i + 1][next_intra_coset_index];
- if expected_next_round_eval != actual {
- return false;
- }
- }
- }
-
- // check final polynomial (low degree & consistency check)
- // We assume degree_bound is power of 2.
- assert!(fri_parameters.tested_degree.is_power_of_two());
- let total_shrink_factor: u64 = fri_parameters.localization_parameters.iter().sum();
- let final_poly_degree_bound = fri_parameters.tested_degree >> total_shrink_factor;
-
- let final_element_index = *queried_coset_indices.last().unwrap();
-
- assert!(
- final_polynomial.degree() <= final_poly_degree_bound as usize,
- "final polynomial degree is too large!"
- );
- DirectLDT::verify(
- final_polynomial_domain.element(final_element_index),
- expected_next_round_eval,
- &final_polynomial,
- )
- }
-
- /// ## Step 3: Decision Phase (Check query)
- /// After preparing all queries, verifier gets the evaluations of corresponding query. Those evaluations needs
- /// to be checked by merkle tree. Then verifier calls this method to check if polynomial sent in each round
- /// is consistent with each other, and the final polynomial is low-degree.
- ///
- /// * `all_queried_coset_indices[i][j]` is the `j`th round query coset index of `i`th query
- /// * `all_queries_domains[i][j]` is the `j`th round query coset of `i`th query
- /// * `all_queried_evaluations[i][j]` is a vector storing corresponding evaluations at `all_queries_domains[i][j]`
- /// * `alphas[i]` is the randomness used by the polynomial
- /// * `all_final_polynomial_domain[i]` is the final polynomial domain for `i`th query
- /// * `all_final_polynomials` is the final polynomial for `i`th query
- pub fn batch_consistency_check(
- fri_parameters: &FRIParameters<F>,
- all_queried_coset_indices: &[Vec<usize>],
- all_queries_domains: &[Vec<Radix2CosetDomain<F>>],
- all_queried_evaluations: &[Vec<Vec<F>>],
- alphas: &[F],
- all_final_polynomial_domain: &[Radix2CosetDomain<F>],
- all_final_polynomials: &[DensePolynomial<F>],
- ) -> bool {
- for i in 0..all_queried_coset_indices.len() {
- let result = Self::consistency_check(
- fri_parameters,
- &all_queried_coset_indices[i],
- &all_queries_domains[i],
- &all_queried_evaluations[i],
- alphas,
- &all_final_polynomial_domain[i],
- &all_final_polynomials[i],
- );
- if !result {
- return false;
- }
- }
- true
- }
-
- /// Map coset in current round to a single point in next round.
- ///
- /// Essentially, this function interpolates the polynomial and evaluate on `alpha`.
- #[inline]
- fn expected_evaluation(
- coset: &Radix2CosetDomain<F>,
- queried_evaluations: Vec<F>,
- alpha: F,
- ) -> F {
- let poly = coset.interpolate(queried_evaluations);
- poly.evaluate(&alpha)
- }
-}
-
-fn le_bits_array_to_usize(bits: &[bool]) -> usize {
- let mut result = 0;
- for &bit in bits {
- result += bit as usize;
- result *= 2;
- }
- result
-}
-
-#[cfg(test)]
-mod tests {
- use ark_ff::UniformRand;
- use ark_poly::univariate::DensePolynomial;
- use ark_poly::{DenseUVPolynomial, Polynomial};
- use ark_std::test_rng;
- use ark_test_curves::bls12_381::Fr;
-
- use crate::direct::DirectLDT;
- use crate::domain::Radix2CosetDomain;
- use crate::fri::prover::FRIProver;
- use crate::fri::verifier::FRIVerifier;
- use crate::fri::FRIParameters;
-
- #[test]
- fn two_rounds_fri_test() {
- // First, generate a low degree polynomial, and its evaluations.
- let mut rng = test_rng();
- let poly = DensePolynomial::rand(32, &mut rng);
- let offset = Fr::rand(&mut rng);
- let domain_input = Radix2CosetDomain::new_radix2_coset(128, offset);
- let evaluations_input = domain_input.evaluate(&poly);
-
- // Set up verifier parameter
- let fri_parameters = FRIParameters::new(32, vec![1, 2, 1], domain_input);
- let alphas: Vec<_> = (0..3).map(|_| Fr::rand(&mut rng)).collect();
-
- // prover commits all round polynomial
- let (domain_round_0, evaluations_round_0) = FRIProver::interactive_phase_single_round(
- domain_input,
- evaluations_input.clone(),
- fri_parameters.localization_parameters[0],
- alphas[0],
- );
-
- let (domain_round_1, evaluations_round_1) = FRIProver::interactive_phase_single_round(
- domain_round_0,
- evaluations_round_0.clone(),
- fri_parameters.localization_parameters[1],
- alphas[1],
- );
-
- let (expected_domain_final, evaluations_final) = FRIProver::interactive_phase_single_round(
- domain_round_1,
- evaluations_round_1.clone(),
- fri_parameters.localization_parameters[2],
- alphas[2],
- );
-
- // verifier prepare queries
- let rand_coset_index = 31;
- let (query_cosets, query_indices, domain_final) =
- FRIVerifier::prepare_query(rand_coset_index, &fri_parameters);
- assert_eq!(query_indices.len(), 3);
- assert_eq!(domain_final, expected_domain_final);
-
- // prover generate answers to queries
- let (indices, qi) = domain_input.query_position_to_coset(
- query_indices[0],
- fri_parameters.localization_parameters[0] as usize,
- );
- let answer_input: Vec<_> = indices.iter().map(|&i| evaluations_input[i]).collect();
- assert_eq!(qi, query_cosets[0]);
-
- let (indices, q0) = domain_round_0.query_position_to_coset(
- query_indices[1],
- fri_parameters.localization_parameters[1] as usize,
- );
- let answer_round_0: Vec<_> = indices.iter().map(|&i| evaluations_round_0[i]).collect();
- assert_eq!(q0, query_cosets[1]);
-
- let (indices, q1) = domain_round_1.query_position_to_coset(
- query_indices[2],
- fri_parameters.localization_parameters[2] as usize,
- );
- let answer_round_1: Vec<_> = indices.iter().map(|&i| evaluations_round_1[i]).collect();
- assert_eq!(q1, query_cosets[2]);
-
- // sanity check: answer_round_i interpolate version contained in answer_round_i+1
- assert!(answer_round_0.contains(&qi.interpolate(answer_input.clone()).evaluate(&alphas[0])));
- assert!(
- answer_round_1.contains(&q0.interpolate(answer_round_0.clone()).evaluate(&alphas[1]))
- );
- assert!(evaluations_final
- .contains(&q1.interpolate(answer_round_1.clone()).evaluate(&alphas[2])));
-
- let total_shrink_factor: u64 = fri_parameters.localization_parameters.iter().sum();
- let final_poly_degree_bound = fri_parameters.tested_degree >> total_shrink_factor;
- let final_polynomial = DirectLDT::generate_low_degree_coefficients(
- domain_final,
- evaluations_final,
- final_poly_degree_bound as usize,
- );
-
- // verifier verifies consistency
- let result = FRIVerifier::consistency_check(
- &fri_parameters,
- &query_indices,
- &query_cosets,
- &vec![answer_input, answer_round_0, answer_round_1],
- &alphas,
- &domain_final,
- &final_polynomial,
- );
-
- assert!(result)
- }
-}
diff --git a/src/ldt.rs b/src/ldt.rs
new file mode 100644
index 0000000..9d1c814
--- /dev/null
+++ b/src/ldt.rs
@@ -0,0 +1,23 @@
+use ark_ff::FftField;
+
+pub trait Prover {
+ type Proof;
+ type ProverConfig;
+ type Witness;
+ fn new(prover_config: Self::ProverConfig) -> Self;
+ fn prove(&self, witness: &Self::Witness) -> Self::Proof;
+}
+pub trait Verifier {
+ type Statement;
+ type Proof;
+ type VerifierConfig;
+ fn new(verifier_config: Self::VerifierConfig) -> Self;
+ fn verify(&self, commitment: &Self::Statement, proof: &Self::Proof) -> bool;
+}
+pub trait LowDegreeTest {
+ type LDTConfig;
+ type Proof;
+ type Prover;
+ type Verifier;
+ fn new(ldt_config: Self::LDTConfig) -> (Self::Prover, Self::Verifier);
+}
diff --git a/src/lib.rs b/src/lib.rs
index 2154ce1..81ea8f6 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,25 +1,10 @@
#![cfg_attr(not(feature = "std"), no_std)]
-
-//! A crate for low-degree tests.
-#![deny(
- future_incompatible,
- missing_docs,
- non_shorthand_field_patterns,
- renamed_and_removed_lints,
- rust_2018_idioms,
- stable_features,
- trivial_casts,
- trivial_numeric_casts,
- unused,
- variant_size_differences,
- warnings
-)]
-#![forbid(unsafe_code)]
-
-/// Direct low-degree tests
pub mod direct;
-
-/// Domain represented as coset.
pub mod domain;
-/// Implementations for FRI Protocol
-pub mod fri;
+pub mod ldt;
+pub mod poly_utils;
+pub mod statement;
+pub mod stir;
+pub mod test_helpers;
+pub mod utils;
+pub mod witness;
diff --git a/src/poly_utils/bs08.rs b/src/poly_utils/bs08.rs
new file mode 100644
index 0000000..4711539
--- /dev/null
+++ b/src/poly_utils/bs08.rs
@@ -0,0 +1,154 @@
+use ark_ff::Field;
+use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial, Polynomial};
+
+#[cfg(not(feature = "std"))]
+use ark_std::{vec, vec::Vec};
+
+use crate::utils;
+
+pub struct BivariatePolynomial<F>(pub Vec<Vec<F>>);
+
+// Takes a polynomial and interprets it as a matrix of coefficients
+// this exactly corresponds to computing the BS08 bivariate polynomial with
+// q(X) = X^cols
+pub fn to_coefficient_matrix<F: Field>(
+ f: &DensePolynomial<F>,
+ rows: usize,
+ cols: usize,
+) -> BivariatePolynomial<F> {
+ if f.degree() + 1 > rows * cols {
+ panic!("Degree of polynomial is too large for matrix");
+ }
+
+ let mut matrix = vec![vec![F::ZERO; cols]; rows];
+
+ for (i, coeff) in f.coeffs.iter().enumerate() {
+ matrix[i / cols][i % cols] = *coeff;
+ }
+
+ BivariatePolynomial(matrix)
+}
+
+impl<F> BivariatePolynomial<F>
+where
+ F: Field,
+{
+ pub fn degree_x(&self) -> usize {
+ self.rows() - 1
+ }
+
+ pub fn rows(&self) -> usize {
+ self.0.len()
+ }
+
+ pub fn degree_y(&self) -> usize {
+ self.cols() - 1
+ }
+
+ pub fn cols(&self) -> usize {
+ self.0[0].len()
+ }
+
+ pub fn evaluate(&self, x: F, y: F) -> F {
+ let mut res = F::zero();
+ for row in 0..self.rows() {
+ for col in 0..self.cols() {
+ res += self.0[row][col] * x.pow([row as u64]) * y.pow([col as u64]);
+ }
+ }
+ res
+ }
+
+ pub fn fold_by_col(&self, alpha: F) -> DensePolynomial<F> {
+ let transposed = utils::transpose(self.0.clone());
+
+ let mut res = DensePolynomial::from_coefficients_vec(vec![]);
+
+ let mut pow = F::ONE;
+ for c in transposed {
+ res += &DensePolynomial::from_coefficients_vec(c.iter().map(|f| pow * f).collect());
+ pow *= alpha;
+ }
+
+ res
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::test_helpers::fields::Field64 as TestField;
+ use ark_ff::AdditiveGroup;
+ use ark_std::rand::Rng;
+
+ fn test_bivariate(
+ poly: &DensePolynomial<TestField>,
+ matrix: &BivariatePolynomial<TestField>,
+ cols: usize,
+ ) {
+ let mut rng = ark_std::test_rng();
+
+ let point: TestField = rng.gen();
+ assert_eq!(
+ poly.evaluate(&point),
+ matrix.evaluate(point.pow([cols as u64]), point)
+ );
+ }
+
+ #[test]
+ fn neat_example() {
+ let poly = DensePolynomial::from_coefficients_vec(vec![
+ TestField::from(0),
+ TestField::from(1),
+ TestField::from(2),
+ TestField::from(3),
+ TestField::from(4),
+ TestField::from(5),
+ ]);
+ let matrix = to_coefficient_matrix(&poly, 3, 2);
+
+ for r in 0..3 {
+ for c in 0..2 {
+ assert_eq!(matrix.0[r][c], TestField::from((2 * r + c) as u8));
+ }
+ }
+ test_bivariate(&poly, &matrix, 2);
+ }
+
+ #[test]
+ fn shorter_than_expected() {
+ let poly = DensePolynomial::from_coefficients_vec(vec![
+ TestField::from(0),
+ TestField::from(1),
+ TestField::from(2),
+ TestField::from(3),
+ TestField::from(4),
+ TestField::from(5),
+ ]);
+ let matrix = to_coefficient_matrix(&poly, 4, 2);
+
+ for r in 0..3 {
+ for c in 0..2 {
+ assert_eq!(matrix.0[r][c], TestField::from((2 * r + c) as u8));
+ }
+ }
+ for c in 0..2 {
+ assert_eq!(matrix.0[3][c], TestField::ZERO);
+ }
+ test_bivariate(&poly, &matrix, 2);
+ }
+
+ #[test]
+ #[should_panic]
+ fn longer_than_expected() {
+ let poly = DensePolynomial::from_coefficients_vec(vec![
+ TestField::from(0),
+ TestField::from(1),
+ TestField::from(2),
+ TestField::from(3),
+ TestField::from(4),
+ TestField::from(5),
+ ]);
+ let _matrix = to_coefficient_matrix(&poly, 2, 2);
+ }
+}
diff --git a/src/poly_utils/folding.rs b/src/poly_utils/folding.rs
new file mode 100644
index 0000000..75c506e
--- /dev/null
+++ b/src/poly_utils/folding.rs
@@ -0,0 +1,65 @@
+use ark_ff::Field;
+use ark_poly::{univariate::DensePolynomial, Polynomial};
+
+#[cfg(not(feature = "std"))]
+use ark_std::vec::Vec;
+
+use crate::poly_utils::interpolation;
+
+use super::bs08;
+
+pub fn poly_fold<F: Field>(
+ f: &DensePolynomial<F>,
+ folding_factor: usize,
+ folding_randomness: F,
+) -> DensePolynomial<F> {
+ let degree = f.degree() + 1;
+ let q_poly = bs08::to_coefficient_matrix(f, degree.div_ceil(folding_factor), folding_factor);
+ q_poly.fold_by_col(folding_randomness)
+}
+
+// f_answers is a vector containg B_l and f(B_l) for l the evaluation point
+// Recall that B_l has x \in B_l \iff x^k = l
+pub fn fold<F: Field>(f_answers: Vec<(F, F)>, folding_factor: usize, folding_randomness: F) -> F {
+ assert_eq!(f_answers.len(), folding_factor);
+ interpolation::evaluate_interpolation(f_answers.iter(), folding_randomness)
+}
+
+#[cfg(test)]
+mod tests {
+ use ark_ff::FftField;
+ use ark_poly::DenseUVPolynomial;
+
+ use super::*;
+ use crate::test_helpers::fields::Field64 as TestField;
+
+ #[test]
+ fn test_folding() {
+ let mut rng = ark_std::test_rng();
+ let poly = DensePolynomial::rand(16, &mut rng);
+
+ let folding_factor = 2;
+ let folding_randomness = TestField::from(5);
+
+ let poly_fold = poly_fold(&poly, folding_factor, folding_randomness);
+
+ let root_of_unity = TestField::get_root_of_unity(256).unwrap();
+
+ let evalpoint = root_of_unity.pow([folding_factor as u64]);
+ let beta_l = &[root_of_unity, root_of_unity.pow([1 + 128])];
+
+ for beta in beta_l {
+ assert_eq!(beta.pow([folding_factor as u64]), evalpoint,);
+ }
+
+ let f_answers = beta_l
+ .iter()
+ .map(|x| (*x, poly.evaluate(x)))
+ .collect::<Vec<_>>();
+
+ assert_eq!(
+ poly_fold.evaluate(&evalpoint),
+ fold(f_answers, folding_factor, folding_randomness)
+ );
+ }
+}
diff --git a/src/poly_utils/interpolation.rs b/src/poly_utils/interpolation.rs
new file mode 100644
index 0000000..028e693
--- /dev/null
+++ b/src/poly_utils/interpolation.rs
@@ -0,0 +1,268 @@
+use ark_ff::{batch_inversion, FftField, Field};
+use ark_poly::{
+ univariate::DensePolynomial, DenseUVPolynomial, Evaluations, Polynomial, Radix2EvaluationDomain,
+};
+
+#[cfg(not(feature = "std"))]
+use ark_std::{vec, vec::Vec};
+
+use crate::utils;
+
+// Computes a polynomial that vanishes on points
+pub fn vanishing_poly<'a, F: Field>(points: impl IntoIterator<Item = &'a F>) -> DensePolynomial<F> {
+ // Compute the denominator (which is \prod_a(x - a))
+ let mut vanishing_poly: DensePolynomial<_> =
+ DensePolynomial::from_coefficients_slice(&[F::ONE]);
+ for a in points {
+ vanishing_poly =
+ vanishing_poly.naive_mul(&DensePolynomial::from_coefficients_slice(&[-*a, F::ONE]));
+ }
+ vanishing_poly
+}
+
+// Computes a polynomial that interpolates the given points with the given answers
+pub fn naive_interpolation<'a, F: Field>(
+ points: impl IntoIterator<Item = &'a (F, F)>,
+) -> DensePolynomial<F> {
+ let points: Vec<_> = points.into_iter().collect();
+ let vanishing_poly = vanishing_poly(points.iter().map(|(a, _)| a));
+
+ // Compute the ans polynomial (this is just a naive interpolation)
+ let mut ans_polynomial = DensePolynomial::from_coefficients_slice(&[]);
+ for (a, eval) in points.iter() {
+ // Computes the vanishing (apart from x - a)
+ let vanishing_adjusted =
+ &vanishing_poly / &DensePolynomial::from_coefficients_slice(&[-*a, F::ONE]);
+
+ // Now, we can scale to get the right weigh
+ let scale_factor = *eval / vanishing_adjusted.evaluate(a);
+ ans_polynomial = ans_polynomial
+ + DensePolynomial::from_coefficients_vec(
+ vanishing_adjusted
+ .iter()
+ .map(|x| *x * scale_factor)
+ .collect(),
+ );
+ }
+ ans_polynomial
+}
+
+// Given a generator and a coset offset, computes the interpolating offset
+pub fn fft_interpolate_naive<'a, F: FftField>(
+ generator: F,
+ coset_offset: F,
+ points: impl IntoIterator<Item = &'a F>,
+) -> DensePolynomial<F> {
+ let points: Vec<_> = points.into_iter().cloned().collect();
+ let folding_factor = points.len();
+ assert!(utils::is_power_of_two(folding_factor));
+
+ let size_as_field_element = F::from(folding_factor as u64);
+
+ // Do some batch inversion
+ let mut to_invert = vec![size_as_field_element, coset_offset, generator];
+ batch_inversion(&mut to_invert);
+ let size_inv = to_invert[0];
+ let coset_offset_inv = to_invert[1];
+ let generator_inv = to_invert[2];
+
+ let domain = Radix2EvaluationDomain {
+ size: folding_factor as u64,
+ log_size_of_group: folding_factor.ilog2(),
+ size_as_field_element,
+ size_inv,
+ group_gen: generator,
+ group_gen_inv: generator_inv,
+ offset: coset_offset,
+ offset_inv: coset_offset_inv,
+ offset_pow_size: coset_offset.pow([folding_factor as u64]),
+ };
+
+ let evaluations = Evaluations::from_vec_and_domain(points, domain);
+
+ evaluations.interpolate()
+}
+
+// Given a generator and a coset offset, computes the interpolating offset
+// Requires to be given the inversion of the generator and coset offset (and thus can be more
+// efficient)
+pub fn fft_interpolate<'a, F: FftField>(
+ generator: F,
+ coset_offset: F,
+ generator_inv: F,
+ coset_offset_inv: F,
+ size_inv: F,
+ points: impl IntoIterator<Item = &'a F>,
+) -> DensePolynomial<F> {
+ let points: Vec<_> = points.into_iter().cloned().collect();
+ let folding_factor = points.len();
+ assert!(utils::is_power_of_two(folding_factor));
+
+ let size_as_field_element = F::from(folding_factor as u64);
+
+ let domain = Radix2EvaluationDomain {
+ size: folding_factor as u64,
+ log_size_of_group: folding_factor.ilog2(),
+ size_as_field_element,
+ size_inv,
+ group_gen: generator,
+ group_gen_inv: generator_inv,
+ offset: coset_offset,
+ offset_inv: coset_offset_inv,
+ offset_pow_size: coset_offset.pow([folding_factor as u64]),
+ };
+
+ let evaluations = Evaluations::from_vec_and_domain(points, domain);
+
+ evaluations.interpolate()
+}
+
+// Computes a polynomial that interpolates the given points with the given answers
+pub fn evaluate_interpolation<'a, F: Field>(
+ points: impl IntoIterator<Item = &'a (F, F)>,
+ point: F,
+) -> F {
+ let points = points.into_iter().collect::<Vec<_>>();
+
+ for (p, a) in points.iter() {
+ if p == &point {
+ return *a;
+ }
+ }
+
+ let denominators: Vec<_> = points
+ .iter()
+ .map(|(p, _)| p)
+ .enumerate()
+ .map(|(i, xi)| {
+ points
+ .iter()
+ .map(|(p, _)| p)
+ .enumerate()
+ .filter(|(j, _)| &i != j)
+ .map(|(_, xj)| *xi - *xj)
+ .product::<F>()
+ })
+ .collect();
+
+ // Do a batch inversion
+ let mut denominators = points
+ .iter()
+ .zip(denominators)
+ .map(|((xi, _), d)| d * (point - xi))
+ .collect::<Vec<_>>();
+ batch_inversion(&mut denominators);
+
+ let res: F = points
+ .iter()
+ .zip(denominators)
+ .map(|((_, a), d)| *a * d)
+ .sum();
+
+ res * points.iter().map(|(xi, _)| point - xi).product::