diff --git a/src/backends/plonky2/basetypes.rs b/src/backends/plonky2/basetypes.rs index d2b48500..5c32189c 100644 --- a/src/backends/plonky2/basetypes.rs +++ b/src/backends/plonky2/basetypes.rs @@ -68,30 +68,37 @@ pub static DEFAULT_VD_SET: LazyLock<VDSet> = LazyLock::new(|| { #[derive(Clone, Debug)] pub struct VDSet { root: Hash, - // (verifier_data, merkleproof) + // (verifier_data's hash, merkleproof) proofs_map: HashMap<HashOut<F>, MerkleClaimAndProof>, } impl VDSet { /// builds the verifier_datas tree, and returns the root and the proofs pub fn new(tree_depth: usize, vds: &[VerifierOnlyCircuitData]) -> Result<Self> { - // first of all, sort the vds, so that each set of verifier_datas gets - // the same root - let vds: Vec<&VerifierOnlyCircuitData> = vds + // compute the verifier_data's hashes + let vds_hashes: Vec<HashOut<F>> = vds .iter() - .sorted_by_key(|vd| RawValue(vd.circuit_digest.elements)) + .map(crate::backends::plonky2::recursion::circuit::hash_verifier_data) + .collect::<Vec<_>>(); + + // before using the hash values, sort them, so that each set of + // verifier_datas gets the same VDSet root + let vds_hashes: Vec<&HashOut<F>> = vds_hashes + .iter() + .sorted_by_key(|vd| RawValue(vd.elements)) + .collect::<Vec<_>>(); let array = Array::new( tree_depth, - vds.iter() - .map(|vd| Value::from(RawValue(vd.circuit_digest.elements))) + vds_hashes + .iter() + .map(|vd| Value::from(RawValue(vd.elements))) .collect(), )?; let root = array.commitment(); let mut proofs_map = HashMap::<HashOut<F>, MerkleClaimAndProof>::new(); - for (i, vd) in vds.iter().enumerate() { + for (i, vd) in vds_hashes.iter().enumerate() { let (value, proof) = array.prove(i)?; let p = MerkleClaimAndProof { root, @@ -99,7 +106,7 @@ impl VDSet { value: value.raw(), proof, }; - proofs_map.insert(vd.circuit_digest, p); + proofs_map.insert(**vd, p); } Ok(Self { root, proofs_map }) } @@ -113,12 +120,12 @@ impl VDSet { ) -> Result<Vec<MerkleClaimAndProof>> { let mut proofs: Vec<MerkleClaimAndProof> = vec![]; for vd in vds { - let p = - self.proofs_map - .get(&vd.circuit_digest) -
.ok_or(crate::middleware::Error::custom( - "verifier_data not found in VDSet".to_string(), - ))?; + let p = self + .proofs_map + .get(&crate::backends::plonky2::recursion::circuit::hash_verifier_data(vd)) + .ok_or(crate::middleware::Error::custom( + "verifier_data not found in VDSet".to_string(), + ))?; proofs.push(p.clone()); } Ok(proofs) diff --git a/src/backends/plonky2/recursion/circuit.rs b/src/backends/plonky2/recursion/circuit.rs index fd01a8aa..664f7e6e 100644 --- a/src/backends/plonky2/recursion/circuit.rs +++ b/src/backends/plonky2/recursion/circuit.rs @@ -13,7 +13,10 @@ use plonky2::{ self, field::{extension::quintic::QuinticExtension, types::Field}, gates::{gate::GateRef, noop::NoopGate}, - hash::hash_types::HashOutTarget, + hash::{ + hash_types::{HashOut, HashOutTarget}, + poseidon::PoseidonHash, + }, iop::{ target::Target, witness::{PartialWitness, WitnessWrite}, @@ -24,6 +27,7 @@ use plonky2::{ CircuitConfig, CircuitData, CommonCircuitData, ProverCircuitData, VerifierCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData, }, + config::Hasher, proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget}, }, util::log2_ceil, @@ -201,7 +205,18 @@ impl RecursiveCircuit { let verified_proofs = (0..arity) .map(|i| VerifiedProofTarget { public_inputs: proofs_targ[i].public_inputs.clone(), - verifier_data_hash: verifier_datas_targ[i].circuit_digest, + // note: here we're hashing the verifier_data as Hash(vd.circuit_digest, + // vd.constants_sigmas_cap), despite the fact that the circuit_digest is already a hash + // containing the constants_sigmas_cap. Conceptually we would use the circuit_digest as the hash + // of the verifier_data, but unfortunately, the recursion verification circuit does + // not ensure this link. Alternatively we could calculate a modified + // circuit_digest, hashing as in the original plonky2's circuit_digest but + // additionally checking it in-circuit.
But since in terms of circuit costs it would + // require a hash (with a similar amount of elements), the approach that we follow is to take + // the already computed circuit_digest and hash it together with the + // constants_sigmas_cap, doing the same computation in-circuit, obtaining a new hash + // that we use to represent the verifier_data. + verifier_data_hash: hash_verifier_data_gadget(builder, &verifier_datas_targ[i]), }) .collect_vec(); @@ -478,6 +493,38 @@ pub fn pad_circuit(builder: &mut CircuitBuilder<F, D>, common_data: &CommonCircu } } +fn hash_verifier_data_gadget( + builder: &mut CircuitBuilder<F, D>, + verifier_data: &VerifierCircuitTarget, +) -> HashOutTarget { + let f: Vec<Target> = [ + verifier_data.circuit_digest.elements.to_vec(), + verifier_data + .constants_sigmas_cap + .0 + .iter() + .flat_map(|e| e.elements) + .collect(), + ] + .concat(); + builder.hash_n_to_hash_no_pad::<PoseidonHash>(f) +} + +// compatible with hash_verifier_data_gadget. +pub(crate) fn hash_verifier_data(verifier_only_data: &VerifierOnlyCircuitData<C, D>) -> HashOut<F> { + let f: Vec<F> = [ + verifier_only_data.circuit_digest.elements.to_vec(), + verifier_only_data + .constants_sigmas_cap + .0 + .iter() + .flat_map(|e| e.elements) + .collect(), + ] + .concat(); + PoseidonHash::hash_no_pad(&f) +} + #[cfg(test)] mod tests { use std::time::Instant;