Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 22 additions & 15 deletions src/backends/plonky2/basetypes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -68,38 +68,45 @@ pub static DEFAULT_VD_SET: LazyLock<VDSet> = LazyLock::new(|| {
#[derive(Clone, Debug)]
pub struct VDSet {
root: Hash,
// (verifier_data, merkleproof)
// (verifier_data's hash, merkleproof)
proofs_map: HashMap<HashOut<F>, MerkleClaimAndProof>,
}
impl VDSet {
/// builds the verifier_datas tree, and returns the root and the proofs
pub fn new(tree_depth: usize, vds: &[VerifierOnlyCircuitData]) -> Result<Self> {
// first of all, sort the vds, so that each set of verifier_datas gets
// the same root
let vds: Vec<&VerifierOnlyCircuitData> = vds
// compute the verifier_data's hashes
let vds_hashes: Vec<HashOut<F>> = vds
.iter()
.sorted_by_key(|vd| RawValue(vd.circuit_digest.elements))
.map(crate::backends::plonky2::recursion::circuit::hash_verifier_data)
.collect::<Vec<_>>();

// before using the hash values, sort them, so that each set of
// verifier_datas gets the same VDSet root
let vds_hashes: Vec<&HashOut<F>> = vds_hashes
.iter()
.sorted_by_key(|vd| RawValue(vd.elements))
.collect::<Vec<_>>();

let array = Array::new(
tree_depth,
vds.iter()
.map(|vd| Value::from(RawValue(vd.circuit_digest.elements)))
vds_hashes
.iter()
.map(|vd| Value::from(RawValue(vd.elements)))
.collect(),
)?;

let root = array.commitment();
let mut proofs_map = HashMap::<HashOut<F>, MerkleClaimAndProof>::new();

for (i, vd) in vds.iter().enumerate() {
for (i, vd) in vds_hashes.iter().enumerate() {
let (value, proof) = array.prove(i)?;
let p = MerkleClaimAndProof {
root,
key: RawValue::from(i as i64),
value: value.raw(),
proof,
};
proofs_map.insert(vd.circuit_digest, p);
proofs_map.insert(**vd, p);
}
Ok(Self { root, proofs_map })
}
Expand All @@ -113,12 +120,12 @@ impl VDSet {
) -> Result<Vec<MerkleClaimAndProof>> {
let mut proofs: Vec<MerkleClaimAndProof> = vec![];
for vd in vds {
let p =
self.proofs_map
.get(&vd.circuit_digest)
.ok_or(crate::middleware::Error::custom(
"verifier_data not found in VDSet".to_string(),
))?;
let p = self
.proofs_map
.get(&crate::backends::plonky2::recursion::circuit::hash_verifier_data(vd))
.ok_or(crate::middleware::Error::custom(
"verifier_data not found in VDSet".to_string(),
))?;
proofs.push(p.clone());
}
Ok(proofs)
Expand Down
51 changes: 49 additions & 2 deletions src/backends/plonky2/recursion/circuit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,10 @@ use plonky2::{
self,
field::{extension::quintic::QuinticExtension, types::Field},
gates::{gate::GateRef, noop::NoopGate},
hash::hash_types::HashOutTarget,
hash::{
hash_types::{HashOut, HashOutTarget},
poseidon::PoseidonHash,
},
iop::{
target::Target,
witness::{PartialWitness, WitnessWrite},
Expand All @@ -24,6 +27,7 @@ use plonky2::{
CircuitConfig, CircuitData, CommonCircuitData, ProverCircuitData, VerifierCircuitData,
VerifierCircuitTarget, VerifierOnlyCircuitData,
},
config::Hasher,
proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget},
},
util::log2_ceil,
Expand Down Expand Up @@ -201,7 +205,18 @@ impl<I: InnerCircuit> RecursiveCircuit<I> {
let verified_proofs = (0..arity)
.map(|i| VerifiedProofTarget {
public_inputs: proofs_targ[i].public_inputs.clone(),
verifier_data_hash: verifier_datas_targ[i].circuit_digest,
// note: here we're hashing the verifier_data as Hash(vd.circuit_digest,
// vd.constant_sigmas_cap), even though the circuit_digest is already a hash that
// commits to the constant_sigmas_cap. Conceptually we would use the circuit_digest
// as the hash of the verifier_data, but unfortunately the recursion verification
// circuit does not enforce this link. Alternatively we could compute a modified
// circuit_digest, hashing as in the original plonky2 circuit_digest but
// additionally checking it in-circuit; however, since in terms of circuit cost
// that would require a hash over a similar number of elements, the approach we
// take is to reuse the already-computed circuit_digest and hash it together with
// the constant_sigmas_cap, performing the same computation in-circuit, obtaining
// a new hash that we use to represent the verifier_data.
verifier_data_hash: hash_verifier_data_gadget(builder, &verifier_datas_targ[i]),
})
.collect_vec();

Expand Down Expand Up @@ -478,6 +493,38 @@ pub fn pad_circuit(builder: &mut CircuitBuilder<F, D>, common_data: &CommonCircu
}
}

/// In-circuit counterpart of `hash_verifier_data`: Poseidon-hashes the
/// verifier data's circuit digest followed by all constants/sigmas cap
/// elements into a single hash target.
fn hash_verifier_data_gadget(
    builder: &mut CircuitBuilder<F, D>,
    verifier_data: &VerifierCircuitTarget,
) -> HashOutTarget {
    // Build the preimage in the same order as the native hashing function:
    // circuit_digest elements first, then every cap element, flattened.
    let mut preimage: Vec<Target> = Vec::new();
    preimage.extend(verifier_data.circuit_digest.elements);
    for cap_elem in verifier_data.constants_sigmas_cap.0.iter() {
        preimage.extend(cap_elem.elements);
    }
    builder.hash_n_to_hash_no_pad::<PoseidonHash>(preimage)
}

// compatible with hash_verifier_data_gadget.
/// Poseidon-hashes a `VerifierOnlyCircuitData` into a single `HashOut`:
/// the circuit digest elements followed by the flattened constants/sigmas
/// cap elements. Must stay compatible with `hash_verifier_data_gadget`
/// (identical preimage order).
pub(crate) fn hash_verifier_data(verifier_only_data: &VerifierOnlyCircuitData<C, D>) -> HashOut<F> {
    let preimage: Vec<F> = verifier_only_data
        .circuit_digest
        .elements
        .iter()
        .copied()
        .chain(
            verifier_only_data
                .constants_sigmas_cap
                .0
                .iter()
                .flat_map(|cap_elem| cap_elem.elements),
        )
        .collect();
    PoseidonHash::hash_no_pad(&preimage)
}

#[cfg(test)]
mod tests {
use std::time::Instant;
Expand Down
Loading