
feat: impl zkvm support for pacaya #526

Merged
12 commits merged on Apr 7, 2025
30 changes: 22 additions & 8 deletions host/src/server/api/v3/proof/batch.rs
@@ -4,7 +4,9 @@ use crate::{
api::v3::{ProofResponse, Status},
handler::prove_many,
prove_aggregation,
utils::{is_zk_any_request, to_v3_status},
utils::{
draw_for_zk_any_batch_request, fulfill_sp1_params, is_zk_any_request, to_v3_status,
},
},
};
use axum::{extract::State, routing::post, Json, Router};
@@ -39,22 +41,34 @@ use utoipa::OpenApi;
/// - risc0 - uses the risc0 prover
async fn batch_handler(
State(actor): State<Actor>,
Json(batch_request_opt): Json<Value>,
Json(mut batch_request_opt): Json<Value>,
) -> HostResult<Status> {
if is_zk_any_request(&batch_request_opt) {
return Ok(Status::Ok {
proof_type: ProofType::Native,
data: ProofResponse::Status {
status: TaskStatus::ZKAnyNotDrawn,
},
});
fulfill_sp1_params(&mut batch_request_opt);
}

let batch_request = {
// Override the existing proof request config from the config file and command line
// options with the request from the client, and convert to a BatchProofRequest.
let mut opts = serde_json::to_value(actor.default_request_config())?;
merge(&mut opts, &batch_request_opt);

// For zk_any request, draw zk proof type based on the block hash.
if is_zk_any_request(&opts) {
tracing::info!("bilibili is_zk_any");
match draw_for_zk_any_batch_request(&actor, &opts).await? {
Some(proof_type) => opts["proof_type"] = serde_json::to_value(proof_type).unwrap(),
None => {
return Ok(Status::Ok {
proof_type: ProofType::Native,
data: ProofResponse::Status {
status: TaskStatus::ZKAnyNotDrawn,
},
});
}
}
}

let batch_request_opt: BatchProofRequestOpt = serde_json::from_value(opts)?;
let batch_request: BatchProofRequest = batch_request_opt.try_into()?;

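Taken together, the handler change means a zk_any batch request is no longer rejected up front: default SP1 parameters are filled in, the client request is merged over the server's default config, and a concrete zk proof type is drawn from the first batch's L1 inclusion block hash, with ZKAnyNotDrawn returned only when no type can be drawn. Below is a rough sketch of the request body such a handler reads, built from the fields the new helpers access; the exact "zk_any" proof_type string and the zk_any option shape are assumptions, not shown in this diff.

// Illustrative request body only: field names follow what draw_for_zk_any_batch_request
// reads (l1_network, batches[0].l1_inclusion_block_number); the "zk_any" proof_type value
// and the zk_any/aggregation knob are assumptions about the surrounding helpers.
use serde_json::{json, Value};

fn example_zk_any_batch_request() -> Value {
    json!({
        "proof_type": "zk_any",
        "l1_network": "ethereum",
        "batches": [
            { "l1_inclusion_block_number": 19_000_000u64 }
        ],
        "zk_any": { "aggregation": false }
    })
}
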
27 changes: 27 additions & 0 deletions host/src/server/utils.rs
@@ -90,6 +90,33 @@ pub async fn draw_for_zk_any_request(
Ok(actor.draw(&blockhash))
}

pub async fn draw_for_zk_any_batch_request(
actor: &Actor,
batch_proof_request_opt: &Value,
) -> HostResult<Option<ProofType>> {
let l1_network =
batch_proof_request_opt["l1_network"]
.as_str()
.ok_or(RaikoError::InvalidRequestConfig(
"Missing network".to_string(),
))?;
let batches =
batch_proof_request_opt["batches"]
.as_array()
.ok_or(RaikoError::InvalidRequestConfig(
"Missing batches".to_string(),
))?;
let first_batch = batches.first().ok_or(RaikoError::InvalidRequestConfig(
"batches is empty".to_string(),
))?;
let l1_inclusion_block_number = first_batch["l1_inclusion_block_number"].as_u64().ok_or(
RaikoError::InvalidRequestConfig("Missing l1_inclusion_block_number".to_string()),
)?;
let (_, blockhash) =
get_task_data(&l1_network, l1_inclusion_block_number, actor.chain_specs()).await?;
Ok(actor.draw(&blockhash))
}

pub fn fulfill_sp1_params(req: &mut Value) {
let zk_any_opts = req["zk_any"].as_object().clone();
let sp1_recursion = match zk_any_opts {
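draw_for_zk_any_batch_request resolves the first batch's L1 inclusion block to its block hash via get_task_data and hands that hash to Actor::draw, so every node looking at the same batch draws the same proof type. Actor::draw itself is not part of this diff; the following is only a hypothetical illustration of a deterministic, hash-keyed draw.

// Hypothetical sketch of a hash-keyed draw; the real Actor::draw is not shown in this
// diff and may weight or gate the provers differently.
use alloy_primitives::B256;
use raiko_lib::proof_type::ProofType;

fn draw_by_blockhash(blockhash: &B256, sp1_enabled: bool, risc0_enabled: bool) -> Option<ProofType> {
    match (sp1_enabled, risc0_enabled) {
        (false, false) => None,
        (true, false) => Some(ProofType::Sp1),
        (false, true) => Some(ProofType::Risc0),
        // Last byte of the block hash as a coin flip: deterministic across nodes.
        (true, true) => Some(if blockhash.as_slice()[31] % 2 == 0 {
            ProofType::Sp1
        } else {
            ProofType::Risc0
        }),
    }
}
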
2 changes: 1 addition & 1 deletion provers/risc0/builder/src/main.rs
@@ -6,7 +6,7 @@ use std::path::PathBuf;
fn main() {
let pipeline = Risc0Pipeline::new("provers/risc0/guest", "release");
pipeline.bins(
&["risc0-guest", "risc0-aggregation"],
&["risc0-guest", "risc0-aggregation", "risc0-batch"],
"provers/risc0/driver/src/methods",
);
#[cfg(feature = "test")]
5 changes: 3 additions & 2 deletions provers/risc0/driver/src/bonsai.rs
@@ -1,5 +1,4 @@
use crate::{
methods::risc0_guest::RISC0_GUEST_ID,
snarks::{stark2snark, verify_groth16_from_snark_receipt},
Risc0Response,
};
@@ -313,8 +312,10 @@ pub async fn bonsai_stark_to_snark(
stark_uuid: String,
stark_receipt: Receipt,
input: B256,
elf: &[u8],
) -> ProverResult<Risc0Response> {
let image_id = Digest::from(RISC0_GUEST_ID);
let image_id = risc0_zkvm::compute_image_id(elf)
.map_err(|e| ProverError::GuestError(format!("Failed to compute image id: {e:?}")))?;
let (snark_uuid, snark_receipt) = stark2snark(
image_id,
stark_uuid.clone(),
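bonsai_stark_to_snark now derives the image ID from the ELF its caller passes in, rather than from the hard-coded RISC0_GUEST_ID constant, so the same STARK-to-SNARK path can serve both the single-block guest and the new batch guest. A minimal sketch of that derivation using the public risc0_zkvm API:

// Minimal sketch, mirroring the change above: compute the image ID from a guest ELF at
// runtime instead of baking in a per-guest constant.
use risc0_zkvm::{compute_image_id, sha::Digest};

fn image_id_of(elf: &[u8]) -> Result<Digest, String> {
    // The image ID is a digest of the guest program; Groth16 verification is pinned to it.
    compute_image_id(elf).map_err(|e| format!("failed to compute image id: {e:?}"))
}
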
82 changes: 68 additions & 14 deletions provers/risc0/driver/src/lib.rs
@@ -3,8 +3,8 @@
#[cfg(feature = "bonsai-auto-scaling")]
use crate::bonsai::auto_scaling::shutdown_bonsai;
use crate::{
methods::risc0_aggregation::RISC0_AGGREGATION_ELF,
methods::risc0_guest::{RISC0_GUEST_ELF, RISC0_GUEST_ID},
methods::risc0_aggregation::RISC0_AGGREGATION_ELF, methods::risc0_batch::RISC0_BATCH_ELF,
methods::risc0_guest::RISC0_GUEST_ELF,
};
use alloy_primitives::{hex::ToHexExt, B256};
use bonsai::{cancel_proof, maybe_prove};
@@ -18,8 +18,10 @@ use raiko_lib::{
prover::{IdStore, IdWrite, Proof, ProofKey, Prover, ProverConfig, ProverError, ProverResult},
};
use risc0_zkvm::{
compute_image_id, default_prover, serde::to_vec, sha::Digestible, ExecutorEnv, ProverOpts,
Receipt,
compute_image_id, default_prover,
serde::to_vec,
sha::{Digest, Digestible},
ExecutorEnv, ProverOpts, Receipt,
};
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
@@ -93,7 +95,7 @@ impl Prover for Risc0Prover {
.await?;

let proof_gen_result = if config.snark && config.bonsai {
bonsai::bonsai_stark_to_snark(uuid, receipt, output.hash)
bonsai::bonsai_stark_to_snark(uuid, receipt, output.hash, RISC0_GUEST_ELF)
.await
.map(|r0_response| r0_response.into())
.map_err(|e| ProverError::GuestError(e.to_string()))
@@ -149,11 +151,16 @@ impl Prover for Risc0Prover {
.iter()
.map(|proof| proof.input.unwrap())
.collect::<Vec<_>>();

let input_proof_hex_str = input.proofs[0].proof.as_ref().unwrap();
let input_proof_bytes = hex::decode(&input_proof_hex_str[2..]).unwrap();
let input_image_id_bytes: [u8; 32] = input_proof_bytes[32..64].try_into().unwrap();
let input_proof_image_id = Digest::from(input_image_id_bytes);
let input = ZkAggregationGuestInput {
image_id: RISC0_GUEST_ID,
image_id: input_proof_image_id.as_words().try_into().unwrap(),
block_inputs,
};
info!("Start aggregate proofs");

// add_assumption makes the receipt to be verified available to the prover.
let env = {
let mut env = ExecutorEnv::builder();
@@ -173,10 +180,9 @@
"Generate aggregation receipt journal: {:?}",
alloy_primitives::hex::encode_prefixed(receipt.journal.bytes.clone())
);
let block_proof_image_id = compute_image_id(RISC0_GUEST_ELF).unwrap();
let aggregation_image_id = compute_image_id(RISC0_AGGREGATION_ELF).unwrap();
let proof_data = snarks::verify_aggregation_groth16_proof(
block_proof_image_id,
input_proof_image_id,
aggregation_image_id,
receipt.clone(),
)
@@ -223,12 +229,60 @@

async fn batch_run(
&self,
_input: GuestBatchInput,
_output: &GuestBatchOutput,
_config: &ProverConfig,
_store: Option<&mut dyn IdWrite>,
input: GuestBatchInput,
output: &GuestBatchOutput,
config: &ProverConfig,
id_store: Option<&mut dyn IdWrite>,
) -> ProverResult<Proof> {
unimplemented!();
let mut id_store = id_store;
let config = Risc0Param::deserialize(config.get("risc0").unwrap()).unwrap();
let proof_key = (
input.taiko.chain_spec.chain_id,
input.taiko.batch_id,
output.hash,
ProofType::Risc0 as u8,
);

let encoded_input = to_vec(&input).expect("Could not serialize proving input!");

let (uuid, receipt) = maybe_prove::<GuestBatchInput, B256>(
&config,
encoded_input,
RISC0_BATCH_ELF,
&output.hash,
(Vec::<Receipt>::new(), Vec::new()),
proof_key,
&mut id_store,
)
.await?;

let proof_gen_result = if config.snark && config.bonsai {
bonsai::bonsai_stark_to_snark(uuid, receipt, output.hash, RISC0_BATCH_ELF)
.await
.map(|r0_response| r0_response.into())
.map_err(|e| ProverError::GuestError(e.to_string()))
} else {
if !config.snark {
warn!("proof is not in snark mode, please check.");
}
Ok(Risc0Response {
proof: receipt.journal.encode_hex_with_prefix(),
receipt: serde_json::to_string(&receipt).unwrap(),
uuid,
input: output.hash,
}
.into())
};

#[cfg(feature = "bonsai-auto-scaling")]
if config.bonsai {
// shutdown bonsai
shutdown_bonsai()
.await
.map_err(|e| ProverError::GuestError(e.to_string()))?;
}

proof_gen_result
}
}

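Two things change in the driver: aggregation reads the image ID out of the first input proof (bytes 32..64 of the decoded proof hex) instead of assuming the single-block guest, and batch_run is now implemented by mirroring run with RISC0_BATCH_ELF. A sketch of the image-ID extraction, assuming the 0x-prefixed proof layout the slice indices above imply:

// Sketch of the aggregation image-ID extraction. Assumes the proof field is a 0x-prefixed
// hex string whose bytes 32..64 carry the image ID, as the indices above imply.
use risc0_zkvm::sha::Digest;

fn image_id_from_proof(proof_hex: &str) -> Result<Digest, String> {
    let bytes = hex::decode(proof_hex.trim_start_matches("0x"))
        .map_err(|e| format!("invalid proof hex: {e}"))?;
    let id: [u8; 32] = bytes
        .get(32..64)
        .ok_or_else(|| "proof shorter than 64 bytes".to_string())?
        .try_into()
        .map_err(|e| format!("bad image id slice: {e:?}"))?;
    Ok(Digest::from(id))
}
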
1 change: 1 addition & 0 deletions provers/risc0/driver/src/methods/mod.rs
@@ -1,4 +1,5 @@
pub mod risc0_aggregation;
pub mod risc0_batch;
pub mod risc0_guest;

// To build the following `$ cargo run --features test,bench --bin risc0-builder`
8 changes: 3 additions & 5 deletions provers/risc0/driver/src/methods/risc0_aggregation.rs
@@ -1,5 +1,3 @@
pub const RISC0_AGGREGATION_ELF: &[u8] =
include_bytes!("../../../guest/target/riscv32im-risc0-zkvm-elf/release/risc0-aggregation");
pub const RISC0_AGGREGATION_ID: [u32; 8] = [
757572567, 2963367168, 3257289195, 2520060355, 1598050287, 3638503613, 1461238162, 2044849682,
];

pub const RISC0_AGGREGATION_ELF: &[u8] = include_bytes!("../../../guest/target/riscv32im-risc0-zkvm-elf/release/risc0-aggregation");
pub const RISC0_AGGREGATION_ID: [u32; 8] = [3693181676, 1423972539, 2575584598, 1469200887, 651718346, 3917396100, 60015622, 535591167];
3 changes: 3 additions & 0 deletions provers/risc0/driver/src/methods/risc0_batch.rs
@@ -0,0 +1,3 @@

pub const RISC0_BATCH_ELF: &[u8] = include_bytes!("../../../guest/target/riscv32im-risc0-zkvm-elf/release/risc0-batch");
pub const RISC0_BATCH_ID: [u32; 8] = [1585630346, 345989788, 4151101683, 819276391, 4218347574, 861965425, 3025423219, 3488057193];
8 changes: 3 additions & 5 deletions provers/risc0/driver/src/methods/risc0_guest.rs
@@ -1,5 +1,3 @@
pub const RISC0_GUEST_ELF: &[u8] =
include_bytes!("../../../guest/target/riscv32im-risc0-zkvm-elf/release/risc0-guest");
pub const RISC0_GUEST_ID: [u32; 8] = [
1689653193, 2796478021, 3874123379, 560216071, 3867155830, 2784172499, 3235388420, 507179944,
];

pub const RISC0_GUEST_ELF: &[u8] = include_bytes!("../../../guest/target/riscv32im-risc0-zkvm-elf/release/risc0-guest");
pub const RISC0_GUEST_ID: [u32; 8] = [3813617258, 1499495912, 1722814953, 1786341610, 888187967, 1145233772, 2032049967, 243991141];
4 changes: 4 additions & 0 deletions provers/risc0/guest/Cargo.toml
@@ -13,6 +13,10 @@ path = "src/zk_op.rs"
name = "risc0-aggregation"
path = "src/aggregation.rs"

[[bin]]
name = "risc0-batch"
path = "src/batch.rs"

[[bin]]
name = "sha256"
path = "src/benchmark/sha256.rs"
39 changes: 39 additions & 0 deletions provers/risc0/guest/src/batch.rs
@@ -0,0 +1,39 @@
#![no_main]
harness::entrypoint!(main, tests, zk_op::tests);
use raiko_lib::{
builder::calculate_batch_blocks_final_header, input::GuestBatchInput, proof_type::ProofType,
protocol_instance::ProtocolInstance,
};
use revm_precompile::zk_op::ZkOperation;
use risc0_zkvm::guest::env;
use zk_op::Risc0Operator;

pub mod mem;

pub use mem::*;

fn main() {
let batch_input: GuestBatchInput = env::read();

revm_precompile::zk_op::ZKVM_OPERATOR.get_or_init(|| Box::new(Risc0Operator {}));
revm_precompile::zk_op::ZKVM_OPERATIONS
.set(Box::new(vec![ZkOperation::Sha256, ZkOperation::Secp256k1]))
.expect("Failed to set ZkvmOperations");

let final_blocks = calculate_batch_blocks_final_header(&batch_input);
let pi = ProtocolInstance::new_batch(&batch_input, final_blocks, ProofType::Risc0)
.unwrap()
.instance_hash();

env::commit(&pi);
}

harness::zk_suits!(
pub mod tests {
#[test]
pub fn test_build_from_mock_input() {
// Todo: impl mock input for static unit test
assert_eq!(1, 1);
}
}
);
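The batch guest commits only the protocol-instance hash to the journal, so a verifier checks the receipt against the batch image ID and decodes that single hash back out. A host-side sketch of that check, assuming the standard risc0_zkvm receipt API and the RISC0_BATCH_ID constant added above:

// Host-side sketch: verify a batch receipt and recover the committed instance hash.
// Assumes the standard risc0_zkvm Receipt API; RISC0_BATCH_ID is the constant added above.
use alloy_primitives::B256;
use risc0_zkvm::Receipt;

fn check_batch_receipt(receipt: &Receipt, batch_image_id: [u32; 8]) -> Result<B256, String> {
    receipt
        .verify(batch_image_id)
        .map_err(|e| format!("receipt verification failed: {e:?}"))?;
    // The guest committed the ProtocolInstance hash with env::commit(&pi).
    receipt
        .journal
        .decode::<B256>()
        .map_err(|e| format!("journal decode failed: {e:?}"))
}
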
5 changes: 4 additions & 1 deletion provers/sp1/builder/src/main.rs
@@ -5,7 +5,10 @@ use std::path::PathBuf;

fn main() {
let pipeline = Sp1Pipeline::new("provers/sp1/guest", "release");
pipeline.bins(&["sp1-guest", "sp1-aggregation"], "provers/sp1/guest/elf");
pipeline.bins(
&["sp1-guest", "sp1-aggregation", "sp1-batch"],
"provers/sp1/guest/elf",
);
#[cfg(feature = "test")]
pipeline.tests(&["sp1-guest"], "provers/sp1/guest/elf");
#[cfg(feature = "bench")]