This repository was archived by the owner on Apr 18, 2025. It is now read-only.

[feat] prover support sp1 proof #344

Open · wants to merge 18 commits into base: main
11 changes: 7 additions & 4 deletions bin/src/trace_prover.rs
@@ -1,6 +1,6 @@
use clap::Parser;
use integration::{prove::prove_and_verify_chunk, test_util::load_chunk};
use prover::{utils::init_env_and_log, ChunkProvingTask};
use prover::{utils::init_env_and_log, zkevm::Prover, ChunkProvingTask};
use std::env;

#[derive(Parser, Debug)]
@@ -40,12 +40,15 @@ fn main() {
*prover::config::LAYER2_DEGREE,
],
);
let mut prover = Prover::from_params_and_assets(&params_map, &args.assets_path);
log::info!("Constructed chunk prover");
prove_and_verify_chunk(
chunk,
Some("0"), // same as `make test-chunk-prove`, to load vk
&params_map,
&args.assets_path,
&output_dir,
chunk,
&mut prover,
Some("0"), // same as `make test-chunk-prove`, to load vk
true,
);
log::info!("chunk prove done");
}
127 changes: 119 additions & 8 deletions integration/src/prove.rs
@@ -1,7 +1,7 @@
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use prover::{
aggregator::Prover as BatchProver, zkevm::Prover as ChunkProver, BatchData, BatchProof,
BatchProvingTask, BundleProvingTask, ChunkInfo, ChunkProvingTask, MAX_AGG_SNARKS,
BatchProvingTask, BundleProvingTask, ChunkInfo, ChunkProof, ChunkProvingTask, MAX_AGG_SNARKS,
};
use std::{collections::BTreeMap, env, time::Instant};

@@ -19,16 +19,118 @@ pub fn new_batch_prover<'a>(
prover
}

pub fn prove_and_verify_chunk(
use anyhow::Result;
use prover::{utils::chunk_trace_to_witness_block, Snark};

/// SP1Prover simply compresses a snark from sp1, so we end up with the
/// same snark (differing only in preprocessed bytes) as zkevm's chunk proof
pub struct SP1Prover<'p>(ChunkProver<'p>);

impl<'params> SP1Prover<'params> {
pub fn from_params_and_assets(
params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>,
assets_dir: &str,
) -> Self {
Self(ChunkProver::from_params_and_assets(params_map, assets_dir))
}

pub fn get_vk(&self) -> Option<Vec<u8>> {
self.0.get_vk()
}

pub fn gen_chunk_proof(
&mut self,
chunk: ChunkProvingTask,
chunk_identifier: &str,
sp1_snark: Snark,
output_dir: Option<&str>,
) -> Result<ChunkProof> {
use prover::config::LayerId::Layer2;

let witness_block = chunk_trace_to_witness_block(chunk.block_traces)?;
let chunk_info = if let Some(chunk_info_input) = chunk.chunk_info {
chunk_info_input
} else {
log::info!("gen chunk_info {chunk_identifier:?}");
ChunkInfo::from_witness_block(&witness_block, false)
};

let comp_snark = self.0.prover_impl.load_or_gen_comp_snark(
chunk_identifier,
Layer2.id(),
false,
Layer2.degree(),
sp1_snark,
output_dir,
)?;

let pk = self.0.prover_impl.pk(Layer2.id());
let result = ChunkProof::new(comp_snark, pk, chunk_info, Vec::new());

// in case we read the snark directly from a previous calculation,
// the pk is not available and we skip dumping the proof
if pk.is_some() {
if let (Some(output_dir), Ok(proof)) = (output_dir, &result) {
proof.dump(output_dir, chunk_identifier)?;
}
} else {
log::info!("skip dumping vk since snark is restore from disk")
}
result
}
}

/// prove_and_verify_sp1_chunk expects an sp1 snark named "sp1_snark_<chunk_id>.json"
pub fn prove_and_verify_sp1_chunk(
params_map: &BTreeMap<u32, ParamsKZG<Bn256>>,
output_dir: &str,
sp1_dir: Option<&str>,
chunk: ChunkProvingTask,
prover: &mut SP1Prover,
chunk_identifier: Option<&str>,
) -> ChunkProof {
use prover::io::load_snark;
use std::path::Path;

let chunk_identifier =
chunk_identifier.map_or_else(|| chunk.identifier(), |name| name.to_string());

let sp1_dir = sp1_dir.unwrap_or(output_dir);
let sp1_snark_name = format!("sp1_snark_{}.json", chunk_identifier);

let now = Instant::now();
let sp1_snark = load_snark(Path::new(sp1_dir).join(&sp1_snark_name).to_str().unwrap())
.ok()
.flatten()
.unwrap_or_else(|| panic!("missing sp1 snark {sp1_snark_name} under {sp1_dir}"));
let chunk_proof = prover
.gen_chunk_proof(chunk, &chunk_identifier, sp1_snark, Some(output_dir))
.expect("cannot generate sp1 chunk snark");
log::info!(
"finish generating sp1 chunk snark, elapsed: {:?}",
now.elapsed()
);

// output_dir is used to load chunk vk
env::set_var(
"CHUNK_VK_FILENAME",
&format!("vk_chunk_{chunk_identifier}.vkey"),
);
let verifier = new_chunk_verifier(params_map, output_dir);
assert!(verifier.verify_snark(chunk_proof.clone().to_snark()));
log::info!("Verified sp1 chunk proof");

chunk_proof
}
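
For orientation, a minimal calling sketch under stated assumptions: the trace path, assets path, sp1 snark directory, and chunk identifier "0" below are placeholders (not part of this PR), an sp1 snark must already have been dumped as sp1_snark_0.json, and the params degree list is elided (use the same list as bin/src/trace_prover.rs).

use integration::{
    prove::{prove_and_verify_sp1_chunk, SP1Prover},
    test_util::load_chunk,
};
use prover::{utils::init_env_and_log, ChunkProvingTask};

fn main() {
    let output_dir = init_env_and_log("sp1_chunk_sketch");
    let params_map = prover::common::Prover::load_params_map(
        "./test_params", // placeholder params dir
        &[
            // elided: same degree list as in bin/src/trace_prover.rs
            *prover::config::LAYER2_DEGREE,
        ],
    );
    // Build the proving task from a chunk trace on disk (placeholder path).
    let chunk = ChunkProvingTask::from(load_chunk("./tests/traces/chunk_1").1);
    let mut prover = SP1Prover::from_params_and_assets(&params_map, "./test_assets");
    // Looks for ./sp1_snarks/sp1_snark_0.json; passing None falls back to output_dir.
    let _proof = prove_and_verify_sp1_chunk(
        &params_map,
        &output_dir,
        Some("./sp1_snarks"),
        chunk,
        &mut prover,
        Some("0"),
    );
}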

pub fn prove_and_verify_chunk(
params_map: &BTreeMap<u32, ParamsKZG<Bn256>>,
assets_path: &str,
output_dir: &str,
) {
let mut prover = ChunkProver::from_params_and_assets(params_map, assets_path);
log::info!("Constructed chunk prover");

chunk: ChunkProvingTask,
prover: &mut ChunkProver,
chunk_identifier: Option<&str>,
skip_verify: bool,
) -> ChunkProof {
let chunk_identifier =
chunk_identifier.map_or_else(|| chunk.identifier(), |name| name.to_string());

@@ -41,14 +143,23 @@ pub fn prove_and_verify_chunk(
now.elapsed()
);

// there is an issue: if the snark is restored from disk, the pk is not generated,
// and dumping the proof would overwrite the existing vk with 0 bytes,
// causing verification to fail.
// The work-around is to skip verification in the e2e test.
if skip_verify {
return chunk_proof;
}
// output_dir is used to load chunk vk
env::set_var(
"CHUNK_VK_FILENAME",
&format!("vk_chunk_{chunk_identifier}.vkey"),
);
let verifier = new_chunk_verifier(params_map, output_dir);
assert!(verifier.verify_snark(chunk_proof.to_snark()));
assert!(verifier.verify_snark(chunk_proof.clone().to_snark()));
log::info!("Verified chunk proof");

chunk_proof
}
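
Note the refactor moves prover construction out to the caller (see bin/src/trace_prover.rs above), so a single ChunkProver can now be reused across chunks. A minimal sketch of that reuse, with placeholder paths and the degree list elided as before:

use integration::{prove::prove_and_verify_chunk, test_util::load_chunk};
use prover::{utils::init_env_and_log, zkevm::Prover as ChunkProver, ChunkProvingTask};

fn main() {
    let output_dir = init_env_and_log("chunk_reuse_sketch");
    let params_map = prover::common::Prover::load_params_map(
        "./test_params", // placeholder params dir
        &[
            // elided: same degree list as in bin/src/trace_prover.rs
            *prover::config::LAYER2_DEGREE,
        ],
    );
    // Constructed once, reused for every chunk below.
    let mut prover = ChunkProver::from_params_and_assets(&params_map, "./test_assets");
    for (i, dir) in ["./tests/traces/chunk_1", "./tests/traces/chunk_2"]
        .iter()
        .enumerate()
    {
        let chunk = ChunkProvingTask::from(load_chunk(dir).1);
        let id = i.to_string();
        let _proof = prove_and_verify_chunk(
            &params_map,
            &output_dir,
            chunk,
            &mut prover,
            Some(id.as_str()),
            false, // verify each proof; see the skip_verify caveat above
        );
    }
}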

pub fn prove_and_verify_batch(
87 changes: 86 additions & 1 deletion integration/tests/batch_tests.rs
@@ -53,7 +53,7 @@ fn test_batches_with_each_chunk_num_prove_verify() {
use prover::config::AGG_DEGREES;

let output_dir = init_env_and_log("batches_with_each_chunk_num_tests");
log::info!("Initialized ENV and created output-dir {output_dir}");
log::info!("Initialized ENV and use output-dir {output_dir}");

let params_map = prover::common::Prover::load_params_map(
PARAMS_DIR,
@@ -84,6 +84,91 @@ fn test_batches_with_each_chunk_num_prove_verify() {
}
}

#[cfg(feature = "prove_verify")]
#[ignore = "only used for subsequent chunk tests"]
#[test]
fn test_batch_prove_verify_after_chunk_tests() {
use integration::{
prove::get_blob_from_chunks,
test_util::{load_chunk, trace_path_for_test, PARAMS_DIR},
};
use itertools::Itertools;
use prover::{
config::AGG_DEGREES, eth_types::H256, proof::ChunkProof, BatchHeader, ChunkProvingTask,
};

let output_dir = init_env_and_log("batch_tests");
log::info!("Initialized ENV and created output-dir {output_dir}");

let params_map = prover::common::Prover::load_params_map(
PARAMS_DIR,
&AGG_DEGREES.iter().copied().collect_vec(),
);

let trace_paths_env = trace_path_for_test();
let trace_paths: Vec<_> = trace_paths_env.split(';').collect();
log::info!("Use traces paths {trace_paths:?}");

let mut l1_message_popped = 0;
let mut last_block_timestamp = 0;

// as gen_batch_proving_task does in e2e, collect every chunk
let chunk_proofs = trace_paths
.iter()
.map(|chunk_dir| load_chunk(chunk_dir).1)
.map(|traces| {
// make use of the traces before they are consumed by chunk proving
l1_message_popped += traces.iter().map(|tr| tr.num_l1_txs()).sum::<u64>();
last_block_timestamp = traces
.last()
.map_or(last_block_timestamp, |tr| tr.header.timestamp.as_u64());

let task = ChunkProvingTask::from(traces);
let loaded_proof = ChunkProof::from_json_file(&output_dir, &task.identifier());
if let Ok(proof) = loaded_proof.as_ref() {
log::info!(
"expected PI of {} is {:#x?}",
task.identifier(),
proof.chunk_info.public_input_hash(),
);
}
loaded_proof
})
.collect::<Result<Vec<_>, _>>()
.unwrap();

let chunk_infos = chunk_proofs
.iter()
.map(|proof| proof.chunk_info.clone())
.collect::<Vec<_>>();

let blob_bytes = get_blob_from_chunks(&chunk_infos);

let batch_header = BatchHeader::construct_from_chunks(
4,
123,
l1_message_popped,
l1_message_popped,
H256([
0xab, 0xac, 0xad, 0xae, 0xaf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
]),
last_block_timestamp,
&chunk_infos,
&blob_bytes,
);

let batch = BatchProvingTask {
chunk_proofs,
batch_header,
blob_bytes,
};

// dump_chunk_protocol(&batch, &output_dir);
let mut batch_prover = new_batch_prover(&params_map, &output_dir);
prove_and_verify_batch(&params_map, &output_dir, &mut batch_prover, batch);
}

fn load_batch_proving_task(batch_task_file: &str) -> BatchProvingTask {
let batch: BatchProvingTask = from_json_file(batch_task_file).unwrap();
let tx_bytes_total_len: usize = batch
Expand Down
43 changes: 43 additions & 0 deletions integration/tests/bundle_tests.rs
@@ -37,3 +37,46 @@ fn gen_bundle_proving_task(batch_proof_files: &[&str]) -> BundleProvingTask {

BundleProvingTask { batch_proofs }
}

#[ignore]
#[test]
fn test_bundle_prove_verify_after_batch() {
use glob::glob;
use integration::test_util::PARAMS_DIR;
use itertools::Itertools;
use prover::{config::AGG_DEGREES, io::from_json_file, BatchProvingTask};

let output_dir = init_env_and_log("bundle_tests");

let mut batch_tasks = glob(&format!("{output_dir}/full_proof_batch_prove_?.json"))
.unwrap()
.map(|task_path| {
from_json_file::<BatchProvingTask>(task_path.unwrap().to_str().unwrap()).unwrap()
})
.collect::<Vec<_>>();

batch_tasks.sort_by_key(|task| task.batch_header.batch_index);

let batch_proofs: Vec<BatchProof> = batch_tasks
.iter()
.map(|task| {
log::info!("local batch proof {}", task.identifier());
from_json_file(&format!(
"{output_dir}/full_proof_batch_{}.json",
task.identifier()
))
.unwrap()
})
.collect();

let bundle = BundleProvingTask { batch_proofs };
let params_map = prover::common::Prover::load_params_map(
PARAMS_DIR,
&AGG_DEGREES.iter().copied().collect_vec(),
);

let mut prover = new_batch_prover(&params_map, &output_dir);
prove_and_verify_bundle(&output_dir, &mut prover, bundle);
}