Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

v0.13.0: conditional compression #336

Merged
merged 6 commits into from
Aug 20, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 24 additions & 24 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ members = [
]

[workspace.package]
version = "0.10.0"
version = "0.13.0"
edition = "2021"
license = "MIT OR Apache-2.0"

Expand All @@ -27,7 +27,7 @@ serde_json = "1.0"
tokio = { version = "1.32", features = ["full"] }

halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "release/v0.12.0", default-features = false, features = ["parallel_syn", "scroll"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.0", default-features = false, features = ["parallel_syn", "scroll"] }
integration = { path = "integration" }

[patch.crates-io]
Expand Down
8 changes: 6 additions & 2 deletions bin/src/chain_prover.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ use integration::capacity_checker::{
prepare_circuit_capacity_checker, run_circuit_capacity_checker, CCCMode,
};
use prover::{
aggregator,
utils::init_env_and_log,
zkevm::{circuit::block_traces_to_witness_block, CircuitCapacityChecker, RowUsage},
BatchData, BlockTrace, ChunkInfo, ChunkProof, MAX_AGG_SNARKS,
Expand Down Expand Up @@ -77,7 +78,9 @@ impl BatchBuilder {
return Some(batch);
}

let compressed_da_size = self.batch_data.get_encoded_batch_data_bytes().len();
let batch_bytes = self.batch_data.get_batch_data_bytes();
let blob_bytes = aggregator::eip4844::get_blob_bytes(&batch_bytes);
let compressed_da_size = blob_bytes.len();
let uncompressed_da_size = self
.batch_data
.chunk_sizes
Expand Down Expand Up @@ -197,7 +200,8 @@ async fn prove_by_block(l2geth: &l2geth_client::Client, begin_block: i64, end_bl
let mut padded_batch = batch.clone();
padding_chunk(&mut padded_batch);
let batch_data = BatchData::<{ MAX_AGG_SNARKS }>::new(batch.len(), &padded_batch);
let compressed_da_size = batch_data.get_encoded_batch_data_bytes().len();
let compressed_da_size =
aggregator::eip4844::get_blob_bytes(&batch_data.get_batch_data_bytes()).len();
log::info!(
"batch built: blob usage {:.3}, chunk num {}, block num {}, block range {} to {}",
compressed_da_size as f32 / constants::N_BLOB_BYTES as f32,
Expand Down
8 changes: 8 additions & 0 deletions bin/src/prove_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,19 @@ pub fn prove_batch(
chunk_proofs: Vec<ChunkProof>,
batch_header: BatchHeader<MAX_AGG_SNARKS>,
) {
use integration::prove::get_blob_from_chunks;
use itertools::Itertools;
use prover::BatchProvingTask;

let chunk_infos = chunk_proofs
.iter()
.map(|p| p.chunk_info.clone())
.collect_vec();
let blob_bytes = get_blob_from_chunks(&chunk_infos);
let batch = BatchProvingTask {
chunk_proofs,
batch_header,
blob_bytes,
};
let result = catch_unwind(AssertUnwindSafe(|| prover::test::batch_prove(id, batch)));

Expand Down
33 changes: 28 additions & 5 deletions integration/src/prove.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use crate::{test_util::PARAMS_DIR, verifier::*};
use prover::{
aggregator::Prover as BatchProver, zkevm::Prover as ChunkProver, BatchProof, BatchProvingTask,
BundleProvingTask, ChunkProvingTask,
aggregator::Prover as BatchProver, zkevm::Prover as ChunkProver, BatchData, BatchProof,
BatchProvingTask, BundleProvingTask, ChunkInfo, ChunkProvingTask, MAX_AGG_SNARKS,
};
use std::{env, time::Instant};

Expand Down Expand Up @@ -48,9 +48,12 @@ pub fn prove_and_verify_batch(
let chunk_num = batch.chunk_proofs.len();
log::info!("Prove batch BEGIN: chunk_num = {chunk_num}");

let batch_proof = batch_prover
.gen_batch_proof(batch, None, Some(output_dir))
.unwrap();
let res_batch_proof = batch_prover.gen_batch_proof(batch, None, Some(output_dir));
if let Err(e) = res_batch_proof {
log::error!("proving err: {e}");
panic!("proving err: {:?}", e);
}
let batch_proof = res_batch_proof.unwrap();

env::set_var("BATCH_VK_FILENAME", "vk_batch_agg.vkey");
let verifier = new_batch_verifier(PARAMS_DIR, output_dir);
Expand Down Expand Up @@ -84,3 +87,23 @@ pub fn prove_and_verify_bundle(

log::info!("Prove bundle END");
}

/// Builds the EIP-4844 blob bytes for a batch from its (unpadded) chunks.
///
/// The last real chunk is used to derive mock padding chunks until
/// `MAX_AGG_SNARKS` entries are present, then the padded set is encoded
/// into batch data and converted to blob bytes.
/// Similar code lives in aggregator/src/tests/aggregation.rs — candidate
/// for refactoring into a shared helper.
///
/// # Panics
/// Panics if `chunks` is empty or holds more than `MAX_AGG_SNARKS` entries.
pub fn get_blob_from_chunks(chunks: &[ChunkInfo]) -> Vec<u8> {
    let num_chunks = chunks.len();
    // Guard explicitly: an empty slice would otherwise panic on `last()`,
    // and an oversized one would underflow the padding subtraction below.
    assert!(
        (1..=MAX_AGG_SNARKS).contains(&num_chunks),
        "expected between 1 and {} chunks, got {}",
        MAX_AGG_SNARKS,
        num_chunks
    );

    // `last()` already yields `&ChunkInfo`; the original `.as_ref().unwrap()`
    // produced a redundant double reference.
    let last_chunk = chunks.last().expect("chunks must not be empty");
    let padded_chunk = ChunkInfo::mock_padded_chunk_info_for_testing(last_chunk);
    let chunks_with_padding = [
        chunks.to_vec(),
        vec![padded_chunk; MAX_AGG_SNARKS - num_chunks],
    ]
    .concat();

    let batch_data = BatchData::<{ MAX_AGG_SNARKS }>::new(num_chunks, &chunks_with_padding);
    let batch_bytes = batch_data.get_batch_data_bytes();
    let blob_bytes = prover::aggregator::eip4844::get_blob_bytes(&batch_bytes);
    log::info!("blob_bytes len {}", blob_bytes.len());
    blob_bytes
}
14 changes: 14 additions & 0 deletions integration/src/verifier.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,26 @@ type SnarkVerifier = Verifier<CompressionCircuit>;

/// Constructs a chunk-level (layer-2) SNARK verifier.
///
/// Reads the chunk verification key from `assets_dir`, points the
/// compression config at the layer-2 config path, and loads the verifier
/// parameters from `params_dir`.
///
/// # Panics
/// Panics when the verification key file is missing or empty.
pub fn new_chunk_verifier(params_dir: &str, assets_dir: &str) -> SnarkVerifier {
    let vk_filename = consts::chunk_vk_filename();
    let raw_vk = force_to_read(assets_dir, &vk_filename);
    // An empty read means the asset is absent — fail loudly up front
    // rather than letting verifier construction fail obscurely later.
    assert!(
        !raw_vk.is_empty(),
        "empty vk read from {}/{}",
        assets_dir,
        vk_filename
    );
    env::set_var("COMPRESSION_CONFIG", &*config::LAYER2_CONFIG_PATH);
    SnarkVerifier::from_params_dir(params_dir, *config::LAYER2_DEGREE, &raw_vk)
}

/// Constructs a batch-level (layer-4) SNARK verifier.
///
/// Reads the batch verification key from `assets_dir`, points the
/// compression config at the layer-4 config path, and loads the verifier
/// parameters from `params_dir`.
///
/// # Panics
/// Panics when the verification key file is missing or empty.
pub fn new_batch_verifier(params_dir: &str, assets_dir: &str) -> SnarkVerifier {
    let vk_filename = consts::batch_vk_filename();
    let raw_vk = force_to_read(assets_dir, &vk_filename);
    // An empty read means the asset is absent — fail loudly up front
    // rather than letting verifier construction fail obscurely later.
    assert!(
        !raw_vk.is_empty(),
        "empty vk read from {}/{}",
        assets_dir,
        vk_filename
    );
    env::set_var("COMPRESSION_CONFIG", &*config::LAYER4_CONFIG_PATH);
    SnarkVerifier::from_params_dir(params_dir, *config::LAYER4_DEGREE, &raw_vk)
}
Expand Down
24 changes: 23 additions & 1 deletion integration/tests/batch_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,29 @@ fn test_batch_prove_verify() {
let output_dir = init_env_and_log("batch_tests");
log::info!("Initialized ENV and created output-dir {output_dir}");

let batch = load_batch_proving_task("tests/test_data/full_proof_batch_prove_1.json");
//let task_path = "tests/test_data/batch-task-with-blob.json"; // zstd
let task_path = "tests/test_data/batch-task-with-blob-raw.json"; // no zstd
let mut batch = load_batch_proving_task(task_path);
log::info!("batch hash = {:?}", batch.batch_header.batch_hash());

let chunk_infos = batch
.chunk_proofs
.clone()
.into_iter()
.map(|p| p.chunk_info)
.collect::<Vec<_>>();
let corrected_batch_header = prover::BatchHeader::construct_from_chunks(
batch.batch_header.version,
batch.batch_header.batch_index,
batch.batch_header.l1_message_popped,
batch.batch_header.total_l1_message_popped,
batch.batch_header.parent_batch_hash,
batch.batch_header.last_block_timestamp,
&chunk_infos,
&batch.blob_bytes,
);
batch.batch_header = corrected_batch_header;

dump_chunk_protocol(&batch, &output_dir);
let mut batch_prover = new_batch_prover(&output_dir);
prove_and_verify_batch(&output_dir, &mut batch_prover, batch);
Expand All @@ -34,6 +54,8 @@ fn test_batches_with_each_chunk_num_prove_verify() {
let batch = BatchProvingTask {
batch_header: batch.batch_header,
chunk_proofs: batch.chunk_proofs[..len].to_vec(),
// FIXME
blob_bytes: vec![],
};
prove_and_verify_batch(&output_dir.to_string_lossy(), &mut batch_prover, batch);
}
Expand Down
Loading
Loading