recursion #305

Closed · wants to merge 2 commits
26 changes: 13 additions & 13 deletions Cargo.lock

(Generated lockfile; diff not rendered.)

4 changes: 2 additions & 2 deletions Cargo.toml
@@ -27,8 +27,8 @@ serde_json = "1.0"
tokio = { version = "1.32", features = ["full"] }

halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
-prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.10.0rc3", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }
-zkevm-circuits = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.10.0rc3", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }
+prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.10.3", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }
+zkevm-circuits = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.10.3", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }

integration = { path = "integration" }

30 changes: 11 additions & 19 deletions Makefile
@@ -2,6 +2,8 @@ CURRENTDATE=`date +"%Y-%m-%d"`

CHAIN_ID ?= 534352
export CHAIN_ID
+RUST_MIN_STACK ?= 100000000
+export RUST_MIN_STACK

help: ## Display this help screen
	@grep -h \
@@ -25,39 +27,29 @@ clippy: ## Run clippy checks over all workspace members
test: ## Run tests for all the workspace members
	@cargo test --release --all

-bridge-test:
-	cargo build --release
-	./target/release/prove --params=./test_params --trace=prover/tests/traces/bridge
+mock-testnet:
+	@cargo run --bin mock_testnet --release

mock:
	@cargo test --features prove_verify --release test_mock_prove -- --exact --nocapture

mock-debug:
	@cargo test --features prove_verify test_mock_prove -- --exact --nocapture

-mock-testnet:
-	@cargo run --bin mock_testnet --release

test-inner-prove:
-	@cargo test --features prove_verify --release test_inner_prove_verify
+	@cargo test --features prove_verify --release test_inner_prove_verify -- --exact --nocapture

test-chunk-prove:
-	@cargo test --features prove_verify --release test_chunk_prove_verify
+	@cargo test --features prove_verify --release test_chunk_prove_verify -- --exact --nocapture

-test-agg-prove:
-	@cargo test --features prove_verify --release test_agg_prove_verify
+test-e2e-prove:
+	@cargo test --features prove_verify --release test_e2e_prove_verify -- --exact --nocapture

test-batch-prove:
-	@cargo test --features prove_verify --release test_batch_prove_verify
-
-test-batches-with-each-chunk-num-prove:
-	@cargo test --features prove_verify --release test_batches_with_each_chunk_num_prove_verify
+	@cargo test --features prove_verify --release test_batch_prove_verify -- --exact --nocapture

test-ccc:
-	@cargo test --release test_capacity_checker
+	@cargo test --features prove_verify --release test_capacity_checker -- --exact --nocapture

rows:
-	@cargo test --features prove_verify --release estimate_circuit_rows
+	@cargo test --features prove_verify --release estimate_circuit_rows -- --exact --nocapture

# Could be called as `make download-setup -e degree=DEGREE params_dir=PARAMS_DIR`.
# As default `degree=25` and `params_dir=./prover/test_params`.
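The new RUST_MIN_STACK export (100000000 bytes, roughly 100 MB) raises the default stack size of threads the test binaries spawn, presumably to guard against stack overflows in the deeply recursive proving code this PR targets. Below is a minimal sketch of the same idea expressed in code rather than through the environment variable; the deep() helper is purely illustrative and not part of this PR.

```rust
use std::thread;

fn main() {
    // RUST_MIN_STACK only raises the default stack size of threads that
    // std::thread spawns without an explicit size; building the thread with
    // stack_size() is the in-code equivalent of `export RUST_MIN_STACK=100000000`.
    let handle = thread::Builder::new()
        .stack_size(100 * 1024 * 1024) // roughly the 100 MB the Makefile exports
        .spawn(|| deep(100_000))
        .expect("failed to spawn worker thread");

    let depth = handle.join().expect("worker panicked");
    println!("recursion depth reached: {depth}");
}

// Stand-in for stack-hungry recursive work (e.g. recursive proof aggregation);
// not code from this PR.
fn deep(n: u64) -> u64 {
    if n == 0 {
        0
    } else {
        1 + deep(n - 1)
    }
}
```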
56 changes: 23 additions & 33 deletions integration/src/test_util.rs
@@ -20,51 +20,28 @@ pub use proof::{
pub const ASSETS_DIR: &str = "./test_assets";
pub const PARAMS_DIR: &str = "./test_params";

-pub fn parse_trace_path_from_mode(mode: &str) -> &'static str {
-    let trace_path = match mode {
-        "empty" => "./tests/traces/bridge/01.json",
-        "greeter" => "./tests/traces/greeter/setValue.json",
-        "single" => "./tests/traces/erc20/1_transfer.json",
-        "multiple" => "./tests/extra_traces/batch_495/chunk_495/block_8802.json",
-        "multiswap" => "./tests/traces/multi_uniswapv2/router-swapExactTokensForTokens_34.json",
-        "native" => "./tests/traces/native/transfer.json",
-        "dao" => "./tests/traces/dao/dao-propose.json",
-        "nft" => "./tests/traces/nft/mint.json",
-        "sushi" => "./tests/traces/sushi/chef-withdraw.json",
-        _ => "./tests/extra_traces/batch_495/chunk_495/block_8802.json",
-    };
-    log::info!("using mode {:?}, testing with {:?}", mode, trace_path);
-    trace_path
-}

pub fn load_block_traces_for_test() -> (Vec<String>, Vec<BlockTrace>) {
-    let trace_path: String = read_env_var("TRACE_PATH", "".to_string());
-    let paths: Vec<String> = if trace_path.is_empty() {
-        // use mode
-        let mode = read_env_var("MODE", "default".to_string());
-        if mode.to_lowercase() == "batch" || mode.to_lowercase() == "pack" {
-            (1..=20)
-                .map(|i| format!("tests/traces/bridge/{i:02}.json"))
-                .collect()
-        } else {
-            vec![parse_trace_path_from_mode(&mode).to_string()]
-        }
-    } else if !std::fs::metadata(&trace_path).unwrap().is_dir() {
+    let trace_path: String = read_env_var(
+        "TRACE_PATH",
+        "./tests/extra_traces/batch_495/chunk_495/block_8802.json".to_string(),
+    );
+    let paths: Vec<String> = if !std::fs::metadata(&trace_path).unwrap().is_dir() {
        vec![trace_path]
    } else {
-        load_batch_traces(&trace_path).0
+        load_chunk_traces(&trace_path).0
    };
    log::info!("test cases traces: {:?}", paths);
    let traces: Vec<_> = paths.iter().map(get_block_trace_from_file).collect();
    (paths, traces)
}

-fn load_batch_traces(batch_dir: &str) -> (Vec<String>, Vec<BlockTrace>) {
-    let file_names: Vec<String> = glob(&format!("{batch_dir}/**/*.json"))
+fn load_chunk_traces(chunk_dir: &str) -> (Vec<String>, Vec<BlockTrace>) {
+    // Nested dirs are not allowed
+    let file_names: Vec<String> = glob(&format!("{chunk_dir}/*.json"))
        .unwrap()
        .map(|p| p.unwrap().to_str().unwrap().to_string())
        .collect();
-    log::info!("test batch with {:?}", file_names);
+    log::info!("test chunk with {:?}", file_names);
    let mut names_and_traces = file_names
        .into_iter()
        .map(|trace_path| {
@@ -86,3 +63,16 @@ fn load_batch_traces(batch_dir: &str) -> (Vec<String>, Vec<BlockTrace>) {
    );
    names_and_traces.into_iter().map(|(f, t, _)| (f, t)).unzip()
}
+
+pub fn load_batch() -> anyhow::Result<Vec<String>> {
+    let batch_dir = read_env_var("TRACE_PATH", "./tests/extra_traces/batch_24".to_string());
+    let mut sorted_dirs: Vec<String> = std::fs::read_dir(batch_dir)?
+        .filter_map(|entry| entry.ok())
+        .map(|entry| entry.path())
+        .filter(|path| path.is_dir())
+        .map(|path| path.to_string_lossy().into_owned())
+        .collect::<Vec<String>>();
+    sorted_dirs.sort();
+    log::info!("batch content: {:?}", sorted_dirs);
+    Ok(sorted_dirs)
+}
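Taken together, load_batch resolves TRACE_PATH to a batch directory and returns its chunk sub-directories in sorted order, while load_block_traces_for_test and load_chunk_traces load the block traces of a single chunk (top-level *.json files only, no nested directories). The sketch below shows how a test driver might combine them; the module path and the per-chunk TRACE_PATH loop are assumptions, not code from this diff.

```rust
// Hypothetical usage sketch, not code from this PR: it assumes the `integration`
// crate exposes these helpers at `integration::test_util` and that re-pointing
// TRACE_PATH at one chunk directory per iteration is an acceptable test flow.
use integration::test_util::{load_batch, load_block_traces_for_test};

fn main() -> anyhow::Result<()> {
    // TRACE_PATH names a batch directory (default ./tests/extra_traces/batch_24)
    // whose immediate sub-directories are chunks; load_batch returns them sorted.
    let chunk_dirs = load_batch()?;

    for chunk_dir in chunk_dirs {
        // Point TRACE_PATH at a single chunk so load_block_traces_for_test
        // globs every top-level *.json block trace inside it.
        std::env::set_var("TRACE_PATH", &chunk_dir);
        let (paths, traces) = load_block_traces_for_test();
        println!("{chunk_dir}: {} block trace(s): {paths:?}", traces.len());
    }
    Ok(())
}
```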
25 changes: 15 additions & 10 deletions integration/src/test_util/proof.rs
@@ -7,29 +7,34 @@ use prover::{
};
use std::env;

-pub fn gen_and_verify_batch_proofs(agg_prover: &mut Prover, layer3_snark: Snark, output_dir: &str) {
-    let evm_proof = gen_and_verify_normal_and_evm_proofs(
+pub fn gen_and_verify_batch_proofs(
+    agg_prover: &mut Prover,
+    layer3_snark: Snark,
+    output_dir: &str,
+    assets_dir: &str,
+) {
+    let (_normal_proof, evm_proof) = gen_and_verify_normal_and_evm_proofs(
        &mut agg_prover.inner,
        LayerId::Layer4,
        layer3_snark,
        Some(output_dir),
-    )
-    .1;
-    verify_batch_proof(evm_proof, output_dir);
+    );
+    verify_batch_proof(evm_proof, output_dir, assets_dir);
}

+// So here even for chunk, we generate and verify a evm proof
+// TODO: remove this?
pub fn gen_and_verify_chunk_proofs(
    zkevm_prover: &mut zkevm::Prover,
    layer1_snark: Snark,
    output_dir: &str,
) {
-    let normal_proof = gen_and_verify_normal_and_evm_proofs(
+    let (normal_proof, _evm_proof) = gen_and_verify_normal_and_evm_proofs(
        &mut zkevm_prover.inner,
        LayerId::Layer2,
        layer1_snark,
        Some(output_dir),
-    )
-    .0;
+    );
    verify_chunk_proof(&zkevm_prover.inner, normal_proof, output_dir);
}

@@ -99,12 +104,12 @@ fn gen_normal_proof(
    snark
}

-fn verify_batch_proof(evm_proof: EvmProof, output_dir: &str) {
+fn verify_batch_proof(evm_proof: EvmProof, output_dir: &str, assets_dir: &str) {
    let batch_proof = BatchProof::from(evm_proof.proof);
    batch_proof.dump(output_dir, "agg").unwrap();
    batch_proof.clone().assert_calldata();

-    let verifier = Verifier::from_dirs(PARAMS_DIR, output_dir);
+    let verifier = Verifier::from_dirs(PARAMS_DIR, assets_dir);
    log::info!("Constructed aggregator verifier");

    assert!(verifier.verify_agg_evm_proof(batch_proof));
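With this change, gen_and_verify_batch_proofs and verify_batch_proof take a separate assets_dir, so the aggregator Verifier is constructed from a dedicated assets directory instead of the proof output directory. Below is a hypothetical call-site sketch under that reading; the import paths, the ASSETS_DIR choice, and the surrounding prover setup are assumptions, not part of this diff.

```rust
// Sketch of a call site updated for the new signature; the import paths and
// the prover/snark construction are assumptions, not code from this PR.
use integration::test_util::{gen_and_verify_batch_proofs, ASSETS_DIR};
use prover::aggregator::Prover;
use snark_verifier_sdk::Snark;

/// Proof artifacts are written to `output_dir`, while the EVM verifier is
/// rebuilt from the fixed assets directory (./test_assets) rather than from
/// the output directory, matching the new `assets_dir` parameter.
fn prove_and_verify_batch(agg_prover: &mut Prover, layer3_snark: Snark, output_dir: &str) {
    gen_and_verify_batch_proofs(agg_prover, layer3_snark, output_dir, ASSETS_DIR);
}
```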