diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 446d6a314..13089b3d0 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -13,8 +13,6 @@ concurrency: env: CARGO_TERM_COLOR: always - BINSTALL_NO_CONFIRM: true - RUSTDOCFLAGS: "-D warnings" jobs: clippy: @@ -42,7 +40,15 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ./.github/actions/rust - - run: cargo doc --all --no-deps + - run: RUSTDOCFLAGS='-D warnings -A rustdoc::private_intra_doc_links' cargo doc --all --no-deps + # TODO(zero): https://github.com/0xPolygonZero/zk_evm/issues/718 + - run: > + RUSTDOCFLAGS='-D warnings -A rustdoc::private_intra_doc_links' cargo doc --no-deps --document-private-items + --package trace_decoder + --package compat + --package smt_trie + --package zk_evm_proc_macro + --package zk_evm_common cargo-fmt: runs-on: ubuntu-latest timeout-minutes: 5 diff --git a/Cargo.lock b/Cargo.lock index 7ac076699..8ee516001 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -58,9 +58,9 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c37d89f69cb43901949ba29307ada8b9e3b170f94057ad4c04d6fd169d24d65f" +checksum = "b2683873c2744f6cd72d0db51bb74fee9ed310e0476a140bdc19e82b407d8a0a" dependencies = [ "alloy-consensus", "alloy-core", @@ -76,9 +76,9 @@ dependencies = [ [[package]] name = "alloy-chains" -version = "0.1.30" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b4f201b0ac8f81315fbdc55269965a8ddadbc04ab47fa65a1a468f9a40f7a5f" +checksum = "b68b94c159bcc2ca5f758b8663d7b00fc7c5e40569984595ddf2221b0f7f7f6e" dependencies = [ "num_enum", "strum", @@ -96,9 +96,9 @@ dependencies = [ [[package]] name = "alloy-consensus" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1468e3128e07c7afe4ff13c17e8170c330d12c322f8924b8bf6986a27e0aad3d" +checksum = "c28ddd17ffb7e4d66ef3a84e7b179072a9320cdc4b26c7f6f44cbf1081631b36" dependencies = [ "alloy-eips", "alloy-primitives", @@ -142,9 +142,9 @@ dependencies = [ [[package]] name = "alloy-eips" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c35df7b972b06f1b2f4e8b7a53328522fa788054a9d3e556faf2411c5a51d5a" +checksum = "2f6c5c0a383f14519531cf58d8440e74f10b938e289f803af870be6f79223110" dependencies = [ "alloy-eip2930", "alloy-eip7702", @@ -160,9 +160,9 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8866562186d237f1dfeaf989ef941a24764f764bf5c33311e37ead3519c6a429" +checksum = "7111af869909275cffc5c84d16b6c892d6d512773e40cbe83187d0b9c5235e91" dependencies = [ "alloy-primitives", "alloy-sol-types", @@ -174,9 +174,9 @@ dependencies = [ [[package]] name = "alloy-network" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abe714e233f9eaf410de95a9af6bcd05d3a7f8c8de7a0817221e95a6b642a080" +checksum = "342028392a2d5050b7b93dd32a0715d3b3b9ce30072ecb69a35dd4895c005495" dependencies = [ "alloy-consensus", "alloy-eips", @@ -195,9 +195,9 @@ dependencies = [ [[package]] name = "alloy-network-primitives" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8c5a38117974c5776a45e140226745a0b664f79736aa900995d8e4121558e064" +checksum = "a6e66d78c049dcadd065a926a9f2d9a9b2b10981a7889449e694fac7bccd2c6f" dependencies = [ "alloy-eips", "alloy-primitives", @@ -229,9 +229,9 @@ dependencies = [ [[package]] name = "alloy-provider" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c65633d6ef83c3626913c004eaf166a6dd50406f724772ea8567135efd6dc5d3" +checksum = "79f14ccc2a3c575cb17b1b4af8c772cf9b5b93b7ce7047d6640e53954abb558d" dependencies = [ "alloy-chains", "alloy-consensus", @@ -286,9 +286,9 @@ dependencies = [ [[package]] name = "alloy-rpc-client" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5fc328bb5d440599ba1b5aa44c0b9ab0625fbc3a403bb5ee94ed4a01ba23e07" +checksum = "4dc79aeca84abb122a2fffbc1c91fdf958dca5c95be3875977bc99672bde0027" dependencies = [ "alloy-json-rpc", "alloy-transport", @@ -307,9 +307,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f8ff679f94c497a8383f2cd09e2a099266e5f3d5e574bc82b4b379865707dbb" +checksum = "22045187a5ebf5b2af3f8b6831b66735b6556c5750ec5790aeeb45935260c1c2" dependencies = [ "alloy-rpc-types-eth", "alloy-rpc-types-trace", @@ -319,9 +319,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-eth" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a59b1d7c86e0a653e7f3d29954f6de5a2878d8cfd1f010ff93be5c2c48cd3b1" +checksum = "238f494727ff861a803bd73b1274ef788a615bf8f8c4bfada4e6df42afa275d2" dependencies = [ "alloy-consensus", "alloy-eips", @@ -330,17 +330,19 @@ dependencies = [ "alloy-rlp", "alloy-serde", "alloy-sol-types", + "cfg-if", + "derive_more", + "hashbrown", "itertools 0.13.0", "serde", "serde_json", - "thiserror", ] [[package]] name = "alloy-rpc-types-trace" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c54375e5a34ec5a2cf607f9ce98c0ece30dc76ad623afeb25d3953a8d7d30f20" +checksum = "64ca08b0ccc0861055ceb83a1db009c4c8a7f52a259e7cda7ca6ca36ec2b5ce8" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -352,9 +354,9 @@ dependencies = [ [[package]] name = "alloy-serde" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51db8a6428a2159e01b7a43ec7aac801edd0c4db1d4de06f310c288940f16fd3" +checksum = "6b95b6f024a558593dd3b8628af03f7df2ca50e4c56839293ad0a7546e471db0" dependencies = [ "alloy-primitives", "serde", @@ -363,9 +365,9 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bebc1760c13592b7ba3fcd964abba546b8d6a9f10d15e8d92a8263731be33f36" +checksum = "da64740ff0518606c514eb0e03dd0a1daa8ff94d6d491a626fd8e50efd6c4f18" dependencies = [ "alloy-primitives", "async-trait", @@ -435,9 +437,9 @@ dependencies = [ [[package]] name = "alloy-transport" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd5dc4e902f1860d54952446d246ac05386311ad61030a2b906ae865416d36e0" +checksum = "3c7a669caa427abe8802184c8776f5103302f9337bb30a5b36bdebc332946c14" dependencies = [ "alloy-json-rpc", "base64", @@ -454,9 +456,9 @@ dependencies = [ [[package]] name = "alloy-transport-http" -version = "0.3.3" 
+version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1742b94bb814f1ca6b322a6f9dd38a0252ff45a3119e40e888fb7029afa500ce" +checksum = "4433ffa97aab6ae643de81c7bde9a2f043496f27368a607405a5c78a610caf74" dependencies = [ "alloy-json-rpc", "alloy-transport", @@ -572,9 +574,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.88" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e1496f8fb1fbf272686b8d37f523dab3e4a7443300055e74cdaa449f3114356" +checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" dependencies = [ "backtrace", ] @@ -1149,6 +1151,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "build-array" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67ef4e2687af237b2646687e19a0643bc369878216122e46c3f1a01c56baa9d5" +dependencies = [ + "arrayvec", +] + [[package]] name = "bumpalo" version = "3.16.0" @@ -3268,9 +3279,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "33ea5043e58958ee56f3e15a90aee535795cd7dfd319846288d93c5b57d85cbe" [[package]] name = "oorandom" @@ -3358,9 +3369,9 @@ dependencies = [ [[package]] name = "paladin-core" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5af1955eaab1506a43d046628c218b7b3915539554838feb85ed31f54aace2f2" +checksum = "6343e2767eb6ac7f55ca8b869df03c2986b88f0b9ad09cada898b3770ac436e3" dependencies = [ "anyhow", "async-trait", @@ -3378,6 +3389,7 @@ dependencies = [ "paladin-opkind-derive", "pin-project", "postcard", + "rand", "serde", "thiserror", "tokio", @@ -3386,14 +3398,13 @@ dependencies = [ "tokio-util", "tracing", "tracing-subscriber", - "uuid", ] [[package]] name = "paladin-opkind-derive" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af25dcb10b7c0ce99abee8694e2e79e4787d7f778b9339dc5a50ba6fc45e5cc9" +checksum = "0bde85d55c108d4eef3404b4c88d1982a270dc11146a085f4f45c1548b4d2b4c" dependencies = [ "quote", "syn 2.0.77", @@ -3634,7 +3645,7 @@ checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "plonky2" version = "0.2.2" -source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=dc77c77f2b06500e16ad4d7f1c2b057903602eed#dc77c77f2b06500e16ad4d7f1c2b057903602eed" +source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=8463effe0dd1472a52906cd12ffb047885db42ee#8463effe0dd1472a52906cd12ffb047885db42ee" dependencies = [ "ahash", "anyhow", @@ -3645,7 +3656,7 @@ dependencies = [ "log", "num", "plonky2_field", - "plonky2_maybe_rayon 0.2.0 (git+https://github.com/0xPolygonZero/plonky2.git?rev=dc77c77f2b06500e16ad4d7f1c2b057903602eed)", + "plonky2_maybe_rayon 0.2.0 (git+https://github.com/0xPolygonZero/plonky2.git?rev=8463effe0dd1472a52906cd12ffb047885db42ee)", "plonky2_util", "rand", "rand_chacha", @@ -3658,7 +3669,7 @@ dependencies = [ [[package]] name = "plonky2_field" version = "0.2.2" -source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=dc77c77f2b06500e16ad4d7f1c2b057903602eed#dc77c77f2b06500e16ad4d7f1c2b057903602eed" +source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=8463effe0dd1472a52906cd12ffb047885db42ee#8463effe0dd1472a52906cd12ffb047885db42ee" dependencies = [ 
"anyhow", "itertools 0.11.0", @@ -3682,7 +3693,7 @@ dependencies = [ [[package]] name = "plonky2_maybe_rayon" version = "0.2.0" -source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=dc77c77f2b06500e16ad4d7f1c2b057903602eed#dc77c77f2b06500e16ad4d7f1c2b057903602eed" +source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=8463effe0dd1472a52906cd12ffb047885db42ee#8463effe0dd1472a52906cd12ffb047885db42ee" dependencies = [ "rayon", ] @@ -3690,7 +3701,7 @@ dependencies = [ [[package]] name = "plonky2_util" version = "0.2.0" -source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=dc77c77f2b06500e16ad4d7f1c2b057903602eed#dc77c77f2b06500e16ad4d7f1c2b057903602eed" +source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=8463effe0dd1472a52906cd12ffb047885db42ee#8463effe0dd1472a52906cd12ffb047885db42ee" [[package]] name = "plotters" @@ -4651,7 +4662,7 @@ checksum = "8acdd7dbfcfb5dd6e46c63512508bf71c2043f70b8f143813ad75cb5e8a589f2" [[package]] name = "starky" version = "0.4.0" -source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=dc77c77f2b06500e16ad4d7f1c2b057903602eed#dc77c77f2b06500e16ad4d7f1c2b057903602eed" +source = "git+https://github.com/0xPolygonZero/plonky2.git?rev=8463effe0dd1472a52906cd12ffb047885db42ee#8463effe0dd1472a52906cd12ffb047885db42ee" dependencies = [ "ahash", "anyhow", @@ -4660,7 +4671,7 @@ dependencies = [ "log", "num-bigint", "plonky2", - "plonky2_maybe_rayon 0.2.0 (git+https://github.com/0xPolygonZero/plonky2.git?rev=dc77c77f2b06500e16ad4d7f1c2b057903602eed)", + "plonky2_maybe_rayon 0.2.0 (git+https://github.com/0xPolygonZero/plonky2.git?rev=8463effe0dd1472a52906cd12ffb047885db42ee)", "plonky2_util", ] @@ -5125,6 +5136,7 @@ dependencies = [ "assert2", "bitflags 2.6.0", "bitvec", + "build-array", "bytes", "camino", "ciborium", @@ -5357,17 +5369,6 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" -[[package]] -name = "uuid" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" -dependencies = [ - "getrandom", - "rand", - "serde", -] - [[package]] name = "valuable" version = "0.1.0" @@ -5825,6 +5826,7 @@ dependencies = [ "anyhow", "async-stream", "axum", + "cfg-if", "clap", "compat", "directories", diff --git a/Cargo.toml b/Cargo.toml index 175fbbebd..ece36735e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,7 +43,6 @@ axum = "0.7.5" bitflags = "2.5.0" bitvec = "1.0.1" bytes = "1.6.0" -cargo_metadata = "0.18.1" ciborium = "0.2.2" ciborium-io = "0.2.2" clap = { version = "4.5.7", features = ["derive", "env"] } @@ -73,9 +72,8 @@ num-bigint = "0.4.5" num-traits = "0.2.19" nunny = "0.2.1" once_cell = "1.19.0" -paladin-core = "0.4.2" +paladin-core = "0.4.3" parking_lot = "0.12.3" -paste = "1.0.15" pest = "2.7.10" pest_derive = "2.7.10" pretty_env_logger = "0.5.0" @@ -98,7 +96,6 @@ syn = "2.0" thiserror = "1.0.61" tiny-keccak = "2.0.2" tokio = { version = "1.38.0", features = ["full"] } -toml = "0.8.14" tower = "0.4" tracing = { version = "0.1", features = ["attributes"] } tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] } @@ -118,10 +115,10 @@ zk_evm_proc_macro = { path = "proc_macro", version = "0.1.0" } zero = { path = "zero", default-features = false } # plonky2-related dependencies -plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "dc77c77f2b06500e16ad4d7f1c2b057903602eed" 
} +plonky2 = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "8463effe0dd1472a52906cd12ffb047885db42ee" } plonky2_maybe_rayon = "0.2.0" -plonky2_util = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "dc77c77f2b06500e16ad4d7f1c2b057903602eed" } -starky = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "dc77c77f2b06500e16ad4d7f1c2b057903602eed" } +plonky2_util = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "8463effe0dd1472a52906cd12ffb047885db42ee" } +starky = { git = "https://github.com/0xPolygonZero/plonky2.git", rev = "8463effe0dd1472a52906cd12ffb047885db42ee" } [workspace.lints.clippy] too_long_first_doc_paragraph = "allow" diff --git a/evm_arithmetization/src/fixed_recursive_verifier.rs b/evm_arithmetization/src/fixed_recursive_verifier.rs index 0d7bb82dd..96ecce6d3 100644 --- a/evm_arithmetization/src/fixed_recursive_verifier.rs +++ b/evm_arithmetization/src/fixed_recursive_verifier.rs @@ -131,6 +131,8 @@ where /// Holds chains of circuits for each table and for each initial /// `degree_bits`. pub by_table: [RecursiveCircuitsForTable; NUM_TABLES], + /// Dummy proofs of each table for the root circuit. + pub table_dummy_proofs: [Option>; NUM_TABLES], } /// Data for the EVM root circuit, which is used to combine each STARK's shrunk @@ -569,6 +571,43 @@ where } } +/// A struct that encapsulates both the init degree and the shrunk proof. +#[derive(Eq, PartialEq, Debug)] +pub struct ShrunkProofData, C: GenericConfig, const D: usize> +{ + /// The [`CommonCircuitData`] of the last shrinking circuit. + pub common_circuit_data: CommonCircuitData, + + /// The proof after applying shrinking recursion. + pub proof: ProofWithPublicInputs, +} + +impl, C: GenericConfig, const D: usize> + ShrunkProofData +{ + fn to_buffer( + &self, + buffer: &mut Vec, + gate_serializer: &dyn GateSerializer, + ) -> IoResult<()> { + buffer.write_common_circuit_data(&self.common_circuit_data, gate_serializer)?; + buffer.write_proof_with_public_inputs(&self.proof)?; + Ok(()) + } + + fn from_buffer( + buffer: &mut Buffer, + gate_serializer: &dyn GateSerializer, + ) -> IoResult { + let common_circuit_data = buffer.read_common_circuit_data(gate_serializer)?; + let proof = buffer.read_proof_with_public_inputs(&common_circuit_data)?; + Ok(Self { + common_circuit_data, + proof, + }) + } +} + impl AllRecursiveCircuits where F: RichField + Extendable, @@ -612,6 +651,15 @@ where table.to_buffer(&mut buffer, gate_serializer, generator_serializer)?; } } + for table in &self.table_dummy_proofs { + match table { + Some(dummy_proof_data) => { + buffer.write_bool(true)?; + dummy_proof_data.to_buffer(&mut buffer, gate_serializer)? + } + None => buffer.write_bool(false)?, + } + } Ok(buffer) } @@ -690,6 +738,14 @@ where } }; + let table_dummy_proofs = core::array::from_fn(|_| { + if buffer.read_bool().ok()? { + Some(ShrunkProofData::from_buffer(&mut buffer, gate_serializer).ok()?) 
+ } else { + None + } + }); + Ok(Self { root, segment_aggregation, @@ -698,6 +754,7 @@ where block_wrapper, two_to_one_block, by_table, + table_dummy_proofs, }) } @@ -774,6 +831,33 @@ where let block_wrapper = Self::create_block_wrapper_circuit(&block); let two_to_one_block = Self::create_two_to_one_block_circuit(&block_wrapper); + // TODO(sdeng): enable more optional Tables + let table_dummy_proofs = core::array::from_fn(|i| { + if KECCAK_TABLES_INDICES.contains(&i) { + let init_degree = degree_bits_ranges[i].start; + let common_circuit_data = by_table[i] + .by_stark_size + .get(&init_degree) + .expect("Unable to get the shrinking circuits") + .shrinking_wrappers + .last() + .expect("Unable to get the last shrinking circuit") + .circuit + .common + .clone(); + let dummy_circuit: CircuitData = dummy_circuit(&common_circuit_data); + let dummy_pis = HashMap::new(); + let proof = dummy_proof(&dummy_circuit, dummy_pis) + .expect("Unable to generate dummy proofs"); + Some(ShrunkProofData { + common_circuit_data, + proof, + }) + } else { + None + } + }); + Self { root, segment_aggregation, @@ -782,6 +866,7 @@ where block_wrapper, two_to_one_block, by_table, + table_dummy_proofs, } } @@ -1903,7 +1988,6 @@ where abort_signal: Option>, ) -> anyhow::Result> { features_check(&generation_inputs); - let all_proof = prove::( all_stark, config, @@ -1912,37 +1996,32 @@ where timing, abort_signal.clone(), )?; + self.prove_segment_with_all_proofs(&all_proof, config, abort_signal.clone()) + } + pub fn prove_segment_with_all_proofs( + &self, + all_proof: &AllProof, + config: &StarkConfig, + abort_signal: Option>, + ) -> anyhow::Result> { let mut root_inputs = PartialWitness::new(); for table in 0..NUM_TABLES { let table_circuits = &self.by_table[table]; if KECCAK_TABLES_INDICES.contains(&table) && !all_proof.use_keccak_tables { - // generate and set a dummy `index_verifier_data` and `proof_with_pis` - let index_verifier_data = - table_circuits.by_stark_size.keys().min().ok_or_else(|| { - anyhow::format_err!("No valid size in shrinking circuits") - })?; - root_inputs.set_target( - self.root.index_verifier_data[table], - F::from_canonical_usize(*index_verifier_data), + let dummy_proof_data = self.table_dummy_proofs[table] + .as_ref() + .ok_or_else(|| anyhow::format_err!("No dummy_proof_data"))?; + root_inputs.set_target(self.root.index_verifier_data[table], F::ZERO); + root_inputs.set_proof_with_pis_target( + &self.root.proof_with_pis[table], + &dummy_proof_data.proof, ); - let table_circuit = table_circuits - .by_stark_size - .get(index_verifier_data) - .ok_or_else(|| anyhow::format_err!("No valid size in shrinking circuits"))? 
- .shrinking_wrappers - .last() - .ok_or_else(|| anyhow::format_err!("No shrinking circuits"))?; - let dummy_circuit: CircuitData<F, C, D> = - dummy_circuit(&table_circuit.circuit.common); - let dummy_pis = HashMap::new(); - let dummy_proof = dummy_proof(&dummy_circuit, dummy_pis) - .expect("Unable to generate dummy proofs"); - root_inputs - .set_proof_with_pis_target(&self.root.proof_with_pis[table], &dummy_proof); } else { - let stark_proof = &all_proof.multi_proof.stark_proofs[table]; + let stark_proof = &all_proof.multi_proof.stark_proofs[table] + .as_ref() + .ok_or_else(|| anyhow::format_err!("Unable to get stark proof"))?; let original_degree_bits = stark_proof.proof.recover_degree_bits(config); let shrunk_proof = table_circuits .by_stark_size @@ -1993,7 +2072,7 @@ where is_agg: false, is_dummy: false, proof_with_pvs: ProofWithPublicValues { - public_values: all_proof.public_values, + public_values: all_proof.public_values.clone(), intern: root_proof, }, }) @@ -2052,40 +2131,34 @@ where pub fn prove_segment_after_initial_stark( &self, all_proof: AllProof<F, C, D>, - table_circuits: &[(RecursiveCircuitsForTableSize<F, C, D>, u8); NUM_TABLES], + table_circuits: &[Option<(RecursiveCircuitsForTableSize<F, C, D>, u8)>; NUM_TABLES], abort_signal: Option<Arc<AtomicBool>>, ) -> anyhow::Result<ProverOutputData<F, C, D>> { let mut root_inputs = PartialWitness::new(); for table in 0..NUM_TABLES { - let (table_circuit, index_verifier_data) = &table_circuits[table]; if KECCAK_TABLES_INDICES.contains(&table) && !all_proof.use_keccak_tables { - root_inputs.set_target( - self.root.index_verifier_data[table], - F::from_canonical_u8(*index_verifier_data), + let dummy_proof = self.table_dummy_proofs[table] + .as_ref() + .ok_or_else(|| anyhow::format_err!("Unable to get dummy proof"))?; + root_inputs.set_target(self.root.index_verifier_data[table], F::ZERO); + root_inputs.set_proof_with_pis_target( + &self.root.proof_with_pis[table], + &dummy_proof.proof, ); - // generate and set a dummy `proof_with_pis` - let common_data = &table_circuit - .shrinking_wrappers - .last() - .ok_or_else(|| anyhow::format_err!("No shrinking circuits"))? - .circuit - .common; - let dummy_circuit: CircuitData<F, C, D> = dummy_circuit(common_data); - let dummy_pis = HashMap::new(); - let dummy_proof = dummy_proof(&dummy_circuit, dummy_pis) - .expect("Unable to generate dummy proofs"); - root_inputs - .set_proof_with_pis_target(&self.root.proof_with_pis[table], &dummy_proof); } else { - let stark_proof = &all_proof.multi_proof.stark_proofs[table]; - - let shrunk_proof = - table_circuit.shrink(stark_proof, &all_proof.multi_proof.ctl_challenges)?; + let (table_circuit, index_verifier_data) = &table_circuits[table] + .as_ref() + .ok_or_else(|| anyhow::format_err!("Unable to get circuits"))?; root_inputs.set_target( self.root.index_verifier_data[table], F::from_canonical_u8(*index_verifier_data), ); + let stark_proof = all_proof.multi_proof.stark_proofs[table] + .as_ref() + .ok_or_else(|| anyhow::format_err!("Unable to get stark proof"))?; + let shrunk_proof = + table_circuit.shrink(stark_proof, &all_proof.multi_proof.ctl_challenges)?; root_inputs .set_proof_with_pis_target(&self.root.proof_with_pis[table], &shrunk_proof); } @@ -2736,6 +2809,7 @@ where agg_inputs.set_proof_with_pis_target(&agg_child.base_proof, proof); } } + /// A map between initial degree sizes and their associated shrinking recursion /// circuits.
#[derive(Eq, PartialEq, Debug)] @@ -2767,6 +2841,7 @@ where buffer.write_usize(size)?; table.to_buffer(buffer, gate_serializer, generator_serializer)?; } + Ok(()) } @@ -2786,6 +2861,7 @@ where )?; by_stark_size.insert(key, table); } + Ok(Self { by_stark_size }) } @@ -2810,6 +2886,7 @@ where ) }) .collect(); + Self { by_stark_size } } @@ -3131,79 +3208,3 @@ pub mod testing { } } } - -#[cfg(test)] -#[cfg(not(feature = "cdk_erigon"))] -mod tests { - use plonky2::field::goldilocks_field::GoldilocksField; - use plonky2::plonk::config::PoseidonGoldilocksConfig; - use plonky2::timed; - - use super::*; - use crate::testing_utils::{empty_payload, init_logger}; - use crate::witness::operation::Operation; - - type F = GoldilocksField; - const D: usize = 2; - type C = PoseidonGoldilocksConfig; - - #[test] - fn test_segment_proof_generation_without_keccak() -> anyhow::Result<()> { - init_logger(); - - let all_stark = AllStark::::default(); - let config = StarkConfig::standard_fast_config(); - - // Generate a dummy payload for testing - let payload = empty_payload()?; - let max_cpu_len_log = Some(7); - let mut segment_iterator = SegmentDataIterator::::new(&payload, max_cpu_len_log); - let (_, mut segment_data) = segment_iterator.next().unwrap()?; - - let opcode_counts = &segment_data.opcode_counts; - assert!(!opcode_counts.contains_key(&Operation::KeccakGeneral)); - - let timing = &mut TimingTree::new( - "Segment Proof Generation Without Keccak Test", - log::Level::Info, - ); - // Process and prove segment - let all_circuits = timed!( - timing, - log::Level::Info, - "Create all recursive circuits", - AllRecursiveCircuits::::new( - &all_stark, - &[16..17, 8..9, 7..8, 4..9, 8..9, 4..7, 16..17, 16..17, 16..17], - &config, - ) - ); - - let segment_proof = timed!( - timing, - log::Level::Info, - "Prove segment", - all_circuits.prove_segment( - &all_stark, - &config, - payload.trim(), - &mut segment_data, - timing, - None, - )? - ); - - // Verify the generated segment proof - timed!( - timing, - log::Level::Info, - "Verify segment proof", - all_circuits.verify_root(segment_proof.proof_with_pvs.intern.clone())? - ); - - // Print timing details - timing.print(); - - Ok(()) - } -} diff --git a/evm_arithmetization/src/get_challenges.rs b/evm_arithmetization/src/get_challenges.rs index ca1f06c62..c7471471b 100644 --- a/evm_arithmetization/src/get_challenges.rs +++ b/evm_arithmetization/src/get_challenges.rs @@ -247,7 +247,7 @@ pub(crate) fn observe_public_values_target< pub mod testing { use plonky2::field::extension::Extendable; - use plonky2::hash::hash_types::RichField; + use plonky2::hash::hash_types::{RichField, NUM_HASH_OUT_ELTS}; use plonky2::iop::challenger::Challenger; use plonky2::plonk::config::GenericConfig; use starky::config::StarkConfig; @@ -278,19 +278,14 @@ pub mod testing { let stark_proofs = &self.multi_proof.stark_proofs; - for (i, proof) in stark_proofs.iter().enumerate() { - if KECCAK_TABLES_INDICES.contains(&i) && !self.use_keccak_tables { - // Observe zero merkle caps when skipping Keccak tables. 
- let zero_merkle_cap = proof - .proof - .trace_cap - .flatten() - .iter() - .map(|_| F::ZERO) - .collect::>(); - challenger.observe_elements(&zero_merkle_cap); + for (i, stark_proof) in stark_proofs.iter().enumerate() { + if let Some(stark_proof) = stark_proof { + challenger.observe_cap(&stark_proof.proof.trace_cap); } else { - challenger.observe_cap(&proof.proof.trace_cap); + assert!(KECCAK_TABLES_INDICES.contains(&i) && !self.use_keccak_tables); + let zero_cap = + vec![F::ZERO; config.fri_config.num_cap_elements() * NUM_HASH_OUT_ELTS]; + challenger.observe_elements(&zero_cap); } } @@ -301,16 +296,16 @@ pub mod testing { Ok(AllProofChallenges { stark_challenges: core::array::from_fn(|i| { - if KECCAK_TABLES_INDICES.contains(&i) && !self.use_keccak_tables { - None - } else { + if let Some(stark_proof) = &stark_proofs[i] { challenger.compact(); - Some(stark_proofs[i].proof.get_challenges( + Some(stark_proof.proof.get_challenges( &mut challenger, Some(&ctl_challenges), true, config, )) + } else { + None } }), ctl_challenges, diff --git a/evm_arithmetization/src/proof.rs b/evm_arithmetization/src/proof.rs index 304072aed..b0de028af 100644 --- a/evm_arithmetization/src/proof.rs +++ b/evm_arithmetization/src/proof.rs @@ -10,7 +10,8 @@ use plonky2::plonk::config::{GenericConfig, GenericHashOut, Hasher}; use plonky2::util::serialization::{Buffer, IoResult, Read, Write}; use serde::{Deserialize, Serialize}; use starky::config::StarkConfig; -use starky::proof::MultiProof; +use starky::lookup::GrandProductChallengeSet; +use starky::proof::StarkProofWithMetadata; use crate::all_stark::NUM_TABLES; use crate::util::{get_h160, get_h256, get_u256, h256_limbs, h2u}; @@ -21,6 +22,22 @@ pub(crate) const DEFAULT_CAP_HEIGHT: usize = 4; /// Number of elements contained in a Merkle cap with default height. pub(crate) const DEFAULT_CAP_LEN: usize = 1 << DEFAULT_CAP_HEIGHT; +/// A combination of STARK proofs for independent statements operating on +/// possibly shared variables, along with Cross-Table Lookup (CTL) challenges to +/// assert consistency of common variables across tables. +#[derive(Debug, Clone)] +pub struct MultiProof< + F: RichField + Extendable, + C: GenericConfig, + const D: usize, + const N: usize, +> { + /// Proofs for all the different STARK modules. + pub stark_proofs: [Option>; N], + /// Cross-table lookup challenges. + pub ctl_challenges: GrandProductChallengeSet, +} + /// A STARK proof for each table, plus some metadata used to create recursive /// wrapper proofs. #[derive(Debug, Clone)] @@ -38,8 +55,12 @@ pub struct AllProof, C: GenericConfig, co impl, C: GenericConfig, const D: usize> AllProof { /// Returns the degree (i.e. the trace length) of each STARK. 
- pub fn degree_bits(&self, config: &StarkConfig) -> [usize; NUM_TABLES] { - self.multi_proof.recover_degree_bits(config) + pub fn degree_bits(&self, config: &StarkConfig) -> [Option; NUM_TABLES] { + core::array::from_fn(|i| { + self.multi_proof.stark_proofs[i] + .as_ref() + .map(|proof| proof.proof.recover_degree_bits(config)) + }) } } diff --git a/evm_arithmetization/src/prover.rs b/evm_arithmetization/src/prover.rs index fdc5eca20..4192db6c8 100644 --- a/evm_arithmetization/src/prover.rs +++ b/evm_arithmetization/src/prover.rs @@ -16,7 +16,7 @@ use plonky2::util::timing::TimingTree; use starky::config::StarkConfig; use starky::cross_table_lookup::{get_ctl_data, CtlData}; use starky::lookup::GrandProductChallengeSet; -use starky::proof::{MultiProof, StarkProofWithMetadata}; +use starky::proof::StarkProofWithMetadata; use starky::prover::prove_with_commitment; use starky::stark::Stark; @@ -24,7 +24,7 @@ use crate::all_stark::{AllStark, Table, KECCAK_TABLES_INDICES, NUM_TABLES}; use crate::cpu::kernel::aggregator::KERNEL; use crate::generation::{generate_traces, GenerationInputs, TrimmedGenerationInputs}; use crate::get_challenges::observe_public_values; -use crate::proof::{AllProof, MemCap, PublicValues, DEFAULT_CAP_LEN}; +use crate::proof::{AllProof, MemCap, MultiProof, PublicValues, DEFAULT_CAP_LEN}; use crate::GenerationSegmentData; /// Generate traces, then create all STARK proofs. @@ -220,7 +220,7 @@ where } type ProofWithMemCaps = ( - [StarkProofWithMetadata; NUM_TABLES], + [Option>; NUM_TABLES], MerkleCap, MerkleCap, ); @@ -273,16 +273,16 @@ where let (arithmetic_proof, _) = prove_table!(arithmetic_stark, Table::Arithmetic); let (byte_packing_proof, _) = prove_table!(byte_packing_stark, Table::BytePacking); let (cpu_proof, _) = prove_table!(cpu_stark, Table::Cpu); - let challenger_after_cpu = challenger.clone(); - // TODO(sdeng): Keccak proofs are still required for CTLs, etc. Refactor the - // code and remove the unnecessary parts. - let (keccak_proof, _) = prove_table!(keccak_stark, Table::Keccak); - let (keccak_sponge_proof, _) = prove_table!(keccak_sponge_stark, Table::KeccakSponge); - if !use_keccak_tables { - // We need to connect the challenger state of CPU and Logic tables when the - // Keccak tables are not in use. 
- *challenger = challenger_after_cpu; - } + let keccak_proof = if use_keccak_tables { + Some(prove_table!(keccak_stark, Table::Keccak).0) + } else { + None + }; + let keccak_sponge_proof = if use_keccak_tables { + Some(prove_table!(keccak_sponge_stark, Table::KeccakSponge).0) + } else { + None + }; let (logic_proof, _) = prove_table!(logic_stark, Table::Logic); let (memory_proof, _) = prove_table!(memory_stark, Table::Memory); let (mem_before_proof, mem_before_cap) = prove_table!(mem_before_stark, Table::MemBefore); @@ -293,17 +293,17 @@ where Ok(( [ - arithmetic_proof, - byte_packing_proof, - cpu_proof, + Some(arithmetic_proof), + Some(byte_packing_proof), + Some(cpu_proof), keccak_proof, keccak_sponge_proof, - logic_proof, - memory_proof, - mem_before_proof, - mem_after_proof, + Some(logic_proof), + Some(memory_proof), + Some(mem_before_proof), + Some(mem_after_proof), #[cfg(feature = "cdk_erigon")] - poseidon_proof, + Some(poseidon_proof), ], mem_before_cap, mem_after_cap, diff --git a/evm_arithmetization/src/testing_utils.rs b/evm_arithmetization/src/testing_utils.rs index 5f1b1b041..039dc2504 100644 --- a/evm_arithmetization/src/testing_utils.rs +++ b/evm_arithmetization/src/testing_utils.rs @@ -10,12 +10,17 @@ use mpt_trie::{ nibbles::Nibbles, partial_trie::{HashedPartialTrie, Node, PartialTrie}, }; +use plonky2::field::goldilocks_field::GoldilocksField; pub use crate::cpu::kernel::cancun_constants::*; pub use crate::cpu::kernel::constants::global_exit_root::*; -use crate::generation::TrieInputs; +use crate::generation::{TrieInputs, TrimmedGenerationInputs}; use crate::proof::TrieRoots; -use crate::{generation::mpt::AccountRlp, proof::BlockMetadata, util::h2u, GenerationInputs}; +use crate::witness::operation::Operation; +use crate::{ + generation::mpt::AccountRlp, proof::BlockMetadata, util::h2u, GenerationInputs, + GenerationSegmentData, SegmentDataIterator, +}; pub const EMPTY_NODE_HASH: H256 = H256(hex!( "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" @@ -165,7 +170,7 @@ pub fn scalable_contract_from_storage(storage_trie: &HashedPartialTrie) -> Accou } } -pub fn empty_payload() -> Result { +fn empty_payload() -> Result { // Set up default block metadata let block_metadata = BlockMetadata { block_beneficiary: Address::zero(), @@ -213,3 +218,19 @@ pub fn empty_payload() -> Result { Ok(inputs) } + +pub fn segment_with_empty_tables() -> Result<( + TrimmedGenerationInputs, + GenerationSegmentData, +)> { + let payload = empty_payload()?; + let max_cpu_len_log = Some(7); + let mut segment_iterator = + SegmentDataIterator::::new(&payload, max_cpu_len_log); + let (trimmed_inputs, segment_data) = segment_iterator.next().unwrap()?; + + let opcode_counts = &segment_data.opcode_counts; + assert!(!opcode_counts.contains_key(&Operation::KeccakGeneral)); + + Ok((trimmed_inputs, segment_data)) +} diff --git a/evm_arithmetization/src/verifier.rs b/evm_arithmetization/src/verifier.rs index 947516801..81e9b502b 100644 --- a/evm_arithmetization/src/verifier.rs +++ b/evm_arithmetization/src/verifier.rs @@ -85,7 +85,9 @@ pub mod testing { use plonky2::hash::hash_types::RichField; use plonky2::plonk::config::{GenericConfig, GenericHashOut}; use starky::config::StarkConfig; - use starky::cross_table_lookup::{get_ctl_vars_from_proofs, verify_cross_table_lookups}; + use starky::cross_table_lookup::verify_cross_table_lookups; + use starky::cross_table_lookup::CrossTableLookup; + use starky::cross_table_lookup::CtlCheckVars; use starky::lookup::GrandProductChallenge; use 
starky::stark::Stark; use starky::verifier::verify_stark_proof_with_challenges; @@ -184,25 +186,39 @@ pub mod testing { cross_table_lookups, } = all_stark; - let ctl_vars_per_table = get_ctl_vars_from_proofs( - &all_proof.multi_proof, - cross_table_lookups, - &ctl_challenges, - &num_lookup_columns, - all_stark.arithmetic_stark.constraint_degree(), - ); - let stark_proofs = &all_proof.multi_proof.stark_proofs; macro_rules! verify_table { ($stark:ident, $table:expr) => { + let stark_proof = &stark_proofs[*$table] + .as_ref() + .expect("Missing stark_proof") + .proof; + let ctl_vars = { + let (total_num_helpers, _, num_helpers_by_ctl) = + CrossTableLookup::num_ctl_helpers_zs_all( + &all_stark.cross_table_lookups, + *$table, + config.num_challenges, + $stark.constraint_degree(), + ); + CtlCheckVars::from_proof( + *$table, + &stark_proof, + &all_stark.cross_table_lookups, + &ctl_challenges, + num_lookup_columns[*$table], + total_num_helpers, + &num_helpers_by_ctl, + ) + }; verify_stark_proof_with_challenges( $stark, - &stark_proofs[*$table].proof, + stark_proof, &stark_challenges[*$table] .as_ref() .expect("Missing challenges"), - Some(&ctl_vars_per_table[*$table]), + Some(&ctl_vars), &[], config, )?; @@ -240,12 +256,39 @@ pub mod testing { .map(|i| get_memory_extra_looking_sum(&public_values, ctl_challenges.challenges[i])) .collect_vec(); + let all_ctls = &all_stark.cross_table_lookups; + verify_cross_table_lookups::( cross_table_lookups, - all_proof - .multi_proof - .stark_proofs - .map(|p| p.proof.openings.ctl_zs_first.unwrap()), + core::array::from_fn(|i| { + if let Some(stark_proof) = &stark_proofs[i] { + stark_proof + .proof + .openings + .ctl_zs_first + .as_ref() + .expect("Missing ctl_zs") + .clone() + } else if i == *Table::Keccak { + let (_, n, _) = CrossTableLookup::num_ctl_helpers_zs_all( + all_ctls, + *Table::Keccak, + config.num_challenges, + keccak_stark.constraint_degree(), + ); + vec![F::ZERO; n] + } else if i == *Table::KeccakSponge { + let (_, n, _) = CrossTableLookup::num_ctl_helpers_zs_all( + all_ctls, + *Table::KeccakSponge, + config.num_challenges, + keccak_sponge_stark.constraint_degree(), + ); + vec![F::ZERO; n] + } else { + panic!("Unable to find stark_proof"); + } + }), Some(&extra_looking_sums), config, ) diff --git a/evm_arithmetization/tests/empty_tables.rs b/evm_arithmetization/tests/empty_tables.rs new file mode 100644 index 000000000..d25901e24 --- /dev/null +++ b/evm_arithmetization/tests/empty_tables.rs @@ -0,0 +1,80 @@ +#![cfg(feature = "eth_mainnet")] + +use evm_arithmetization::fixed_recursive_verifier::AllRecursiveCircuits; +use evm_arithmetization::prover::prove; +use evm_arithmetization::testing_utils::{init_logger, segment_with_empty_tables}; +use evm_arithmetization::verifier::testing::verify_all_proofs; +use evm_arithmetization::AllStark; +use plonky2::field::goldilocks_field::GoldilocksField; +use plonky2::plonk::config::PoseidonGoldilocksConfig; +use plonky2::timed; +use plonky2::util::timing::TimingTree; +use starky::config::StarkConfig; + +/// This test focuses on testing zkVM proofs with some empty tables. 
+#[test] +fn empty_tables() -> anyhow::Result<()> { + type F = GoldilocksField; + const D: usize = 2; + type C = PoseidonGoldilocksConfig; + + init_logger(); + + let all_stark = AllStark::::default(); + let config = StarkConfig::standard_fast_config(); + let timing = &mut TimingTree::new("Empty Table Test", log::Level::Info); + + // Generate segment data + let (payload, mut segment_data) = segment_with_empty_tables()?; + + // Create all STARK proofs + let mut proofs = vec![]; + let proof = timed!( + timing, + log::Level::Info, + "Create all STARK proofs", + prove::( + &all_stark, + &config, + payload, + &mut segment_data, + timing, + None, + )? + ); + proofs.push(proof); + + // Verify the generated STARK proofs + verify_all_proofs(&all_stark, &proofs, &config)?; + + // Process and generate segment proof + let all_circuits = timed!( + timing, + log::Level::Info, + "Create all recursive circuits", + AllRecursiveCircuits::::new( + &all_stark, + &[16..17, 8..9, 7..8, 4..6, 8..9, 4..5, 16..17, 16..17, 16..17], + &config, + ) + ); + let segment_proof = timed!( + timing, + log::Level::Info, + "Prove segment", + all_circuits.prove_segment_with_all_proofs(&proofs[0], &config, None)? + ); + + // Verify the generated segment proof + timed!( + timing, + log::Level::Info, + "Verify segment proof", + all_circuits.verify_root(segment_proof.proof_with_pvs.intern.clone())? + ); + + // Print timing details + timing.print(); + + Ok(()) +} diff --git a/trace_decoder/Cargo.toml b/trace_decoder/Cargo.toml index de9b389a2..5bdd24f12 100644 --- a/trace_decoder/Cargo.toml +++ b/trace_decoder/Cargo.toml @@ -15,6 +15,7 @@ alloy-compat = "0.1.0" anyhow.workspace = true bitflags.workspace = true bitvec.workspace = true +build-array = "0.1.2" bytes.workspace = true ciborium.workspace = true ciborium-io.workspace = true @@ -33,6 +34,7 @@ nunny = { workspace = true, features = ["serde"] } plonky2.workspace = true rlp.workspace = true serde.workspace = true +smt_trie.workspace = true stackstack = "0.3.0" strum = { version = "0.26.3", features = ["derive"] } thiserror.workspace = true @@ -52,7 +54,6 @@ libtest-mimic = "0.7.3" plonky2_maybe_rayon.workspace = true serde_json.workspace = true serde_path_to_error.workspace = true -smt_trie.workspace = true zero.workspace = true [features] diff --git a/trace_decoder/benches/block_processing.rs b/trace_decoder/benches/block_processing.rs index adefdae3f..4e3582e98 100644 --- a/trace_decoder/benches/block_processing.rs +++ b/trace_decoder/benches/block_processing.rs @@ -8,6 +8,7 @@ use criterion::{criterion_group, criterion_main, BatchSize, Criterion}; use trace_decoder::observer::DummyObserver; use trace_decoder::{BlockTrace, OtherBlockData}; +use zero::prover::WIRE_DISPOSITION; #[derive(Clone, Debug, serde::Deserialize)] pub struct ProverInput { @@ -39,6 +40,7 @@ fn criterion_benchmark(c: &mut Criterion) { other_data, batch_size, &mut DummyObserver::new(), + WIRE_DISPOSITION, ) .unwrap() }, diff --git a/trace_decoder/src/core.rs b/trace_decoder/src/core.rs index 97969bf52..a049680e3 100644 --- a/trace_decoder/src/core.rs +++ b/trace_decoder/src/core.rs @@ -12,6 +12,7 @@ use alloy::{ }; use alloy_compat::Compat as _; use anyhow::{anyhow, bail, ensure, Context as _}; +use either::Either; use ethereum_types::{Address, H160, U256}; use evm_arithmetization::{ generation::{mpt::AccountRlp, TrieInputs}, @@ -25,9 +26,12 @@ use mpt_trie::partial_trie::PartialTrie as _; use nunny::NonEmpty; use zk_evm_common::gwei_to_wei; -use crate::observer::Observer; use crate::{ - 
typed_mpt::{ReceiptTrie, StateMpt, StateTrie, StorageTrie, TransactionTrie, TrieKey}, + observer::{DummyObserver, Observer}, + tries::StateSmt, +}; +use crate::{ + tries::{MptKey, ReceiptTrie, StateMpt, StateTrie, StorageTrie, TransactionTrie}, BlockLevelData, BlockTrace, BlockTraceTriePreImages, CombinedPreImages, ContractCodeUsage, OtherBlockData, SeparateStorageTriesPreImage, SeparateTriePreImage, SeparateTriePreImages, TxnInfo, TxnMeta, TxnTrace, @@ -51,12 +55,24 @@ pub fn is_precompile(addr: H160) -> bool { == address!("0000000000000000000000000000000000000100")) } +/// Expected trie type when parsing from binary in a [`BlockTrace`]. +/// +/// See [`crate::wire`] and [`CombinedPreImages`] for more. +#[derive(Debug)] +pub enum WireDisposition { + /// MPT + Type1, + /// SMT + Type2, +} + /// TODO(0xaatif): document this after pub fn entrypoint( trace: BlockTrace, other: OtherBlockData, batch_size_hint: usize, observer: &mut impl Observer, + wire_disposition: WireDisposition, ) -> anyhow::Result> { ensure!(batch_size_hint != 0); @@ -70,8 +86,8 @@ pub fn entrypoint( BlockTraceTriePreImages::Separate(_) => FatalMissingCode(true), BlockTraceTriePreImages::Combined(_) => FatalMissingCode(false), }; + let (state, storage, mut code) = start(trie_pre_images, wire_disposition)?; - let (state, storage, mut code) = start(trie_pre_images)?; code.extend(code_db.clone()); let OtherBlockData { @@ -91,17 +107,40 @@ pub fn entrypoint( *amt = gwei_to_wei(*amt) } - let batches = middle( - state, - storage, - batch(txn_info, batch_size_hint), - &mut code, - &b_meta, - ger_data, - withdrawals, - fatal_missing_code, - observer, - )?; + let batches = match state { + Either::Left(mpt) => Either::Left( + middle( + mpt, + storage, + batch(txn_info, batch_size_hint), + &mut code, + &b_meta, + ger_data, + withdrawals, + fatal_missing_code, + observer, + )? + .into_iter() + .map(|it| it.map(Either::Left)), + ), + Either::Right(smt) => { + Either::Right( + middle( + smt, + storage, + batch(txn_info, batch_size_hint), + &mut code, + &b_meta, + ger_data, + withdrawals, + fatal_missing_code, + &mut DummyObserver::new(), // TODO(0xaatif) + )? + .into_iter() + .map(|it| it.map(Either::Right)), + ) + } + }; let mut running_gas_used = 0; Ok(batches @@ -133,7 +172,10 @@ pub fn entrypoint( withdrawals, ger_data, tries: TrieInputs { - state_trie: state.into(), + state_trie: match state { + Either::Left(mpt) => mpt.into(), + Either::Right(_) => todo!("evm_arithmetization accepts an SMT"), + }, transactions_trie: transaction.into(), receipts_trie: receipt.into(), storage_tries: storage.into_iter().map(|(k, v)| (k, v.into())).collect(), @@ -181,11 +223,16 @@ pub fn entrypoint( /// [`HashedPartialTrie`](mpt_trie::partial_trie::HashedPartialTrie), /// or a [`wire`](crate::wire)-encoded representation of one. /// -/// Turn either of those into our [`typed_mpt`](crate::typed_mpt) -/// representations. +/// Turn either of those into our [internal representations](crate::tries). 
+#[allow(clippy::type_complexity)] fn start( pre_images: BlockTraceTriePreImages, -) -> anyhow::Result<(StateMpt, BTreeMap<H256, StorageTrie>, Hash2Code)> { + wire_disposition: WireDisposition, +) -> anyhow::Result<( + Either<StateMpt, StateSmt>, + BTreeMap<H256, StorageTrie>, + Hash2Code, +)> { Ok(match pre_images { // TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/401 // refactor our convoluted input types BlockTraceTriePreImages::Separate(SeparateTriePreImages { state: SeparateTriePreImage::Direct(state), storage: SeparateStorageTriesPreImage::MultipleTries(storage), }) => { let state = state.items().try_fold( StateMpt::default(), |mut acc, (nibbles, hash_or_val)| { - let path = TrieKey::from_nibbles(nibbles); + let path = MptKey::from_nibbles(nibbles); match hash_or_val { mpt_trie::trie_ops::ValOrHash::Val(bytes) => { #[expect(deprecated)] // this is MPT specific @@ -219,7 +266,7 @@ fn start( .map(|(k, SeparateTriePreImage::Direct(v))| { v.items() .try_fold(StorageTrie::default(), |mut acc, (nibbles, hash_or_val)| { - let path = TrieKey::from_nibbles(nibbles); + let path = MptKey::from_nibbles(nibbles); match hash_or_val { mpt_trie::trie_ops::ValOrHash::Val(value) => { acc.insert(path, value)?; @@ -233,17 +280,35 @@ fn start( .map(|v| (k, v)) }) .collect::<anyhow::Result<_>>()?; - (state, storage, Hash2Code::new()) + (Either::Left(state), storage, Hash2Code::new()) } BlockTraceTriePreImages::Combined(CombinedPreImages { compact }) => { let instructions = crate::wire::parse(&compact) .context("couldn't parse instructions from binary format")?; - let crate::type1::Frontend { - state, - storage, - code, - } = crate::type1::frontend(instructions)?; - (state, storage, code.into_iter().map(Into::into).collect()) + let (state, storage, code) = match wire_disposition { + WireDisposition::Type1 => { + let crate::type1::Frontend { + state, + storage, + code, + } = crate::type1::frontend(instructions)?; + ( + Either::Left(state), + storage, + Hash2Code::from_iter(code.into_iter().map(NonEmpty::into_vec)), + ) + } + WireDisposition::Type2 => { + let crate::type2::Frontend { trie, code } = + crate::type2::frontend(instructions)?; + ( + Either::Right(trie), + BTreeMap::new(), + Hash2Code::from_iter(code.into_iter().map(NonEmpty::into_vec)), + ) + } + }; + (state, storage, code) } }) } @@ -319,6 +384,31 @@ struct Batch { pub jumpdest_tables: Vec>, } +impl<T> Batch<T> { + fn map<U>(self, f: impl FnMut(T) -> U) -> Batch<U> { + let Self { + first_txn_ix, + gas_used, + contract_code, + byte_code, + before, + after, + withdrawals, + jumpdest_tables, + } = self; + Batch { + first_txn_ix, + gas_used, + contract_code, + byte_code, + before: before.map(f), + after, + withdrawals, + jumpdest_tables, + } + } +} + /// [`evm_arithmetization::generation::TrieInputs`], /// generic over state trie representation. #[derive(Debug)] pub struct IntraBlockTries<StateTrieT> { pub receipt: ReceiptTrie, } +impl<T> IntraBlockTries<T> { + fn map<U>(self, mut f: impl FnMut(T) -> U) -> IntraBlockTries<U> { + let Self { + state, + storage, + transaction, + receipt, + } = self; + IntraBlockTries { + state: f(state), + storage, + transaction, + receipt, + } + } +} /// Hacky handling of possibly missing contract bytecode in `Hash2Code` inner /// map. /// Allows incomplete payloads fetched with the zero tracer to skip these @@ -355,12 +461,15 @@ fn middle( fatal_missing_code: FatalMissingCode, // called with the untrimmed tries after each batch observer: &mut impl Observer<StateTrieT>, -) -> anyhow::Result<Vec<Batch<StateTrieT>>> { +) -> anyhow::Result<Vec<Batch<StateTrieT>>> +where + StateTrieT::Key: Ord + From<Address>
, +{ // Initialise the storage tries. for (haddr, acct) in state_trie.iter() { let storage = storage_tries.entry(haddr).or_insert({ let mut it = StorageTrie::default(); - it.insert_hash(TrieKey::default(), acct.storage_root) + it.insert_hash(MptKey::default(), acct.storage_root) .expect("empty trie insert cannot fail"); it }); @@ -395,8 +504,8 @@ fn middle( // We want to perform mask the TrieInputs above, // but won't know the bounds until after the loop below, // so store that information here. - let mut storage_masks = BTreeMap::<_, BTreeSet<TrieKey>>::new(); - let mut state_mask = BTreeSet::new(); + let mut storage_masks = BTreeMap::<_, BTreeSet<MptKey>>::new(); + let mut state_mask = BTreeSet::<StateTrieT::Key>::new(); if txn_ix == 0 { do_pre_execution( @@ -495,7 +604,7 @@ fn middle( storage_written .keys() .chain(&storage_read) - .map(|it| TrieKey::from_hash(keccak_hash::keccak(it))), + .map(|it| MptKey::from_hash(keccak_hash::keccak(it))), ); if do_writes { @@ -542,7 +651,7 @@ fn middle( }; for (k, v) in storage_written { - let slot = TrieKey::from_hash(keccak_hash::keccak(k)); + let slot = MptKey::from_hash(keccak_hash::keccak(k)); match v.is_zero() { // this is actually a delete true => storage_mask.extend(storage.reporting_remove(slot)?), @@ -555,10 +664,10 @@ fn middle( } state_trie.insert_by_address(addr, acct)?; - state_mask.insert(TrieKey::from_address(addr)); + state_mask.insert(<StateTrieT::Key>::from(addr)); } else { // Simple state access - state_mask.insert(TrieKey::from_address(addr)); + state_mask.insert(<StateTrieT::Key>::from(addr)); } if self_destructed { @@ -583,7 +692,7 @@ fn middle( withdrawals: match loop_ix == loop_len { true => { for (addr, amt) in &withdrawals { - state_mask.insert(TrieKey::from_address(*addr)); + state_mask.insert(<StateTrieT::Key>::from(*addr)); let mut acct = state_trie .get_by_address(*addr) .context(format!("missing address {addr:x} for withdrawal"))?; @@ -642,10 +751,13 @@ fn do_pre_execution( block: &BlockMetadata, ger_data: Option<(H256, H256)>, storage: &mut BTreeMap<H256, StorageTrie>, - trim_storage: &mut BTreeMap<Address, BTreeSet<TrieKey>>, - trim_state: &mut BTreeSet<TrieKey>, + trim_storage: &mut BTreeMap<Address, BTreeSet<MptKey>>, + trim_state: &mut BTreeSet<StateTrieT::Key>, state_trie: &mut StateTrieT, -) -> anyhow::Result<()> { +) -> anyhow::Result<()> +where + StateTrieT::Key: From<Address>
+ Ord, +{ // Ethereum mainnet: EIP-4788 if cfg!(feature = "eth_mainnet") { return do_beacon_hook( @@ -681,10 +793,13 @@ fn do_scalable_hook( block: &BlockMetadata, ger_data: Option<(H256, H256)>, storage: &mut BTreeMap<H256, StorageTrie>, - trim_storage: &mut BTreeMap<Address, BTreeSet<TrieKey>>, - trim_state: &mut BTreeSet<TrieKey>, + trim_storage: &mut BTreeMap<Address, BTreeSet<MptKey>>, + trim_state: &mut BTreeSet<StateTrieT::Key>, state_trie: &mut StateTrieT, -) -> anyhow::Result<()> { +) -> anyhow::Result<()> +where + StateTrieT::Key: From<Address>
+ Ord, +{ use evm_arithmetization::testing_utils::{ ADDRESS_SCALABLE_L2, ADDRESS_SCALABLE_L2_ADDRESS_HASHED, GLOBAL_EXIT_ROOT_ADDRESS, GLOBAL_EXIT_ROOT_ADDRESS_HASHED, GLOBAL_EXIT_ROOT_STORAGE_POS, LAST_BLOCK_STORAGE_POS, @@ -699,7 +814,7 @@ fn do_scalable_hook( .context("missing scalable contract storage trie")?; let scalable_trim = trim_storage.entry(ADDRESS_SCALABLE_L2).or_default(); - let timestamp_slot_key = TrieKey::from_slot_position(U256::from(TIMESTAMP_STORAGE_POS.1)); + let timestamp_slot_key = MptKey::from_slot_position(U256::from(TIMESTAMP_STORAGE_POS.1)); let timestamp = scalable_storage .get(&timestamp_slot_key) @@ -713,7 +828,7 @@ fn do_scalable_hook( (U256::from(LAST_BLOCK_STORAGE_POS.1), block.block_number), (U256::from(TIMESTAMP_STORAGE_POS.1), timestamp), ] { - let slot = TrieKey::from_slot_position(ix); + let slot = MptKey::from_slot_position(ix); // These values are never 0. scalable_storage.insert(slot, alloy::rlp::encode(u.compat()))?; @@ -726,12 +841,12 @@ fn do_scalable_hook( let mut arr = [0; 64]; (block.block_number - 1).to_big_endian(&mut arr[0..32]); U256::from(STATE_ROOT_STORAGE_POS.1).to_big_endian(&mut arr[32..64]); - let slot = TrieKey::from_hash(keccak_hash::keccak(arr)); + let slot = MptKey::from_hash(keccak_hash::keccak(arr)); scalable_storage.insert(slot, alloy::rlp::encode(prev_block_root_hash.compat()))?; scalable_trim.insert(slot); - trim_state.insert(TrieKey::from_address(ADDRESS_SCALABLE_L2)); + trim_state.insert(<StateTrieT::Key>::from(ADDRESS_SCALABLE_L2)); let mut scalable_acct = state_trie .get_by_address(ADDRESS_SCALABLE_L2) .context("missing scalable contract address")?; @@ -752,12 +867,12 @@ fn do_scalable_hook( let mut arr = [0; 64]; arr[0..32].copy_from_slice(&root.0); U256::from(GLOBAL_EXIT_ROOT_STORAGE_POS.1).to_big_endian(&mut arr[32..64]); - let slot = TrieKey::from_hash(keccak_hash::keccak(arr)); + let slot = MptKey::from_hash(keccak_hash::keccak(arr)); ger_storage.insert(slot, alloy::rlp::encode(l1blockhash.compat()))?; ger_trim.insert(slot); - trim_state.insert(TrieKey::from_address(GLOBAL_EXIT_ROOT_ADDRESS)); + trim_state.insert(<StateTrieT::Key>::from(GLOBAL_EXIT_ROOT_ADDRESS)); let mut ger_acct = state_trie .get_by_address(GLOBAL_EXIT_ROOT_ADDRESS) .context("missing GER contract address")?; @@ -780,11 +895,14 @@ fn do_scalable_hook( fn do_beacon_hook( block_timestamp: U256, storage: &mut BTreeMap<H256, StorageTrie>, - trim_storage: &mut BTreeMap<Address, BTreeSet<TrieKey>>, + trim_storage: &mut BTreeMap<Address, BTreeSet<MptKey>>, parent_beacon_block_root: H256, - trim_state: &mut BTreeSet<TrieKey>, + trim_state: &mut BTreeSet<StateTrieT::Key>, state_trie: &mut StateTrieT, -) -> anyhow::Result<()> { +) -> anyhow::Result<()> +where + StateTrieT::Key: From<Address>
+ Ord, +{ use evm_arithmetization::testing_utils::{ BEACON_ROOTS_CONTRACT_ADDRESS, BEACON_ROOTS_CONTRACT_ADDRESS_HASHED, HISTORY_BUFFER_LENGTH, }; @@ -805,7 +923,7 @@ fn do_beacon_hook( U256::from_big_endian(parent_beacon_block_root.as_bytes()), ), ] { - let slot = TrieKey::from_slot_position(ix); + let slot = MptKey::from_slot_position(ix); beacon_trim.insert(slot); match u.is_zero() { @@ -816,7 +934,7 @@ fn do_beacon_hook( } } } - trim_state.insert(TrieKey::from_address(BEACON_ROOTS_CONTRACT_ADDRESS)); + trim_state.insert(::from(BEACON_ROOTS_CONTRACT_ADDRESS)); let mut beacon_acct = state_trie .get_by_address(BEACON_ROOTS_CONTRACT_ADDRESS) .context("missing beacon contract address")?; @@ -844,7 +962,7 @@ fn map_receipt_bytes(bytes: Vec) -> anyhow::Result> { /// get added here as we process the deltas. #[derive(Default)] struct Hash2Code { - /// Key must always be [`hash`] of value. + /// Key must always be [`hash`](keccak_hash) of value. inner: HashMap>, } diff --git a/trace_decoder/src/lib.rs b/trace_decoder/src/lib.rs index bf7ff71cc..aa2f0a0f7 100644 --- a/trace_decoder/src/lib.rs +++ b/trace_decoder/src/lib.rs @@ -56,17 +56,12 @@ mod interface; pub use interface::*; +mod tries; mod type1; -// TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/275 -// add backend/prod support for type 2 -#[cfg(test)] -#[allow(dead_code)] mod type2; -mod typed_mpt; mod wire; -pub use core::entrypoint; -pub use core::is_precompile; +pub use core::{entrypoint, is_precompile, WireDisposition}; mod core; diff --git a/trace_decoder/src/observer.rs b/trace_decoder/src/observer.rs index 320019e55..f9811e87c 100644 --- a/trace_decoder/src/observer.rs +++ b/trace_decoder/src/observer.rs @@ -4,7 +4,7 @@ use std::marker::PhantomData; use ethereum_types::{H256, U256}; use crate::core::IntraBlockTries; -use crate::typed_mpt::{ReceiptTrie, StorageTrie, TransactionTrie}; +use crate::tries::{ReceiptTrie, StorageTrie, TransactionTrie}; /// Observer API for the trace decoder. /// Observer is used to collect various debugging and metadata info diff --git a/trace_decoder/src/typed_mpt.rs b/trace_decoder/src/tries.rs similarity index 58% rename from trace_decoder/src/typed_mpt.rs rename to trace_decoder/src/tries.rs index 8baf3cf29..91add4d98 100644 --- a/trace_decoder/src/typed_mpt.rs +++ b/trace_decoder/src/tries.rs @@ -1,10 +1,12 @@ -//! Principled MPT types used in this library. +//! Principled trie types and abstractions used in this library. use core::fmt; -use std::{collections::BTreeMap, marker::PhantomData}; +use std::{cmp, collections::BTreeMap, marker::PhantomData}; +use anyhow::ensure; +use bitvec::{array::BitArray, slice::BitSlice}; use copyvec::CopyVec; -use ethereum_types::{Address, H256, U256}; +use ethereum_types::{Address, BigEndianHash as _, H256, U256}; use evm_arithmetization::generation::mpt::AccountRlp; use mpt_trie::partial_trie::{HashedPartialTrie, Node, OnOrphanedHashNode, PartialTrie as _}; use u4::{AsNibbles, U4}; @@ -30,27 +32,26 @@ impl TypedMpt { /// Insert a node which represents an out-of-band sub-trie. /// /// See [module documentation](super) for more. - fn insert_hash(&mut self, key: TrieKey, hash: H256) -> anyhow::Result<()> { + fn insert_hash(&mut self, key: MptKey, hash: H256) -> anyhow::Result<()> { self.inner.insert(key.into_nibbles(), hash)?; Ok(()) } - /// Returns an [`Error`] if the `key` crosses into a part of the trie that - /// isn't hydrated. 
- fn insert(&mut self, key: TrieKey, value: T) -> anyhow::Result<Option<T>> + /// Returns [`Err`] if the `key` crosses into a part of the trie that + /// is hashed out. + fn insert(&mut self, key: MptKey, value: T) -> anyhow::Result<()> where T: rlp::Encodable + rlp::Decodable, { - let prev = self.get(key); self.inner .insert(key.into_nibbles(), rlp::encode(&value).to_vec())?; - Ok(prev) + Ok(()) } /// Note that this returns [`None`] if `key` crosses into a part of the - /// trie that isn't hydrated. + /// trie that is hashed out. /// /// # Panics /// - If [`rlp::decode`]-ing for `T` doesn't round-trip. - fn get(&self, key: TrieKey) -> Option<T> + fn get(&self, key: MptKey) -> Option<T> where T: rlp::Decodable, { @@ -67,12 +68,12 @@ impl TypedMpt { self.inner.hash() } /// Note that this returns owned paths and items. - fn iter(&self) -> impl Iterator<Item = (TrieKey, T)> + '_ + fn iter(&self) -> impl Iterator<Item = (MptKey, T)> + '_ where T: rlp::Decodable, { self.inner.keys().filter_map(|nib| { - let path = TrieKey::from_nibbles(nib); + let path = MptKey::from_nibbles(nib); Some((path, self.get(path)?)) }) } @@ -88,7 +89,7 @@ impl<'a, T> IntoIterator for &'a TypedMpt<T> where T: rlp::Decodable, { - type Item = (TrieKey, T); + type Item = (MptKey, T); type IntoIter = Box<dyn Iterator<Item = Self::Item> + 'a>; fn into_iter(self) -> Self::IntoIter { Box::new(self.iter()) @@ -100,9 +101,9 @@ where /// /// Semantically equivalent to [`mpt_trie::nibbles::Nibbles`]. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] -pub struct TrieKey(CopyVec<U4, 64>); +pub struct MptKey(CopyVec<U4, 64>); -impl fmt::Display for TrieKey { +impl fmt::Display for MptKey { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { for u in self.0 { f.write_fmt(format_args!("{:x}", u))? @@ -111,9 +112,9 @@ impl fmt::Display for TrieKey { } } -impl TrieKey { +impl MptKey { pub fn new(components: impl IntoIterator<Item = U4>) -> anyhow::Result<Self> { - Ok(TrieKey(CopyVec::try_from_iter(components)?)) + Ok(MptKey(CopyVec::try_from_iter(components)?)) } pub fn into_hash_left_padded(mut self) -> H256 { for _ in 0..self.0.spare_capacity_mut().len() { @@ -136,7 +137,7 @@ impl TrieKey { } pub fn from_txn_ix(txn_ix: usize) -> Self { - TrieKey::new(AsNibbles(rlp::encode(&txn_ix))).expect( + MptKey::new(AsNibbles(rlp::encode(&txn_ix))).expect( "\ rlp of an usize goes through a u64, which is 8 bytes, which will be 9 bytes RLP'ed. @@ -170,17 +171,111 @@ impl TrieKey { } } +impl From<Address>
 #[test]
-fn key_into_hash() {
-    assert_eq!(TrieKey::new([]).unwrap().into_hash(), None);
+fn mpt_key_into_hash() {
+    assert_eq!(MptKey::new([]).unwrap().into_hash(), None);
     assert_eq!(
-        TrieKey::new(itertools::repeat_n(u4::u4!(0), 64))
+        MptKey::new(itertools::repeat_n(u4::u4!(0), 64))
             .unwrap()
             .into_hash(),
         Some(H256::zero())
     )
 }
 
+/// Bounded sequence of bits,
+/// used as a key for [`StateSmt`].
+///
+/// Semantically equivalent to [`smt_trie::bits::Bits`].
+#[derive(Clone, Copy)]
+pub struct SmtKey {
+    bits: bitvec::array::BitArray<[u8; 32]>,
+    len: usize,
+}
+
+impl SmtKey {
+    fn as_bitslice(&self) -> &BitSlice<u8> {
+        self.bits.as_bitslice().get(..self.len).unwrap()
+    }
+}
+
+impl fmt::Debug for SmtKey {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list()
+            .entries(self.as_bitslice().iter().map(|it| match *it {
+                true => 1,
+                false => 0,
+            }))
+            .finish()
+    }
+}
+
+impl fmt::Display for SmtKey {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        for bit in self.as_bitslice() {
+            f.write_str(match *bit {
+                true => "1",
+                false => "0",
+            })?
+        }
+        Ok(())
+    }
+}
+
+impl SmtKey {
+    pub fn new(components: impl IntoIterator<Item = bool>) -> anyhow::Result<Self> {
+        let mut bits = bitvec::array::BitArray::default();
+        let mut len = 0;
+        for (ix, bit) in components.into_iter().enumerate() {
+            ensure!(
+                bits.get(ix).is_some(),
+                "expected at most {} components",
+                bits.len()
+            );
+            bits.set(ix, bit);
+            len += 1
+        }
+        Ok(Self { bits, len })
+    }
+
+    pub fn into_smt_bits(self) -> smt_trie::bits::Bits {
+        let mut bits = smt_trie::bits::Bits::default();
+        for bit in self.as_bitslice() {
+            bits.push_bit(*bit)
+        }
+        bits
+    }
+}
+
+impl From<Address> for SmtKey {
+    fn from(addr: Address) -> Self {
+        let H256(bytes) = keccak_hash::keccak(addr);
+        Self::new(BitArray::<_>::new(bytes)).expect("SmtKey has room for 256 bits")
+    }
+}
+
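`SmtKey` differs from `MptKey` in granularity: its path is a sequence of bits rather than nibbles, capped at 256 components. A toy illustration using `bitvec` (already a dependency above); the explicit `Msb0` ordering is chosen here for readability and is an assumption of this sketch, not necessarily the ordering the `From<Address>` impl above ends up with:

```rust
use bitvec::{array::BitArray, order::Msb0};

fn main() {
    // A 4-byte example: SMT paths are bit-granular, so one byte
    // contributes eight path components instead of two nibbles.
    let bytes = [0b1010_0000u8, 0, 0, 0];
    let bits: BitArray<[u8; 4], Msb0> = BitArray::new(bytes);
    let path: Vec<bool> = bits.iter().by_vals().take(8).collect();
    assert!(path[0] && !path[1] && path[2]); // 1, 0, 1, ...
}
```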
+impl Ord for SmtKey {
+    fn cmp(&self, other: &Self) -> cmp::Ordering {
+        self.as_bitslice().cmp(other.as_bitslice())
+    }
+}
+impl PartialOrd for SmtKey {
+    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
+        Some(self.cmp(other))
+    }
+}
+impl Eq for SmtKey {}
+impl PartialEq for SmtKey {
+    fn eq(&self, other: &Self) -> bool {
+        self.as_bitslice().eq(other.as_bitslice())
+    }
+}
+
 /// Per-block, `txn_ix -> [u8]`.
 ///
 /// See
@@ -196,10 +291,10 @@ impl TransactionTrie {
     pub fn insert(&mut self, txn_ix: usize, val: Vec<u8>) -> anyhow::Result<Option<Vec<u8>>> {
         let prev = self
             .untyped
-            .get(TrieKey::from_txn_ix(txn_ix).into_nibbles())
+            .get(MptKey::from_txn_ix(txn_ix).into_nibbles())
             .map(Vec::from);
         self.untyped
-            .insert(TrieKey::from_txn_ix(txn_ix).into_nibbles(), val)?;
+            .insert(MptKey::from_txn_ix(txn_ix).into_nibbles(), val)?;
         Ok(prev)
     }
     pub fn root(&self) -> H256 {
@@ -214,7 +309,7 @@ impl TransactionTrie {
             &self.untyped,
             txn_ixs
                 .into_iter()
-                .map(|it| TrieKey::from_txn_ix(it).into_nibbles()),
+                .map(|it| MptKey::from_txn_ix(it).into_nibbles()),
         )?;
         Ok(())
     }
@@ -241,10 +336,10 @@ impl ReceiptTrie {
     pub fn insert(&mut self, txn_ix: usize, val: Vec<u8>) -> anyhow::Result<Option<Vec<u8>>> {
         let prev = self
             .untyped
-            .get(TrieKey::from_txn_ix(txn_ix).into_nibbles())
+            .get(MptKey::from_txn_ix(txn_ix).into_nibbles())
             .map(Vec::from);
         self.untyped
-            .insert(TrieKey::from_txn_ix(txn_ix).into_nibbles(), val)?;
+            .insert(MptKey::from_txn_ix(txn_ix).into_nibbles(), val)?;
         Ok(prev)
     }
     pub fn root(&self) -> H256 {
@@ -259,7 +354,7 @@ impl ReceiptTrie {
             &self.untyped,
             txn_ixs
                 .into_iter()
-                .map(|it| TrieKey::from_txn_ix(it).into_nibbles()),
+                .map(|it| MptKey::from_txn_ix(it).into_nibbles()),
         )?;
         Ok(())
     }
@@ -271,18 +366,14 @@ impl From<ReceiptTrie> for HashedPartialTrie {
     }
 }
 
-/// TODO(0xaatif): document this after refactoring is done https://github.com/0xPolygonZero/zk_evm/issues/275
+/// TODO(0xaatif): document this after refactoring is done
 pub trait StateTrie {
-    fn insert_by_address(
-        &mut self,
-        address: Address,
-        account: AccountRlp,
-    ) -> anyhow::Result<Option<AccountRlp>>;
-    fn insert_hash_by_key(&mut self, key: TrieKey, hash: H256) -> anyhow::Result<()>;
+    type Key;
+    fn insert_by_address(&mut self, address: Address, account: AccountRlp) -> anyhow::Result<()>;
     fn get_by_address(&self, address: Address) -> Option<AccountRlp>;
-    fn reporting_remove(&mut self, address: Address) -> anyhow::Result<Option<TrieKey>>;
-    /// _Hash out_ parts of the trie that aren't in `txn_ixs`.
-    fn mask(&mut self, address: impl IntoIterator<Item = TrieKey>) -> anyhow::Result<()>;
+    fn reporting_remove(&mut self, address: Address) -> anyhow::Result<Option<Self::Key>>;
+    /// _Hash out_ parts of the trie that aren't in `addresses`.
+    fn mask(&mut self, address: impl IntoIterator<Item = Self::Key>) -> anyhow::Result<()>;
     fn iter(&self) -> impl Iterator<Item = (H256, AccountRlp)> + '_;
     fn root(&self) -> H256;
 }
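The new associated `Key` type is what keeps callers generic over both backends: code like `do_beacon_hook` at the top of this diff bounds `StateTrieT::Key: From<Address> + Ord` and converts with `<StateTrieT::Key>::from(..)`. A compressed, self-contained sketch of that pattern (stand-in types, not the real trait):

```rust
use std::collections::BTreeSet;

// Pared-down stand-ins for the real types.
type Address = [u8; 20];

trait StateTrie {
    type Key;
    fn insert_by_address(&mut self, address: Address);
}

#[derive(Default)]
struct DemoTrie {
    touched: Vec<Address>,
}

impl StateTrie for DemoTrie {
    type Key = [u8; 20]; // stand-in for MptKey/SmtKey
    fn insert_by_address(&mut self, address: Address) {
        self.touched.push(address);
    }
}

/// Works for `StateMpt` (`Key = MptKey`) and `StateSmt` (`Key = SmtKey`) alike.
fn touch_and_trim<T: StateTrie>(trie: &mut T, trim: &mut BTreeSet<T::Key>, address: Address)
where
    T::Key: From<Address> + Ord,
{
    trim.insert(<T::Key>::from(address));
    trie.insert_by_address(address);
}

fn main() {
    let (mut trie, mut trim) = (DemoTrie::default(), BTreeSet::new());
    touch_and_trim(&mut trie, &mut trim, [0x42; 20]);
    assert_eq!(trim.len(), 1);
}
```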
@@ -304,13 +395,17 @@ impl StateMpt {
             },
         }
     }
+    /// Insert a _hashed out_ part of the trie
+    pub fn insert_hash_by_key(&mut self, key: MptKey, hash: H256) -> anyhow::Result<()> {
+        self.typed.insert_hash(key, hash)
+    }
     #[deprecated = "prefer operations on `Address` where possible, as SMT support requires this"]
     pub fn insert_by_hashed_address(
         &mut self,
         key: H256,
         account: AccountRlp,
-    ) -> anyhow::Result<Option<AccountRlp>> {
-        self.typed.insert(TrieKey::from_hash(key), account)
+    ) -> anyhow::Result<()> {
+        self.typed.insert(MptKey::from_hash(key), account)
     }
     pub fn iter(&self) -> impl Iterator<Item = (H256, AccountRlp)> + '_ {
         self.typed
@@ -323,34 +418,27 @@ impl StateMpt {
 }
 
 impl StateTrie for StateMpt {
-    fn insert_by_address(
-        &mut self,
-        address: Address,
-        account: AccountRlp,
-    ) -> anyhow::Result<Option<AccountRlp>> {
+    type Key = MptKey;
+    fn insert_by_address(&mut self, address: Address, account: AccountRlp) -> anyhow::Result<()> {
         #[expect(deprecated)]
         self.insert_by_hashed_address(keccak_hash::keccak(address), account)
     }
-    /// Insert an _hashed out_ part of the trie
-    fn insert_hash_by_key(&mut self, key: TrieKey, hash: H256) -> anyhow::Result<()> {
-        self.typed.insert_hash(key, hash)
-    }
     fn get_by_address(&self, address: Address) -> Option<AccountRlp> {
         self.typed
-            .get(TrieKey::from_hash(keccak_hash::keccak(address)))
+            .get(MptKey::from_hash(keccak_hash::keccak(address)))
     }
     /// Delete the account at `address`, returning any remaining branch on
     /// collapse
-    fn reporting_remove(&mut self, address: Address) -> anyhow::Result<Option<TrieKey>> {
+    fn reporting_remove(&mut self, address: Address) -> anyhow::Result<Option<MptKey>> {
         delete_node_and_report_remaining_key_if_branch_collapsed(
             self.typed.as_mut_hashed_partial_trie_unchecked(),
-            TrieKey::from_address(address),
+            MptKey::from_address(address),
         )
     }
-    fn mask(&mut self, addresses: impl IntoIterator<Item = TrieKey>) -> anyhow::Result<()> {
+    fn mask(&mut self, addresses: impl IntoIterator<Item = MptKey>) -> anyhow::Result<()> {
         let inner = mpt_trie::trie_subsets::create_trie_subset(
             self.typed.as_hashed_partial_trie(),
-            addresses.into_iter().map(TrieKey::into_nibbles),
+            addresses.into_iter().map(MptKey::into_nibbles),
         )?;
         self.typed = TypedMpt {
             inner,
@@ -377,31 +465,30 @@ impl From<StateMpt> for HashedPartialTrie {
     }
 }
 
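`mask` is what keeps witnesses small: after decoding, every subtrie that was not visited is replaced by its hash. A toy model of "hash out everything except the retained keys" (plain `std`, no real trie structure or keccak):

```rust
use std::collections::{BTreeMap, BTreeSet};

/// Toy model of masking: entries outside `keep` collapse to a digest,
/// mirroring how `mask` replaces untouched subtries with hash nodes.
enum Slot {
    Value(String),
    HashedOut(u64),
}

fn mask(trie: &mut BTreeMap<u64, Slot>, keep: &BTreeSet<u64>) {
    for (key, slot) in trie.iter_mut() {
        if !keep.contains(key) {
            // A real implementation stores keccak(subtrie); a u64 digest
            // stands in for it here.
            *slot = Slot::HashedOut(*key ^ 0xdead_beef);
        }
    }
}

fn main() {
    let mut trie = BTreeMap::from([
        (1, Slot::Value("touched".into())),
        (2, Slot::Value("untouched".into())),
    ]);
    mask(&mut trie, &BTreeSet::from([1]));
    assert!(matches!(trie[&2], Slot::HashedOut(_)));
}
```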
+// TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/706
+//                We're covering for [`smt_trie`] in a couple of ways:
+//                - insertion operations aren't fallible, they just panic.
+//                - it documents a requirement that `set_hash` is called before `set`.
+#[derive(Clone, Debug)]
 pub struct StateSmt {
     address2state: BTreeMap<Address, AccountRlp>,
-    hashed_out: BTreeMap<TrieKey, H256>,
+    hashed_out: BTreeMap<SmtKey, H256>,
 }
 
 impl StateTrie for StateSmt {
-    fn insert_by_address(
-        &mut self,
-        address: Address,
-        account: AccountRlp,
-    ) -> anyhow::Result<Option<AccountRlp>> {
-        Ok(self.address2state.insert(address, account))
-    }
-    fn insert_hash_by_key(&mut self, key: TrieKey, hash: H256) -> anyhow::Result<()> {
-        self.hashed_out.insert(key, hash);
+    type Key = SmtKey;
+    fn insert_by_address(&mut self, address: Address, account: AccountRlp) -> anyhow::Result<()> {
+        self.address2state.insert(address, account);
         Ok(())
     }
     fn get_by_address(&self, address: Address) -> Option<AccountRlp> {
         self.address2state.get(&address).copied()
     }
-    fn reporting_remove(&mut self, address: Address) -> anyhow::Result<Option<TrieKey>> {
+    fn reporting_remove(&mut self, address: Address) -> anyhow::Result<Option<SmtKey>> {
         self.address2state.remove(&address);
         Ok(None)
     }
-    fn mask(&mut self, address: impl IntoIterator<Item = TrieKey>) -> anyhow::Result<()> {
+    fn mask(&mut self, address: impl IntoIterator<Item = SmtKey>) -> anyhow::Result<()> {
         let _ = address;
         Ok(())
     }
@@ -411,7 +498,111 @@ impl StateTrie for StateSmt {
             .map(|(addr, acct)| (keccak_hash::keccak(addr), *acct))
     }
     fn root(&self) -> H256 {
-        todo!()
+        conv_hash::smt2eth(self.as_smt().root)
+    }
+}
+
+impl StateSmt {
+    pub(crate) fn new_unchecked(
+        address2state: BTreeMap<Address, AccountRlp>,
+        hashed_out: BTreeMap<SmtKey, H256>,
+    ) -> Self {
+        Self {
+            address2state,
+            hashed_out,
+        }
+    }
+
+    fn as_smt(&self) -> smt_trie::smt::Smt<smt_trie::db::MemoryDb> {
+        let Self {
+            address2state,
+            hashed_out,
+        } = self;
+        let mut smt = smt_trie::smt::Smt::<smt_trie::db::MemoryDb>::default();
+        for (k, v) in hashed_out {
+            smt.set_hash(k.into_smt_bits(), conv_hash::eth2smt(*v));
+        }
+        for (
+            addr,
+            AccountRlp {
+                nonce,
+                balance,
+                storage_root,
+                code_hash,
+            },
+        ) in address2state
+        {
+            smt.set(smt_trie::keys::key_nonce(*addr), *nonce);
+            smt.set(smt_trie::keys::key_balance(*addr), *balance);
+            smt.set(smt_trie::keys::key_code(*addr), code_hash.into_uint());
+            smt.set(
+                // TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/707
+                //                combined abstraction for state and storage
+                smt_trie::keys::key_storage(*addr, U256::zero()),
+                storage_root.into_uint(),
+            );
+        }
+        smt
+    }
+}
+
+mod conv_hash {
+    //! We [`u64::to_le_bytes`] because:
+    //! - Reference go code just puns the bytes:
+    //! - It's better to fix the endianness for correctness.
+    //! - Most (consumer) CPUs are little-endian.
+
+    use std::array;
+
+    use ethereum_types::H256;
+    use itertools::Itertools as _;
+    use plonky2::{
+        field::{
+            goldilocks_field::GoldilocksField,
+            types::{Field as _, PrimeField64},
+        },
+        hash::hash_types::HashOut,
+    };
+
+    /// # Panics
+    /// - On certain inputs if `debug_assertions` are enabled. See
+    ///   [`GoldilocksField::from_canonical_u64`] for more.
+    pub fn eth2smt(H256(bytes): H256) -> smt_trie::smt::HashOut {
+        let mut bytes = bytes.into_iter();
+        // (no unsafe, no unstable)
+        let ret = HashOut {
+            elements: array::from_fn(|_ix| {
+                let (a, b, c, d, e, f, g, h) = bytes.next_tuple().unwrap();
+                GoldilocksField::from_canonical_u64(u64::from_le_bytes([a, b, c, d, e, f, g, h]))
+            }),
+        };
+        assert_eq!(bytes.len(), 0);
+        ret
+    }
+    pub fn smt2eth(HashOut { elements }: smt_trie::smt::HashOut) -> H256 {
+        H256(
+            build_array::ArrayBuilder::from_iter(
+                elements
+                    .iter()
+                    .map(GoldilocksField::to_canonical_u64)
+                    .flat_map(u64::to_le_bytes),
+            )
+            .build_exact()
+            .unwrap(),
+        )
+    }
+
+    #[test]
+    fn test() {
+        use plonky2::field::types::Field64 as _;
+        let mut max = std::iter::repeat(GoldilocksField::ORDER - 1).flat_map(u64::to_le_bytes);
+        for h in [
+            H256::zero(),
+            H256(array::from_fn(|ix| ix as u8)),
+            H256(array::from_fn(|_| max.next().unwrap())),
+        ] {
+            assert_eq!(smt2eth(eth2smt(h)), h);
+        }
     }
 }
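The `conv_hash` helpers above pin down a concrete byte layout: a 32-byte `H256` splits into four 8-byte limbs, each read little-endian into a `u64` that must stay below the Goldilocks order. A dependency-free sketch of just the limb layout (the field reduction and canonicality checks are elided):

```rust
/// Split a 32-byte hash into four u64 limbs, little-endian per limb,
/// mirroring `conv_hash::eth2smt`'s layout (canonicality checks elided).
fn split_h256(bytes: [u8; 32]) -> [u64; 4] {
    core::array::from_fn(|i| {
        let mut limb = [0u8; 8];
        limb.copy_from_slice(&bytes[i * 8..(i + 1) * 8]);
        u64::from_le_bytes(limb)
    })
}

fn join_h256(limbs: [u64; 4]) -> [u8; 32] {
    let mut out = [0u8; 32];
    for (i, limb) in limbs.iter().enumerate() {
        out[i * 8..(i + 1) * 8].copy_from_slice(&limb.to_le_bytes());
    }
    out
}

fn main() {
    let h: [u8; 32] = core::array::from_fn(|ix| ix as u8);
    assert_eq!(join_h256(split_h256(h)), h); // round-trips, like the crate's test
}
```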
@@ -428,15 +619,15 @@ impl StorageTrie {
             untyped: HashedPartialTrie::new_with_strategy(Node::Empty, strategy),
         }
     }
-    pub fn get(&mut self, key: &TrieKey) -> Option<&[u8]> {
+    pub fn get(&mut self, key: &MptKey) -> Option<&[u8]> {
         self.untyped.get(key.into_nibbles())
     }
-    pub fn insert(&mut self, key: TrieKey, value: Vec<u8>) -> anyhow::Result<Option<Vec<u8>>> {
+    pub fn insert(&mut self, key: MptKey, value: Vec<u8>) -> anyhow::Result<Option<Vec<u8>>> {
         let prev = self.get(&key).map(Vec::from);
         self.untyped.insert(key.into_nibbles(), value)?;
         Ok(prev)
     }
-    pub fn insert_hash(&mut self, key: TrieKey, hash: H256) -> anyhow::Result<()> {
+    pub fn insert_hash(&mut self, key: MptKey, hash: H256) -> anyhow::Result<()> {
         self.untyped.insert(key.into_nibbles(), hash)?;
         Ok(())
     }
@@ -446,17 +637,17 @@ impl StorageTrie {
     pub const fn as_hashed_partial_trie(&self) -> &HashedPartialTrie {
         &self.untyped
     }
-    pub fn reporting_remove(&mut self, key: TrieKey) -> anyhow::Result<Option<TrieKey>> {
+    pub fn reporting_remove(&mut self, key: MptKey) -> anyhow::Result<Option<MptKey>> {
         delete_node_and_report_remaining_key_if_branch_collapsed(&mut self.untyped, key)
     }
     pub fn as_mut_hashed_partial_trie_unchecked(&mut self) -> &mut HashedPartialTrie {
         &mut self.untyped
     }
     /// _Hash out_ the parts of the trie that aren't in `paths`.
-    pub fn mask(&mut self, paths: impl IntoIterator<Item = TrieKey>) -> anyhow::Result<()> {
+    pub fn mask(&mut self, paths: impl IntoIterator<Item = MptKey>) -> anyhow::Result<()> {
         self.untyped = mpt_trie::trie_subsets::create_trie_subset(
             &self.untyped,
-            paths.into_iter().map(TrieKey::into_nibbles),
+            paths.into_iter().map(MptKey::into_nibbles),
         )?;
         Ok(())
     }
@@ -473,18 +664,18 @@ impl From<StorageTrie> for HashedPartialTrie {
 /// plonky2. Returns the key to the remaining child if a collapse occurred.
 fn delete_node_and_report_remaining_key_if_branch_collapsed(
     trie: &mut HashedPartialTrie,
-    key: TrieKey,
-) -> anyhow::Result<Option<TrieKey>> {
+    key: MptKey,
+) -> anyhow::Result<Option<MptKey>> {
     let old_trace = get_trie_trace(trie, key);
     trie.delete(key.into_nibbles())?;
     let new_trace = get_trie_trace(trie, key);
     Ok(
         node_deletion_resulted_in_a_branch_collapse(&old_trace, &new_trace)
-            .map(TrieKey::from_nibbles),
+            .map(MptKey::from_nibbles),
     )
 }
 
-fn get_trie_trace(trie: &HashedPartialTrie, k: TrieKey) -> mpt_trie::utils::TriePath {
+fn get_trie_trace(trie: &HashedPartialTrie, k: MptKey) -> mpt_trie::utils::TriePath {
     mpt_trie::special_query::path_for_query(trie, k.into_nibbles(), true).collect()
 }
 
diff --git a/trace_decoder/src/type1.rs b/trace_decoder/src/type1.rs
index aeea0dbb6..c44beaec7 100644
--- a/trace_decoder/src/type1.rs
+++ b/trace_decoder/src/type1.rs
@@ -12,7 +12,7 @@ use mpt_trie::partial_trie::OnOrphanedHashNode;
 use nunny::NonEmpty;
 use u4::U4;
 
-use crate::typed_mpt::{StateMpt, StateTrie as _, StorageTrie, TrieKey};
+use crate::tries::{MptKey, StateMpt, StorageTrie};
 use crate::wire::{Instruction, SmtLeaf};
 
 #[derive(Debug, Clone)]
@@ -66,10 +66,10 @@ fn visit(
         Node::Hash(Hash { raw_hash }) => {
             frontend
                 .state
-                .insert_hash_by_key(TrieKey::new(path.iter().copied())?, raw_hash.into())?;
+                .insert_hash_by_key(MptKey::new(path.iter().copied())?, raw_hash.into())?;
         }
         Node::Leaf(Leaf { key, value }) => {
-            let path = TrieKey::new(path.iter().copied().chain(key))?
+            let path = MptKey::new(path.iter().copied().chain(key))?
                 .into_hash()
                 .context("invalid depth for leaf of state trie")?;
             match value {
@@ -106,8 +106,7 @@ fn visit(
                 },
             };
             #[expect(deprecated)] // this is MPT-specific code
-            let clobbered = frontend.state.insert_by_hashed_address(path, account)?;
-            ensure!(clobbered.is_none(), "duplicate account");
+            frontend.state.insert_by_hashed_address(path, account)?;
         }
     }
 }
@@ -141,12 +140,12 @@ fn node2storagetrie(node: Node) -> anyhow::Result<StorageTrie> {
     ) -> anyhow::Result<()> {
         match node {
             Node::Hash(Hash { raw_hash }) => {
-                mpt.insert_hash(TrieKey::new(path.iter().copied())?, raw_hash.into())?;
+                mpt.insert_hash(MptKey::new(path.iter().copied())?, raw_hash.into())?;
             }
             Node::Leaf(Leaf { key, value }) => {
                 match value {
                     Either::Left(Value { raw_value }) => mpt.insert(
-                        TrieKey::new(path.iter().copied().chain(key))?,
+                        MptKey::new(path.iter().copied().chain(key))?,
                         rlp::encode(&raw_value.as_slice()).to_vec(),
                     )?,
                     Either::Right(_) => bail!("unexpected account node in storage trie"),
@@ -380,6 +379,8 @@ fn finish_stack(v: &mut Vec<Node>) -> anyhow::Result<Node> {
 
 #[test]
 fn test_tries() {
+    use crate::tries::StateTrie as _;
+
     for (ix, case) in
         serde_json::from_str::<Vec<Case>>(include_str!("cases/zero_jerigon.json"))
             .unwrap()
diff --git a/trace_decoder/src/type2.rs b/trace_decoder/src/type2.rs
index dd3e45c4b..a71761533 100644
--- a/trace_decoder/src/type2.rs
+++ b/trace_decoder/src/type2.rs
@@ -1,35 +1,34 @@
 //! Frontend for the witness format emitted by e.g [`0xPolygonHermez/cdk-erigon`](https://github.com/0xPolygonHermez/cdk-erigon/)
 //! Ethereum node.
-use std::{
-    collections::{HashMap, HashSet},
-    iter,
-};
+use std::collections::{BTreeMap, HashSet};
 
 use anyhow::{bail, ensure, Context as _};
-use bitvec::vec::BitVec;
-use either::Either;
-use ethereum_types::BigEndianHash as _;
-use itertools::{EitherOrBoth, Itertools as _};
+use ethereum_types::{Address, U256};
+use evm_arithmetization::generation::mpt::AccountRlp;
+use itertools::EitherOrBoth;
+use keccak_hash::H256;
 use nunny::NonEmpty;
-use plonky2::field::types::Field;
-
-use crate::wire::{Instruction, SmtLeaf, SmtLeafType};
+use stackstack::Stack;
 
-type SmtTrie = smt_trie::smt::Smt<smt_trie::db::MemoryDb>;
+use crate::{
+    tries::{SmtKey, StateSmt},
+    wire::{Instruction, SmtLeaf, SmtLeafType},
+};
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+/// Combination of all the [`SmtLeaf::node_type`]s
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
 pub struct CollatedLeaf {
     pub balance: Option<U256>,
     pub nonce: Option<U256>,
-    pub code_hash: Option<H256>,
-    pub storage_root: Option<H256>,
+    pub code: Option<U256>,
+    pub code_length: Option<U256>,
+    pub storage: BTreeMap<U256, U256>,
 }
 
 pub struct Frontend {
-    pub trie: SmtTrie,
+    pub trie: StateSmt,
     pub code: HashSet<NonEmpty<Vec<u8>>>,
-    pub collation: HashMap<Address, CollatedLeaf>,
 }
 
 /// # Panics
@@ -37,18 +36,13 @@ pub struct Frontend {
 /// NOT call this function on untrusted inputs.
 pub fn frontend(instructions: impl IntoIterator<Item = Instruction>) -> anyhow::Result<Frontend> {
     let (node, code) = fold(instructions).context("couldn't fold smt from instructions")?;
-    let (trie, collation) =
-        node2trie(node).context("couldn't construct trie and collation from folded node")?;
-    Ok(Frontend {
-        trie,
-        code,
-        collation,
-    })
+    let trie = node2trie(node).context("couldn't construct trie and collation from folded node")?;
+    Ok(Frontend { trie, code })
 }
 
 /// Node in a binary (SMT) tree.
 ///
-/// This is an intermediary type on the way to [`SmtTrie`].
+/// This is an intermediary type on the way to [`StateSmt`].
 enum Node {
     Branch(EitherOrBoth<Box<Self>>),
     Hash([u8; 32]),
@@ -105,9 +99,9 @@ fn fold1(instructions: impl IntoIterator<Item = Instruction>) -> anyhow::Result<
     Ok(Some(match mask {
         // note that the single-child bits are reversed...
-        0b0001 => Node::Branch(EitherOrBoth::Left(get_child()?)),
-        0b0010 => Node::Branch(EitherOrBoth::Right(get_child()?)),
-        0b0011 => Node::Branch(EitherOrBoth::Both(get_child()?, get_child()?)),
+        0b_01 => Node::Branch(EitherOrBoth::Left(get_child()?)),
+        0b_10 => Node::Branch(EitherOrBoth::Right(get_child()?)),
+        0b_11 => Node::Branch(EitherOrBoth::Both(get_child()?, get_child()?)),
         other => bail!("unexpected bit pattern in Branch mask: {:#b}", other),
     }))
 }
@@ -119,113 +113,162 @@ fn fold1(instructions: impl IntoIterator<Item = Instruction>) -> anyhow::Result<
     }
 }
 
-/// Pack a [`Node`] tree into an [`SmtTrie`].
-/// Also summarizes the [`Node::Leaf`]s out-of-band.
-///
-/// # Panics
-/// - if the tree is too deep.
-/// - if [`SmtLeaf::address`] or [`SmtLeaf::value`] are the wrong length.
-/// - if [`SmtLeafType::Storage`] is the wrong length.
-/// - [`SmtTrie`] panics internally.
-fn node2trie(
-    node: Node,
-) -> anyhow::Result<(SmtTrie, HashMap<Address, CollatedLeaf>)> {
-    let mut trie = SmtTrie::default();
-
-    let (hashes, leaves) =
-        iter_leaves(node).partition_map::<Vec<_>, Vec<_>, _, _, _>(|(path, leaf)| match leaf {
-            Either::Left(it) => Either::Left((path, it)),
-            Either::Right(it) => Either::Right(it),
-        });
-
-    for (path, hash) in hashes {
-        // needs to be called before `set`, below, "to avoid any issues" according
-        // to the smt docs.
- trie.set_hash( - bits2bits(path), - smt_trie::smt::HashOut { - elements: { - let ethereum_types::U256(arr) = ethereum_types::H256(hash).into_uint(); - arr.map(smt_trie::smt::F::from_canonical_u64) +fn node2trie(node: Node) -> anyhow::Result { + let mut hashes = BTreeMap::new(); + let mut leaves = BTreeMap::new(); + visit(&mut hashes, &mut leaves, Stack::new(), node)?; + Ok(StateSmt::new_unchecked( + leaves + .into_iter() + .map( + |( + addr, + CollatedLeaf { + balance, + nonce, + // TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/707 + // we shouldn't ignore these fields + code: _, + code_length: _, + storage: _, + }, + )| { + ( + addr, + AccountRlp { + nonce: nonce.unwrap_or_default(), + balance: balance.unwrap_or_default(), + storage_root: H256::zero(), + code_hash: H256::zero(), + }, + ) }, - }, - ) - } + ) + .collect(), + hashes, + )) +} - let mut collated = HashMap::::new(); - for SmtLeaf { - node_type, - address, - value, - } in leaves - { - let address = ethereum_types::Address::from_slice(&address); - let collated = collated.entry(address).or_default(); - let value = ethereum_types::U256::from_big_endian(&value); - let key = match node_type { - SmtLeafType::Balance => { - ensure!(collated.balance.is_none(), "double write of field"); - collated.balance = Some(value); - smt_trie::keys::key_balance(address) - } - SmtLeafType::Nonce => { - ensure!(collated.nonce.is_none(), "double write of field"); - collated.nonce = Some(value); - smt_trie::keys::key_nonce(address) +fn visit( + hashes: &mut BTreeMap, + leaves: &mut BTreeMap, + path: Stack, + node: Node, +) -> anyhow::Result<()> { + match node { + Node::Branch(children) => { + let (left, right) = children.left_and_right(); + if let Some(left) = left { + visit(hashes, leaves, path.pushed(false), *left)?; } - SmtLeafType::Code => { - ensure!(collated.code_hash.is_none(), "double write of field"); - collated.code_hash = Some({ - let mut it = ethereum_types::H256::zero(); - value.to_big_endian(it.as_bytes_mut()); - it - }); - smt_trie::keys::key_code(address) + if let Some(right) = right { + visit(hashes, leaves, path.pushed(true), *right)?; } - SmtLeafType::Storage(it) => { - ensure!(collated.storage_root.is_none(), "double write of field"); - // TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/275 - // do we not do anything with the storage here? - smt_trie::keys::key_storage(address, ethereum_types::U256::from_big_endian(&it)) + } + Node::Hash(hash) => { + hashes.insert(SmtKey::new(path.iter().copied())?, H256(hash)); + } + Node::Leaf(SmtLeaf { + node_type, + address, + value, + }) => { + let address = Address::from_slice(&address); + let collated = leaves.entry(address).or_default(); + let value = U256::from_big_endian(&value); + macro_rules! 
ensure { + ($expr:expr) => { + ::anyhow::ensure!($expr, "double write of field for address {}", address) + }; } - SmtLeafType::CodeLength => smt_trie::keys::key_code_length(address), - }; - trie.set(key, value) + match node_type { + SmtLeafType::Balance => { + ensure!(collated.balance.is_none()); + collated.balance = Some(value) + } + SmtLeafType::Nonce => { + ensure!(collated.nonce.is_none()); + collated.nonce = Some(value) + } + SmtLeafType::Code => { + ensure!(collated.code.is_none()); + collated.code = Some(value) + } + SmtLeafType::Storage(slot) => { + let clobbered = collated.storage.insert(U256::from_big_endian(&slot), value); + ensure!(clobbered.is_none()) + } + SmtLeafType::CodeLength => { + ensure!(collated.code_length.is_none()); + collated.code_length = Some(value) + } + }; + } } - Ok((trie, collated)) + Ok(()) } -/// # Panics -/// - on overcapacity -fn bits2bits(ours: BitVec) -> smt_trie::bits::Bits { - let mut theirs = smt_trie::bits::Bits::empty(); - for it in ours { - theirs.push_bit(it) - } - theirs -} +#[test] +fn test_tries() { + type Smt = smt_trie::smt::Smt; + use ethereum_types::BigEndianHash as _; + use plonky2::field::types::{Field, Field64 as _}; -/// Simple, inefficient visitor of all leaves of the [`Node`] tree. -#[allow(clippy::type_complexity)] -fn iter_leaves(node: Node) -> Box)>> { - match node { - Node::Hash(it) => Box::new(iter::once((BitVec::new(), Either::Left(it)))), - Node::Branch(it) => { - let (left, right) = it.left_and_right(); - let left = left - .into_iter() - .flat_map(|it| iter_leaves(*it).update(|(path, _)| path.insert(0, false))); - let right = right - .into_iter() - .flat_map(|it| iter_leaves(*it).update(|(path, _)| path.insert(0, true))); - Box::new(left.chain(right)) + // TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/707 + // this logic should live in StateSmt, but we need to + // - abstract over state and storage tries + // - parameterize the account types + // we preserve this code as a tested record of how it _should_ + // be done. 
+ fn node2trie(node: Node) -> anyhow::Result { + let mut trie = Smt::default(); + let mut hashes = BTreeMap::new(); + let mut leaves = BTreeMap::new(); + visit(&mut hashes, &mut leaves, Stack::new(), node)?; + for (key, hash) in hashes { + trie.set_hash( + key.into_smt_bits(), + smt_trie::smt::HashOut { + elements: { + let ethereum_types::U256(arr) = hash.into_uint(); + for u in arr { + ensure!(u < smt_trie::smt::F::ORDER); + } + arr.map(smt_trie::smt::F::from_canonical_u64) + }, + }, + ); } - Node::Leaf(it) => Box::new(iter::once((BitVec::new(), Either::Right(it)))), + for ( + addr, + CollatedLeaf { + balance, + nonce, + code, + code_length, + storage, + }, + ) in leaves + { + use smt_trie::keys::{key_balance, key_code, key_code_length, key_nonce, key_storage}; + + for (value, key_fn) in [ + (balance, key_balance as fn(_) -> _), + (nonce, key_nonce), + (code, key_code), + (code_length, key_code_length), + ] { + if let Some(value) = value { + trie.set(key_fn(addr), value); + } + } + for (slot, value) in storage { + trie.set(key_storage(addr, slot), value); + } + } + Ok(trie) } -} -#[test] -fn test_tries() { for (ix, case) in serde_json::from_str::>(include_str!("cases/hermez_cdk_erigon.json")) .unwrap() @@ -234,10 +277,11 @@ fn test_tries() { { println!("case {}", ix); let instructions = crate::wire::parse(&case.bytes).unwrap(); - let frontend = frontend(instructions).unwrap(); + let (node, _code) = fold(instructions).unwrap(); + let trie = node2trie(node).unwrap(); assert_eq!(case.expected_state_root, { let mut it = [0; 32]; - smt_trie::utils::hashout2u(frontend.trie.root).to_big_endian(&mut it); + smt_trie::utils::hashout2u(trie.root).to_big_endian(&mut it); ethereum_types::H256(it) }); } diff --git a/trace_decoder/src/wire.rs b/trace_decoder/src/wire.rs index 6f56f1e44..63dee6040 100644 --- a/trace_decoder/src/wire.rs +++ b/trace_decoder/src/wire.rs @@ -1,6 +1,6 @@ //! We support two wire formats: -//! - Type 1, based on [this specification](https://gist.github.com/mandrigin/ff7eccf30d0ef9c572bafcb0ab665cff#the-bytes-layout). -//! - Type 2, loosely based on [this specification](https://github.com/0xPolygonHermez/cdk-erigon/blob/d1d6b3c7a4c81c46fd995c1baa5c1f8069ff0348/turbo/trie/WITNESS.md) +//! - Type 1 (AKA MPT), based on [this specification](https://gist.github.com/mandrigin/ff7eccf30d0ef9c572bafcb0ab665cff#the-bytes-layout). +//! - Type 2 (AKA SMT), loosely based on [this specification](https://github.com/0xPolygonHermez/cdk-erigon/blob/d1d6b3c7a4c81c46fd995c1baa5c1f8069ff0348/turbo/trie/WITNESS.md) //! //! Fortunately, their opcodes don't conflict, so we can have a single //! 
[`Instruction`] type, with shared parsing logic in this module, and bail on @@ -80,6 +80,8 @@ pub enum Instruction { } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +// TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/705 +// `address` and `value` should be fixed length fields pub struct SmtLeaf { pub node_type: SmtLeafType, pub address: NonEmpty>, @@ -87,6 +89,8 @@ pub struct SmtLeaf { } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +// TODO(0xaatif): https://github.com/0xPolygonZero/zk_evm/issues/705 +// `Storage` should contain a fixed length field pub enum SmtLeafType { Balance, Nonce, diff --git a/trace_decoder/tests/consistent-with-header.rs b/trace_decoder/tests/consistent-with-header.rs index 609fd57bb..63df41b4f 100644 --- a/trace_decoder/tests/consistent-with-header.rs +++ b/trace_decoder/tests/consistent-with-header.rs @@ -11,6 +11,7 @@ use itertools::Itertools; use libtest_mimic::{Arguments, Trial}; use mpt_trie::partial_trie::PartialTrie as _; use trace_decoder::observer::DummyObserver; +use zero::prover::WIRE_DISPOSITION; fn main() -> anyhow::Result<()> { let mut trials = vec![]; @@ -29,6 +30,7 @@ fn main() -> anyhow::Result<()> { other.clone(), batch_size, &mut DummyObserver::new(), + WIRE_DISPOSITION, ) .map_err(|e| format!("{e:?}"))?; // get the full cause chain check!(gen_inputs.len() >= 2); diff --git a/trace_decoder/tests/simulate-execution.rs b/trace_decoder/tests/simulate-execution.rs index d0476c2b7..fc7136c34 100644 --- a/trace_decoder/tests/simulate-execution.rs +++ b/trace_decoder/tests/simulate-execution.rs @@ -9,6 +9,7 @@ use common::{cases, Case}; use libtest_mimic::{Arguments, Trial}; use plonky2::field::goldilocks_field::GoldilocksField; use trace_decoder::observer::DummyObserver; +use zero::prover::WIRE_DISPOSITION; fn main() -> anyhow::Result<()> { let mut trials = vec![]; @@ -20,11 +21,16 @@ fn main() -> anyhow::Result<()> { other, } in cases()? 
{ - let gen_inputs = - trace_decoder::entrypoint(trace, other, batch_size, &mut DummyObserver::new()) - .context(format!( - "error in `trace_decoder` for {name} at batch size {batch_size}" - ))?; + let gen_inputs = trace_decoder::entrypoint( + trace, + other, + batch_size, + &mut DummyObserver::new(), + WIRE_DISPOSITION, + ) + .context(format!( + "error in `trace_decoder` for {name} at batch size {batch_size}" + ))?; for (ix, gi) in gen_inputs.into_iter().enumerate() { trials.push(Trial::test( format!("{name}@{batch_size}/{ix}"), diff --git a/zero/Cargo.toml b/zero/Cargo.toml index 18da12fc3..362125e22 100644 --- a/zero/Cargo.toml +++ b/zero/Cargo.toml @@ -17,6 +17,7 @@ alloy-serde.workspace = true anyhow.workspace = true async-stream.workspace = true axum.workspace = true +cfg-if = "1.0.0" clap = { workspace = true, features = ["derive", "string"] } compat.workspace = true directories = "5.0.1" diff --git a/zero/src/bin/leader.rs b/zero/src/bin/leader.rs index d1b7c70fd..973a180d4 100644 --- a/zero/src/bin/leader.rs +++ b/zero/src/bin/leader.rs @@ -6,10 +6,11 @@ use anyhow::Result; use clap::Parser; use cli::Command; use client::RpcParams; +use paladin::config::Config; use paladin::runtime::Runtime; use tracing::info; use zero::env::load_dotenvy_vars_if_present; -use zero::prover::ProverConfig; +use zero::prover::{ProofRuntime, ProverConfig}; use zero::{ block_interval::BlockInterval, prover_state::persistence::set_circuit_cache_dir_env_if_not_set, }; @@ -24,6 +25,10 @@ mod leader { pub mod stdio; } +const HEAVY_PROOF_ROUTING_KEY: &str = "heavy-proof"; +const LIGHT_PROOF_ROUTING_KEY: &str = "light-proof"; +const DEFAULT_ROUTING_KEY: &str = paladin::runtime::DEFAULT_ROUTING_KEY; + #[tokio::main] async fn main() -> Result<()> { load_dotenvy_vars_if_present(); @@ -36,7 +41,33 @@ async fn main() -> Result<()> { return zero::prover_state::persistence::delete_all(); } - let runtime = Arc::new(Runtime::from_config(&args.paladin, register()).await?); + let mut light_proof_routing_key = DEFAULT_ROUTING_KEY.to_string(); + let mut heavy_proof_routing_key = DEFAULT_ROUTING_KEY.to_string(); + if args.worker_run_mode == cli::WorkerRunMode::Affinity { + // If we're running in affinity mode, we need to set the routing key for the + // heavy proof and light proof. 
+ info!("Workers running in affinity mode"); + light_proof_routing_key = LIGHT_PROOF_ROUTING_KEY.to_string(); + heavy_proof_routing_key = HEAVY_PROOF_ROUTING_KEY.to_string(); + } + + let light_proof_paladin_args = Config { + task_bus_routing_key: Some(light_proof_routing_key), + ..args.paladin.clone() + }; + + let heavy_proof_paladin_args = Config { + task_bus_routing_key: Some(heavy_proof_routing_key), + ..args.paladin + }; + + let light_proof = Runtime::from_config(&light_proof_paladin_args, register()).await?; + let heavy_proof = Runtime::from_config(&heavy_proof_paladin_args, register()).await?; + + let proof_runtime = Arc::new(ProofRuntime { + light_proof, + heavy_proof, + }); let prover_config: ProverConfig = args.prover_config.into(); if prover_config.block_pool_size == 0 { panic!("block-pool-size must be greater than 0"); @@ -55,7 +86,7 @@ async fn main() -> Result<()> { match args.command { Command::Stdio { previous_proof } => { let previous_proof = get_previous_proof(previous_proof)?; - stdio::stdio_main(runtime, previous_proof, Arc::new(prover_config)).await?; + stdio::stdio_main(proof_runtime, previous_proof, Arc::new(prover_config)).await?; } Command::Http { port, output_dir } => { // check if output_dir exists, is a directory, and is writable @@ -67,7 +98,7 @@ async fn main() -> Result<()> { panic!("output-dir is not a writable directory"); } - http::http_main(runtime, port, output_dir, Arc::new(prover_config)).await?; + http::http_main(proof_runtime, port, output_dir, Arc::new(prover_config)).await?; } Command::Rpc { rpc_url, @@ -86,7 +117,7 @@ async fn main() -> Result<()> { info!("Proving interval {block_interval}"); client_main( - runtime, + proof_runtime, RpcParams { rpc_url, rpc_type, diff --git a/zero/src/bin/leader/cli.rs b/zero/src/bin/leader/cli.rs index ede966493..d21af25a4 100644 --- a/zero/src/bin/leader/cli.rs +++ b/zero/src/bin/leader/cli.rs @@ -2,12 +2,14 @@ use std::path::PathBuf; use std::time::Duration; use alloy::transports::http::reqwest::Url; -use clap::{Parser, Subcommand, ValueHint}; +use clap::{Parser, Subcommand, ValueEnum, ValueHint}; use zero::parsing::parse_duration; use zero::prover::cli::CliProverConfig; use zero::prover_state::cli::CliProverStateConfig; use zero::rpc::{JumpdestSrc, RpcType}; +const WORKER_HELP_HEADING: &str = "Worker Config options"; + /// zero-bin leader config #[derive(Parser)] #[command(version = zero::version(), propagate_version = true)] @@ -25,6 +27,24 @@ pub(crate) struct Cli { // mode. #[clap(flatten)] pub(crate) prover_state_config: CliProverStateConfig, + + // Mode to use for worker for setup (affinity or default) + #[arg(long = "worker-run-mode", help_heading = WORKER_HELP_HEADING, value_enum, default_value = "default")] + pub(crate) worker_run_mode: WorkerRunMode, +} + +/// Defines the mode for worker setup in terms of job allocation: +/// +/// - `Affinity`: Workers are assigned specific types of jobs based on their +/// capabilities, distinguishing between heavy and light jobs. +/// - `Default`: No job distinction is made — any worker can handle any type of +/// job, whether heavy or light. +/// +/// This enum allows for flexible worker configuration based on workload needs. 
+#[derive(ValueEnum, Clone, PartialEq, Debug)] +pub enum WorkerRunMode { + Affinity, + Default, } #[derive(Subcommand)] diff --git a/zero/src/bin/leader/client.rs b/zero/src/bin/leader/client.rs index b999bb584..0b4c3b95b 100644 --- a/zero/src/bin/leader/client.rs +++ b/zero/src/bin/leader/client.rs @@ -4,7 +4,6 @@ use std::time::Duration; use alloy::rpc::types::{BlockId, BlockNumberOrTag}; use alloy::transports::http::reqwest::Url; use anyhow::{anyhow, Result}; -use paladin::runtime::Runtime; use tokio::sync::mpsc; use tracing::info; use zero::block_interval::{BlockInterval, BlockIntervalStream}; @@ -14,6 +13,8 @@ use zero::prover::{self, BlockProverInput, ProverConfig}; use zero::rpc::{self, JumpdestSrc}; use zero::rpc::{retry::build_http_retry_provider, RpcType}; +use crate::ProofRuntime; + #[derive(Debug)] pub struct RpcParams { pub rpc_url: Url, @@ -34,7 +35,7 @@ pub struct LeaderConfig { /// The main function for the client. pub(crate) async fn client_main( - runtime: Arc, + proof_runtime: Arc, rpc_params: RpcParams, block_interval: BlockInterval, mut leader_config: LeaderConfig, @@ -66,10 +67,10 @@ pub(crate) async fn client_main( let (block_tx, block_rx) = mpsc::channel::<(BlockProverInput, bool)>(zero::BLOCK_CHANNEL_SIZE); // Run proving task - let runtime_ = runtime.clone(); + let proof_runtime_ = proof_runtime.clone(); let proving_task = tokio::spawn(prover::prove( block_rx, - runtime_, + proof_runtime_, leader_config.previous_proof.take(), Arc::new(leader_config.prover_config), )); @@ -117,7 +118,8 @@ pub(crate) async fn client_main( } } - runtime.close().await?; + proof_runtime.light_proof.close().await?; + proof_runtime.heavy_proof.close().await?; if test_only { info!("All proof witnesses have been generated successfully."); diff --git a/zero/src/bin/leader/http.rs b/zero/src/bin/leader/http.rs index 02e968ec0..0bc90ac40 100644 --- a/zero/src/bin/leader/http.rs +++ b/zero/src/bin/leader/http.rs @@ -3,16 +3,17 @@ use std::{net::SocketAddr, path::PathBuf, sync::Arc}; use alloy::primitives::U256; use anyhow::{bail, Result}; use axum::{http::StatusCode, routing::post, Json, Router}; -use paladin::runtime::Runtime; use serde::{Deserialize, Serialize}; use serde_json::to_writer; use tracing::{debug, error, info}; use zero::proof_types::GeneratedBlockProof; use zero::prover::{BlockProverInput, ProverConfig}; +use crate::ProofRuntime; + /// The main function for the HTTP mode. pub(crate) async fn http_main( - runtime: Arc, + proof_runtime: Arc, port: u16, output_dir: PathBuf, prover_config: Arc, @@ -22,10 +23,7 @@ pub(crate) async fn http_main( let app = Router::new().route( "/prove", - post({ - let runtime = runtime.clone(); - move |body| prove(body, runtime, output_dir.clone(), prover_config) - }), + post(move |body| prove(body, proof_runtime, output_dir.clone(), prover_config)), ); let listener = tokio::net::TcpListener::bind(&addr).await?; Ok(axum::serve(listener, app).await?) 
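Worth noting how the affinity mode above is consumed: the leader builds two paladin runtimes whose only difference is the task-bus routing key, so heavy segment proving and light aggregation can be scheduled onto differently-provisioned worker pools. A minimal model of that decision (plain `std`; the literal default key is a placeholder, not paladin's actual `DEFAULT_ROUTING_KEY` value):

```rust
/// Mirror of the leader's affinity choice: in `Affinity` mode, heavy and
/// light tasks get distinct routing keys; in `Default` mode both share one.
enum WorkerRunMode {
    Affinity,
    Default,
}

fn routing_keys(mode: WorkerRunMode) -> (&'static str, &'static str) {
    match mode {
        WorkerRunMode::Affinity => ("heavy-proof", "light-proof"),
        WorkerRunMode::Default => ("task", "task"), // placeholder default key
    }
}

fn main() {
    let (heavy, light) = routing_keys(WorkerRunMode::Affinity);
    assert_ne!(heavy, light); // heavy segment proving is isolated
}
```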
@@ -62,7 +60,7 @@ struct HttpProverInput { async fn prove( Json(payload): Json, - runtime: Arc, + proof_runtime: Arc, output_dir: PathBuf, prover_config: Arc, ) -> StatusCode { @@ -74,7 +72,7 @@ async fn prove( payload .prover_input .prove_test( - runtime, + proof_runtime, payload.previous.map(futures::future::ok), prover_config, ) @@ -83,7 +81,7 @@ async fn prove( payload .prover_input .prove( - runtime, + proof_runtime, payload.previous.map(futures::future::ok), prover_config, ) diff --git a/zero/src/bin/leader/stdio.rs b/zero/src/bin/leader/stdio.rs index 306a08c65..9e451e13f 100644 --- a/zero/src/bin/leader/stdio.rs +++ b/zero/src/bin/leader/stdio.rs @@ -2,15 +2,16 @@ use std::io::Read; use std::sync::Arc; use anyhow::{anyhow, Result}; -use paladin::runtime::Runtime; use tokio::sync::mpsc; use tracing::info; use zero::proof_types::GeneratedBlockProof; use zero::prover::{self, BlockProverInput, ProverConfig}; +use crate::ProofRuntime; + /// The main function for the stdio mode. pub(crate) async fn stdio_main( - runtime: Arc, + proof_runtime: Arc, previous: Option, prover_config: Arc, ) -> Result<()> { @@ -24,9 +25,14 @@ pub(crate) async fn stdio_main( let (block_tx, block_rx) = mpsc::channel::<(BlockProverInput, bool)>(zero::BLOCK_CHANNEL_SIZE); - let runtime_ = runtime.clone(); + let proof_runtime_ = proof_runtime.clone(); let prover_config_ = prover_config.clone(); - let proving_task = tokio::spawn(prover::prove(block_rx, runtime_, previous, prover_config_)); + let proving_task = tokio::spawn(prover::prove( + block_rx, + proof_runtime_, + previous, + prover_config_, + )); let interval_len = block_prover_inputs.len(); for (index, block_prover_input) in block_prover_inputs.into_iter().enumerate() { @@ -48,7 +54,8 @@ pub(crate) async fn stdio_main( } } - runtime.close().await?; + proof_runtime.light_proof.close().await?; + proof_runtime.heavy_proof.close().await?; if prover_config.test_only { info!("All proof witnesses have been generated successfully."); diff --git a/zero/src/bin/rpc.rs b/zero/src/bin/rpc.rs index e87e9e34d..fa9d9eab1 100644 --- a/zero/src/bin/rpc.rs +++ b/zero/src/bin/rpc.rs @@ -16,6 +16,7 @@ use zero::block_interval::BlockInterval; use zero::block_interval::BlockIntervalStream; use zero::parsing::parse_duration; use zero::prover::BlockProverInput; +use zero::prover::WIRE_DISPOSITION; use zero::provider::CachedProvider; use zero::rpc; use zero::rpc::JumpdestSrc; @@ -194,6 +195,7 @@ impl Cli { block_prover_input.other_data, batch_size, &mut DummyObserver::new(), + WIRE_DISPOSITION, )?; if let Some(index) = tx_info.transaction_index { diff --git a/zero/src/bin/trie_diff.rs b/zero/src/bin/trie_diff.rs index 4c00d2ca3..c211cc528 100644 --- a/zero/src/bin/trie_diff.rs +++ b/zero/src/bin/trie_diff.rs @@ -26,6 +26,7 @@ use regex::Regex; use trace_decoder::observer::TriesObserver; use tracing::{error, info}; use zero::ops::register; +use zero::prover::WIRE_DISPOSITION; use zero::prover::{cli::CliProverConfig, BlockProverInput, ProverConfig}; /// This binary is a debugging tool used to compare @@ -97,6 +98,7 @@ async fn main() -> Result<()> { block_prover_input.other_data.clone(), prover_config.batch_size, &mut observer, + WIRE_DISPOSITION, )?; info!( "Number of collected batch tries for block {}: {}", diff --git a/zero/src/prover.rs b/zero/src/prover.rs index 6a194ddf1..7cc840f02 100644 --- a/zero/src/prover.rs +++ b/zero/src/prover.rs @@ -9,9 +9,13 @@ use std::sync::Arc; use alloy::primitives::U256; use anyhow::{Context, Result}; use evm_arithmetization::Field; -use 
futures::{future::BoxFuture, FutureExt, TryFutureExt, TryStreamExt}; +use evm_arithmetization::SegmentDataIterator; +use futures::{ + future, future::BoxFuture, stream::FuturesUnordered, FutureExt, TryFutureExt, TryStreamExt, +}; use hashbrown::HashMap; use num_traits::ToPrimitive as _; +use paladin::directive::{Directive, IndexedStream}; use paladin::runtime::Runtime; use plonky2::gates::noop::NoopGate; use plonky2::plonk::circuit_builder::CircuitBuilder; @@ -21,13 +25,25 @@ use tokio::io::AsyncWriteExt; use tokio::sync::mpsc::Receiver; use tokio::sync::{oneshot, Semaphore}; use trace_decoder::observer::DummyObserver; -use trace_decoder::{BlockTrace, OtherBlockData}; +use trace_decoder::{BlockTrace, OtherBlockData, WireDisposition}; use tracing::{error, info}; use crate::fs::generate_block_proof_file_name; use crate::ops; use crate::proof_types::GeneratedBlockProof; +/// `ProofRuntime` represents the runtime environments used for generating +/// different types of proofs. It contains separate runtimes for handling: +/// +/// - `light_proof`: Typically for smaller, less resource-intensive tasks, such +/// as aggregation. +/// - `heavy_proof`: For larger, more computationally expensive tasks, such as +/// STARK proof generation. +pub struct ProofRuntime { + pub light_proof: Runtime, + pub heavy_proof: Runtime, +} + // All proving tasks are executed concurrently, which can cause issues for large // block intervals, where distant future blocks may be proven first. // @@ -39,6 +55,18 @@ use crate::proof_types::GeneratedBlockProof; // batches as soon as they are generated. static PARALLEL_BLOCK_PROVING_PERMIT_POOL: Semaphore = Semaphore::const_new(0); +pub const WIRE_DISPOSITION: WireDisposition = { + cfg_if::cfg_if! { + if #[cfg(feature = "eth_mainnet")] { + WireDisposition::Type1 + } else if #[cfg(feature = "cdk_erigon")] { + WireDisposition::Type2 + } else { + compile_error!("must select a feature"); + } + } +}; + #[derive(Debug, Clone)] pub struct ProverConfig { pub batch_size: usize, @@ -65,14 +93,11 @@ impl BlockProverInput { pub async fn prove( self, - runtime: Arc, + proof_runtime: Arc, previous: Option>>, prover_config: Arc, ) -> Result { use anyhow::Context as _; - use evm_arithmetization::SegmentDataIterator; - use futures::{stream::FuturesUnordered, FutureExt}; - use paladin::directive::{Directive, IndexedStream}; let ProverConfig { max_cpu_len_log, @@ -88,6 +113,7 @@ impl BlockProverInput { self.other_data, batch_size, &mut DummyObserver::new(), + WIRE_DISPOSITION, )?; // Create segment proof. @@ -116,7 +142,7 @@ impl BlockProverInput { Directive::map(IndexedStream::from(segment_data_iterator), &seg_prove_ops) .fold(&seg_agg_ops) - .run(&runtime) + .run(&proof_runtime.heavy_proof) .map(move |e| { e.map(|p| (idx, crate::proof_types::BatchAggregatableProof::from(p))) }) @@ -126,7 +152,7 @@ impl BlockProverInput { // Fold the batch aggregated proof stream into a single proof. 
let final_batch_proof = Directive::fold(IndexedStream::new(batch_proof_futs), &batch_agg_ops) - .run(&runtime) + .run(&proof_runtime.light_proof) .await?; if let crate::proof_types::BatchAggregatableProof::BatchAgg(proof) = final_batch_proof { @@ -143,7 +169,7 @@ impl BlockProverInput { prev, save_inputs_on_error, }) - .run(&runtime) + .run(&proof_runtime.light_proof) .await?; info!("Successfully proved block {block_number}"); @@ -156,13 +182,12 @@ impl BlockProverInput { pub async fn prove_test( self, - runtime: Arc, + proof_runtime: Arc, previous: Option>>, prover_config: Arc, ) -> Result { use std::iter::repeat; - use futures::future; use paladin::directive::{Directive, IndexedStream}; let ProverConfig { @@ -181,6 +206,7 @@ impl BlockProverInput { self.other_data, batch_size, &mut DummyObserver::new(), + WIRE_DISPOSITION, )?; let seg_ops = ops::SegmentProofTestOnly { @@ -202,7 +228,7 @@ impl BlockProverInput { ); simulation - .run(&runtime) + .run(&proof_runtime.light_proof) .await? .try_for_each(|_| future::ok(())) .await?; @@ -236,17 +262,17 @@ impl BlockProverInput { async fn prove_block( block: BlockProverInput, - runtime: Arc, + proof_runtime: Arc, previous_block_proof: Option>>, prover_config: Arc, ) -> Result { if prover_config.test_only { block - .prove_test(runtime, previous_block_proof, prover_config) + .prove_test(proof_runtime, previous_block_proof, prover_config) .await } else { block - .prove(runtime, previous_block_proof, prover_config) + .prove(proof_runtime, previous_block_proof, prover_config) .await } } @@ -257,7 +283,7 @@ async fn prove_block( /// block proofs as well. pub async fn prove( mut block_receiver: Receiver<(BlockProverInput, bool)>, - runtime: Arc, + proof_runtime: Arc, checkpoint_proof: Option, prover_config: Arc, ) -> Result<()> { @@ -277,7 +303,7 @@ pub async fn prove( let (tx, rx) = oneshot::channel::(); let prover_config = prover_config.clone(); let previous_block_proof = prev_proof.take(); - let runtime = runtime.clone(); + let proof_runtime = proof_runtime.clone(); let block_number = block_prover_input.get_block_number(); let prove_permit = PARALLEL_BLOCK_PROVING_PERMIT_POOL.acquire().await?; @@ -288,7 +314,7 @@ pub async fn prove( // Prove the block let block_proof = prove_block( block_prover_input, - runtime, + proof_runtime, previous_block_proof, prover_config.clone(), ) diff --git a/zero/src/prover_state/mod.rs b/zero/src/prover_state/mod.rs index edbad02ba..8cdfb45ce 100644 --- a/zero/src/prover_state/mod.rs +++ b/zero/src/prover_state/mod.rs @@ -174,45 +174,30 @@ impl ProverStateManager { &self, config: &StarkConfig, all_proof: &AllProof, - ) -> anyhow::Result<[(RecursiveCircuitsForTableSize, u8); NUM_TABLES]> { + ) -> anyhow::Result<[Option<(RecursiveCircuitsForTableSize, u8)>; NUM_TABLES]> { let degrees = all_proof.degree_bits(config); - /// Given a recursive circuit index (e.g., Arithmetic / 0), return a - /// tuple containing the loaded table at the specified size and - /// its offset relative to the configured range used to pre-process the - /// circuits. - macro_rules! circuit { - ($circuit_index:expr) => { - ( - RecursiveCircuitResource::get(&( - $circuit_index.into(), - degrees[$circuit_index], + // Given a recursive circuit index (e.g., Arithmetic / 0), return a + // tuple containing the loaded table at the specified size and + // its offset relative to the configured range used to pre-process the + // circuits. 
+ let circuits = core::array::from_fn(|i| match degrees[i] { + Some(size) => RecursiveCircuitResource::get(&(i.into(), size)) + .map(|circuit_resource| { + Some(( + circuit_resource, + (size - self.circuit_config[i].start) as u8, )) - .map_err(|e| { - let circuit: $crate::prover_state::circuit::Circuit = $circuit_index.into(); - let size = degrees[$circuit_index]; - anyhow::Error::from(e).context(format!( - "Attempting to load circuit: {circuit:?} at size: {size}" - )) - })?, - (degrees[$circuit_index] - self.circuit_config[$circuit_index].start) as u8, - ) - }; - } + }) + .map_err(|e| { + anyhow::Error::from(e) + .context(format!("Attempting to load circuit: {i} at size: {size}")) + }) + .unwrap_or(None), + None => None, + }); - Ok([ - circuit!(0), - circuit!(1), - circuit!(2), - circuit!(3), - circuit!(4), - circuit!(5), - circuit!(6), - circuit!(7), - circuit!(8), - #[cfg(feature = "cdk_erigon")] - circuit!(9), - ]) + Ok(circuits) } /// Generate a segment proof using the specified input, loading