From 061fb0c14fe51cbc67dc64bcd56a817d4d8f3be1 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Wed, 26 Nov 2025 18:15:00 +0530 Subject: [PATCH 01/16] feat: FRI commitment scheme - Added a FRI commitment scheme - Implemented core structures and logic for FRI including configuration, commitment, proof generation, and verification. - Added encoding utilities for converting byte data to packed multilinear elements. - Created error handling for various FRI operations. - Developed end-to-end tests for FRI commitment, proof generation, and verification processes. - Included Merkle inclusion proof verification for committed codewords. --- Cargo.lock | 374 +++++++++++++++++++++++++++++++++++++----- Cargo.toml | 9 +- build_test.sh | 6 + fri/Cargo.toml | 30 ++++ fri/src/config.rs | 12 ++ fri/src/core.rs | 303 ++++++++++++++++++++++++++++++++++ fri/src/encoding.rs | 102 ++++++++++++ fri/src/error.rs | 35 ++++ fri/src/lib.rs | 93 +++++++++++ fri/src/sampling.rs | 90 ++++++++++ fri/src/tests.rs | 105 ++++++++++++ fri/src/transcript.rs | 38 +++++ 12 files changed, 1153 insertions(+), 44 deletions(-) create mode 100644 fri/Cargo.toml create mode 100644 fri/src/config.rs create mode 100644 fri/src/core.rs create mode 100644 fri/src/encoding.rs create mode 100644 fri/src/error.rs create mode 100644 fri/src/lib.rs create mode 100644 fri/src/sampling.rs create mode 100644 fri/src/tests.rs create mode 100644 fri/src/transcript.rs diff --git a/Cargo.lock b/Cargo.lock index f90ab691..17128be1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -52,7 +52,7 @@ version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom", + "getrandom 0.2.15", "once_cell", "version_check", ] @@ -64,7 +64,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42cd52102d3df161c77a887b608d7a4897d7cc112886a9537b738a887a03aaff" dependencies = [ "cfg-if", - "getrandom", + "getrandom 0.2.15", "once_cell", "version_check", "zerocopy", @@ -137,7 +137,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21cc1548309245035eb18aa7f0967da6bc65587005170c56e6ef2788a4cf3f4e" dependencies = [ "include_dir", - "itertools", + "itertools 0.10.5", "proc-macro-error", "proc-macro2", "quote", @@ -230,7 +230,7 @@ dependencies = [ "ark-std", "derivative", "hashbrown 0.13.2", - "itertools", + "itertools 0.10.5", "num-traits", "rayon", "zeroize", @@ -298,7 +298,7 @@ dependencies = [ "ark-std", "derivative", "digest 0.10.7", - "itertools", + "itertools 0.10.5", "num-bigint", "num-traits", "paste", @@ -414,7 +414,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand", + "rand 0.8.5", "rayon", ] @@ -491,6 +491,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "auto_impl" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + [[package]] name = "autocfg" version = "1.1.0" @@ -517,7 +528,7 @@ dependencies = [ "num-traits", "parity-scale-codec", "primitive-types", - "rand", + "rand 0.8.5", "scale-info", "serde", "serde_json", @@ -563,7 +574,7 @@ dependencies = [ "ark-serialize", "ark-std", "dleq_vrf", - "rand_chacha", + "rand_chacha 0.3.1", "rand_core 0.6.4", "ring", "sha2 
0.10.8", @@ -608,6 +619,119 @@ dependencies = [ "serde", ] +[[package]] +name = "binius-core" +version = "0.1.0" +source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +dependencies = [ + "binius-utils", + "bytemuck", + "bytes", + "thiserror 2.0.11", +] + +[[package]] +name = "binius-field" +version = "0.1.0" +source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +dependencies = [ + "binius-utils", + "bytemuck", + "cfg-if", + "derive_more", + "rand 0.9.2", + "seq-macro", + "thiserror 2.0.11", +] + +[[package]] +name = "binius-math" +version = "0.1.0" +source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +dependencies = [ + "binius-field", + "binius-utils", + "bytemuck", + "getset", + "itertools 0.14.0", + "rand 0.9.2", + "thiserror 2.0.11", + "tracing", + "uninit", +] + +[[package]] +name = "binius-prover" +version = "0.1.0" +source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +dependencies = [ + "binius-core", + "binius-field", + "binius-math", + "binius-transcript", + "binius-utils", + "binius-verifier", + "bytemuck", + "bytes", + "derive_more", + "digest 0.10.7", + "either", + "getset", + "itertools 0.14.0", + "rand 0.9.2", + "thiserror 2.0.11", + "tracing", +] + +[[package]] +name = "binius-transcript" +version = "0.1.0" +source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +dependencies = [ + "auto_impl", + "binius-field", + "binius-utils", + "bytes", + "digest 0.10.7", + "thiserror 2.0.11", + "tracing", +] + +[[package]] +name = "binius-utils" +version = "0.1.0" +source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +dependencies = [ + "bytemuck", + "bytes", + "cfg-if", + "generic-array 0.14.7", + "itertools 0.14.0", + "rayon", + "thiserror 2.0.11", + "trait-set", +] + +[[package]] +name = "binius-verifier" +version = "0.1.0" +source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +dependencies = [ + "binius-core", + "binius-field", + "binius-math", + "binius-transcript", + "binius-utils", + "bytemuck", + "bytes", + "digest 0.10.7", + "getset", + "itertools 0.14.0", + "sha2 0.10.8", + "thiserror 2.0.11", + "tracing", +] + [[package]] name = "bip39" version = "2.0.0" @@ -615,7 +739,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93f2635620bf0b9d4576eb7bb9a38a55df78bd1205d26fa994b25911a69f212f" dependencies = [ "bitcoin_hashes", - "rand", + "rand 0.8.5", "rand_core 0.6.4", "serde", "unicode-normalization", @@ -778,9 +902,23 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "bytemuck" -version = "1.14.3" +version = "1.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2ef034f05691a48569bd920a96c81b9d91bbad1ab5ac7c4616c1f6ef36cb79f" +checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" +dependencies = [ + "bytemuck_derive", +] + +[[package]] +name = "bytemuck_derive" +version = "1.10.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] [[package]] name = "byteorder" @@ -790,9 +928,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.5.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" [[package]] name = "cast" @@ -926,7 +1064,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ - "getrandom", + "getrandom 0.2.15", "once_cell", "tiny-keccak", ] @@ -943,6 +1081,12 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd7e35aee659887cbfb97aaf227ac12cad1a9d7c71e55ff3376839ed4e282d08" +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation-sys" version = "0.8.6" @@ -997,7 +1141,7 @@ dependencies = [ "ciborium", "clap", "criterion-plot", - "itertools", + "itertools 0.10.5", "lazy_static", "num-traits", "oorandom", @@ -1017,7 +1161,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" dependencies = [ "cast", - "itertools", + "itertools 0.10.5", ] [[package]] @@ -1173,8 +1317,10 @@ version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ + "convert_case", "proc-macro2", "quote", + "rustc_version", "syn 1.0.109", ] @@ -1333,9 +1479,9 @@ dependencies = [ [[package]] name = "either" -version = "1.10.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] name = "elliptic-curve" @@ -1477,7 +1623,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" dependencies = [ "byteorder", - "rand", + "rand 0.8.5", "rustc-hex", "static_assertions", ] @@ -1560,7 +1706,7 @@ dependencies = [ "derive-syn-parse", "expander", "frame-support-procedural-tools", - "itertools", + "itertools 0.10.5", "macro_magic", "proc-macro-warning", "proc-macro2", @@ -1591,6 +1737,20 @@ dependencies = [ "syn 2.0.96", ] +[[package]] +name = "fri" +version = "0.1.0" +dependencies = [ + "binius-field", + "binius-math", + "binius-prover", + "binius-transcript", + "binius-verifier", + "log", + "rand 0.9.2", + "rayon", +] + [[package]] name = "fs-err" version = "2.11.0" @@ -1727,16 +1887,40 @@ dependencies = [ "wasi", ] +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", +] + [[package]] name = "getrandom_or_panic" version = "0.0.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6ea1015b5a70616b688dc230cfe50c8af89d972cb132d5a622814d29773b10b9" dependencies = [ - "rand", + "rand 0.8.5", "rand_core 0.6.4", ] +[[package]] +name = "getset" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf0fc11e47561d47397154977bc219f4cf809b2974facc3ccb3b89e2436f912" +dependencies = [ + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.96", +] + [[package]] name = "gimli" version = "0.27.3" @@ -2034,6 +2218,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.10" @@ -2079,8 +2272,8 @@ dependencies = [ "parity-scale-codec", "poly-multiproof", "proptest", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "rayon", "serde", "serde_json", @@ -2103,8 +2296,8 @@ dependencies = [ "once_cell", "parity-scale-codec", "poly-multiproof", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "serde", "sp-std", "static_assertions", @@ -2152,7 +2345,7 @@ dependencies = [ "libsecp256k1-core", "libsecp256k1-gen-ecmult", "libsecp256k1-gen-genmult", - "rand", + "rand 0.8.5", "serde", "sha2 0.9.9", "typenum", @@ -2703,6 +2896,28 @@ dependencies = [ "version_check", ] +[[package]] +name = "proc-macro-error-attr2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "proc-macro-error2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" +dependencies = [ + "proc-macro-error-attr2", + "proc-macro2", + "quote", + "syn 2.0.96", +] + [[package]] name = "proc-macro-warning" version = "1.0.2" @@ -2734,8 +2949,8 @@ dependencies = [ "bitflags 2.4.2", "lazy_static", "num-traits", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "rand_xorshift", "regex-syntax 0.8.2", "rusty-fork", @@ -2767,6 +2982,12 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + [[package]] name = "radium" version = "0.7.0" @@ -2780,10 +3001,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", + "rand_chacha 0.3.1", "rand_core 0.6.4", ] +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", +] + [[package]] name = "rand_chacha" version = "0.3.1" @@ -2794,6 +3025,16 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", +] + [[package]] name = "rand_core" version = "0.5.1" @@ -2806,7 +3047,16 
@@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom", + "getrandom 0.2.15", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.4", ] [[package]] @@ -2826,9 +3076,9 @@ checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" [[package]] name = "rayon" -version = "1.8.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7237101a77a10773db45d62004a272517633fbcc3df19d96455ede1122e051" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" dependencies = [ "either", "rayon-core", @@ -2836,9 +3086,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.1" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" dependencies = [ "crossbeam-deque", "crossbeam-utils", @@ -3159,6 +3409,12 @@ version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" +[[package]] +name = "seq-macro" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc711410fbe7399f390ca1c3b60ad0f53f80e95c5eb935e52268a0e2cd49acc" + [[package]] name = "serde" version = "1.0.196" @@ -3404,7 +3660,7 @@ dependencies = [ "hash-db", "hash256-std-hasher", "impl-serde 0.4.0", - "itertools", + "itertools 0.10.5", "libsecp256k1", "log", "merlin 3.0.0", @@ -3412,7 +3668,7 @@ dependencies = [ "parking_lot", "paste", "primitive-types", - "rand", + "rand 0.8.5", "scale-info", "schnorrkel 0.11.4", "secp256k1", @@ -3592,7 +3848,7 @@ dependencies = [ "log", "parity-scale-codec", "paste", - "rand", + "rand 0.8.5", "scale-info", "serde", "simple-mermaid", @@ -3658,7 +3914,7 @@ dependencies = [ "log", "parity-scale-codec", "parking_lot", - "rand", + "rand 0.8.5", "smallvec", "sp-core", "sp-externalities", @@ -3712,7 +3968,7 @@ dependencies = [ "nohash-hasher", "parity-scale-codec", "parking_lot", - "rand", + "rand 0.8.5", "scale-info", "schnellru", "sp-core", @@ -4165,6 +4421,17 @@ dependencies = [ "tracing-serde", ] +[[package]] +name = "trait-set" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b79e2e9c9ab44c6d7c20d5976961b47e8f49ac199154daa514b77cd1ab536625" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "trie-db" version = "0.28.0" @@ -4215,7 +4482,7 @@ checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ "cfg-if", "digest 0.10.7", - "rand", + "rand 0.8.5", "static_assertions", ] @@ -4270,6 +4537,12 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +[[package]] +name = "uninit" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "359fdaaabedff944f100847f2e0ea88918d8012fe64baf5b54c191ad010168c9" + [[package]] name = "url" version = "2.5.0" @@ -4308,8 +4581,8 @@ dependencies = [ "arrayref", "constcat", 
"digest 0.10.7", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "rand_core 0.6.4", "sha2 0.10.8", "sha3", @@ -4342,6 +4615,15 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] + [[package]] name = "wasm-bindgen" version = "0.2.91" @@ -4518,7 +4800,7 @@ dependencies = [ "memfd", "memoffset", "paste", - "rand", + "rand 0.8.5", "rustix 0.36.17", "wasmtime-asm-macros", "wasmtime-environ", @@ -4804,6 +5086,12 @@ dependencies = [ "memchr", ] +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + [[package]] name = "wyz" version = "0.5.1" diff --git a/Cargo.toml b/Cargo.toml index fb18a2b8..421b6f45 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] resolver = "2" -members = ["core", "kate/recovery", "kate"] +members = ["core", "kate/recovery", "kate", "fri"] [workspace.dependencies] # Parity @@ -50,6 +50,13 @@ sha3 = { version = "0.10.0", default-features = false } poly-multiproof = { git = "https://github.com/availproject/poly-multiproof", rev="c2794e32ca040e6b2544abde18b7187af3e66feb", default-features = false, features = ["ark-bls12-381", "blst"]} hash-db = { version = "0.16.0", default-features = false } +# fri +binius-prover = { git = "https://github.com/binius-zk/binius64.git", rev = "fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" } +binius-verifier = { git = "https://github.com/binius-zk/binius64.git", rev = "fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" } +binius-math = { git = "https://github.com/binius-zk/binius64.git", rev = "fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" } +binius-field = { git = "https://github.com/binius-zk/binius64.git", rev = "fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" } +binius-transcript = { git = "https://github.com/binius-zk/binius64.git", rev = "fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" } + # Others rayon = "1.5.2" once_cell = "1.8.0" diff --git a/build_test.sh b/build_test.sh index 936e7511..77a327d0 100755 --- a/build_test.sh +++ b/build_test.sh @@ -33,3 +33,9 @@ cargo check --no-default-features --features "std" cargo check --no-default-features --features "std, serde" cargo check --target wasm32-unknown-unknown --no-default-features cargo check --target wasm32-unknown-unknown --no-default-features --features "serde" + + +# Fri +cd ../fri +cargo check +cargo check --no-default-features --features "std" \ No newline at end of file diff --git a/fri/Cargo.toml b/fri/Cargo.toml new file mode 100644 index 00000000..082dcb6e --- /dev/null +++ b/fri/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "fri" +version = "0.1.0" +edition = "2021" + +[dependencies] +binius-prover = { workspace = true } +binius-verifier = { workspace = true } +binius-math = { workspace = true } +binius-field = { workspace = true } +binius-transcript = { workspace = true } + +rayon = { version = "1.10", optional = true } +log = { workspace = true, optional = true } + +[dev-dependencies] +rand = { version = "0.9.1", default-features = false, features = ["std", "std_rng"] } + +[features] +default = ["std"] + +# This crate is node-side only currently 
+std = [ + "log", +] + +# Enable rayon-based parallel code paths +parallel = ["std", "rayon"] + +testing = [] diff --git a/fri/src/config.rs b/fri/src/config.rs new file mode 100644 index 00000000..dcb5cd7d --- /dev/null +++ b/fri/src/config.rs @@ -0,0 +1,12 @@ +/// Parameters that Avail config / node code will set. +#[derive(Clone, Copy, Debug)] +pub struct FriParamsConfig { + /// log2(1 / Reed–Solomon code rate). + pub log_inv_rate: usize, + /// Number of FRI test queries (soundness parameter). + pub num_test_queries: usize, + /// log2(number of “shares” / repetitions). + pub log_num_shares: usize, + /// Number of multilinear variables (depends on data size). + pub n_vars: usize, +} diff --git a/fri/src/core.rs b/fri/src/core.rs new file mode 100644 index 00000000..f65a5963 --- /dev/null +++ b/fri/src/core.rs @@ -0,0 +1,303 @@ +use crate::config::FriParamsConfig; +use crate::error::FriBiniusError; +use crate::transcript::{Challenger, VerifierTr}; + +use binius_field::{ExtensionField, Field, PackedExtension, PackedField}; +use binius_math::{ + BinarySubspace, FieldBuffer, ReedSolomonCode, + inner_product::inner_product, + multilinear::eq::eq_ind_partial_eval, + ntt::{ + NeighborsLastMultiThread, + domain_context::{self, GenericPreExpanded}, + }, +}; +use binius_prover::{ + fri::CommitOutput, + hash::parallel_compression::ParallelCompressionAdaptor, + merkle_tree::{MerkleTreeProver, prover::BinaryMerkleTreeProver}, + pcs::OneBitPCSProver, +}; +use binius_transcript::ProverTranscript; +use binius_verifier::{ + config::{B1, B128}, + fri::FRIParams, + hash::{StdCompression, StdDigest}, + merkle_tree::MerkleTreeScheme, + pcs::verify as fri_verify, +}; + +#[cfg(test)] +use rand::{CryptoRng, RngCore}; +#[cfg(test)] +use binius_field::Random; + +// Concrete merkle prover type we’ll use everywhere. +pub type DefaultMerkleProver = + BinaryMerkleTreeProver>; + +/// The committed Merkle-tree data type for a given scalar field. +pub type MerkleCommitted = , +> as MerkleTreeProver>::Committed; + +/// Our PCS commit output type specialization. +pub type FriCommitOutput

<P> = CommitOutput<P, Vec<u8>, MerkleCommitted<<P as PackedField>
::Scalar>>; + +/// Commitment object that we can serialize. +#[derive(Clone, Debug)] +pub struct FriCommitment { + pub digest: [u8; 32], +} + +/// Evaluation proof object that we can propagate. +#[derive(Clone, Debug)] +pub struct FriProof { + pub commitment: FriCommitment, + pub evaluation_point: Vec, + pub evaluation_claim: B128, + pub transcript_bytes: Vec, +} + +/// Context holding FRI parameters + NTT domain. +pub struct FriContext { + pub fri_params: FRIParams, + pub ntt: NeighborsLastMultiThread>, +} + +pub struct FriBiniusPCS { + cfg: FriParamsConfig, + merkle_prover: DefaultMerkleProver, +} + +impl FriBiniusPCS { + pub fn new(cfg: FriParamsConfig) -> Self { + Self { + merkle_prover: DefaultMerkleProver::new(ParallelCompressionAdaptor::new( + StdCompression::default(), + )), + cfg, + } + } + + pub fn initialize_fri_context

<P>(
+		&self,
+		packed_buffer: &FieldBuffer<P>
, + ) -> Result + where + P: PackedField + PackedExtension + PackedExtension, + { + let committed_rs_code = + ReedSolomonCode::::new(packed_buffer.log_len(), self.cfg.log_inv_rate) + .map_err(|e| FriBiniusError::ReedSolomonInit(e.to_string()))?; + + let fri_log_batch_size = 0; + + let fri_arities = if P::LOG_WIDTH == 2 { + vec![2, 2] + } else { + vec![2; packed_buffer.log_len() / 2] + }; + + let fri_params = FRIParams::new( + committed_rs_code.clone(), + fri_log_batch_size, + fri_arities, + self.cfg.num_test_queries, + ) + .map_err(|e| FriBiniusError::FriParamsInit(e.to_string()))?; + + let subspace = BinarySubspace::with_dim(fri_params.rs_code().log_len()) + .map_err(|e| FriBiniusError::DomainInit(e.to_string()))?; + + let domain_context = domain_context::GenericPreExpanded::generate_from_subspace(&subspace); + let ntt = NeighborsLastMultiThread::new(domain_context, self.cfg.log_num_shares); + + Ok(FriContext { fri_params, ntt }) + } + + #[cfg(test)] + pub fn sample_evaluation_point(&self, rng: &mut R) -> Vec { + let mut point = Vec::with_capacity(self.cfg.n_vars); + for _ in 0..self.cfg.n_vars { + point.push(B128::random(&mut *rng)); + } + point + } + + pub fn calculate_evaluation_claim( + &self, + values: &[B128], + evaluation_point: &[B128], + ) -> Result { + // convert large-field MLE -> small-field MLE over B1 + let small_mle = large_field_mle_to_small_field::(values); + let lifted = lift_small_to_large_field::(&small_mle); + + let eq_vals = eq_ind_partial_eval(evaluation_point).as_ref().to_vec(); + + Ok(inner_product::(lifted, eq_vals)) + } + + pub fn commit

<P>(
+		&self,
+		packed_mle: &FieldBuffer<P>
, + ctx: &FriContext, + ) -> Result, FriBiniusError> + where + P: PackedField + PackedExtension + PackedExtension, + { + let pcs = OneBitPCSProver::new(&ctx.ntt, &self.merkle_prover, &ctx.fri_params); + + let commit_output = pcs + .commit(packed_mle.clone()) + .map_err(|e| FriBiniusError::Commitment(e.to_string()))?; + + Ok(CommitOutput { + codeword: commit_output.codeword, + commitment: commit_output.commitment.to_vec(), + committed: commit_output.committed, + }) + } + + /// Generate a FRI evaluation proof. + pub fn prove

<P>(
+		&self,
+		values: &[B128],
+		packed_mle: &FieldBuffer<P>,
+		ctx: &FriContext,
+		commit_output: &FriCommitOutput<P>
, + evaluation_point: &[B128], + ) -> Result + where + P: PackedField + PackedExtension + PackedExtension, + { + // 1) Compute evaluation claim from scalar values + let evaluation_claim = self.calculate_evaluation_claim(values, evaluation_point)?; + + // 2) Set up PCS prover and transcript + let pcs = OneBitPCSProver::new(&ctx.ntt, &self.merkle_prover, &ctx.fri_params); + let mut prover_transcript = ProverTranscript::new(Challenger::default()); + + // First write commitment bytes into transcript + prover_transcript + .message() + .write_bytes(&commit_output.commitment); + + // 3) Run FRI proof generation + pcs.prove( + &commit_output.codeword, + &commit_output.committed, + packed_mle.clone(), + evaluation_point.to_vec(), + &mut prover_transcript, + ) + .map_err(|e| FriBiniusError::Proof(e.to_string()))?; + + // 4) Turn prover transcript into verifier transcript and serialize it + let verifier_transcript: VerifierTr = prover_transcript.into_verifier(); + let transcript_bytes = crate::transcript::transcript_to_bytes(&verifier_transcript); + + // 5) Extract commitment digest as [u8; 32] + let digest: [u8; 32] = commit_output + .commitment + .as_slice() + .try_into() + .expect("commitment is 32 bytes by construction"); + + Ok(FriProof { + commitment: FriCommitment { digest }, + evaluation_point: evaluation_point.to_vec(), + evaluation_claim, + transcript_bytes, + }) + } + + /// Verify a proof produced by `prove`. + pub fn verify(&self, proof: &FriProof, ctx: &FriContext) -> Result<(), FriBiniusError> { + // Reconstruct transcript from bytes + let mut transcript = + crate::transcript::transcript_from_bytes(proof.transcript_bytes.clone()); + + let retrieved_commitment = transcript + .message() + .read() + .map_err(|e| FriBiniusError::Transcript(e.to_string()))?; + + let merkle_scheme = self.merkle_prover.scheme().clone(); + + // Call the Binius FRI verification routine + fri_verify( + &mut transcript, + proof.evaluation_claim, + &proof.evaluation_point, + retrieved_commitment, + &ctx.fri_params, + &merkle_scheme, + ) + .map_err(|e| FriBiniusError::Verification(e.to_string())) + } + + /// Inclusion proof: Merkle opening for a particular codeword index. + pub fn inclusion_proof

( + &self, + committed: &MerkleCommitted, + index: usize, + ) -> Result + where + P: PackedField + PackedExtension + PackedExtension, + { + let mut proof_writer = ProverTranscript::new(Challenger::default()); + self.merkle_prover + .prove_opening(committed, 0, index, &mut proof_writer.message()) + .map_err(|e| FriBiniusError::Merkle(e.to_string()))?; + + Ok(proof_writer.into_verifier()) + } + + /// Verify inclusion proof for a given leaf. + pub fn verify_inclusion_proof( + &self, + verifier_transcript: &mut VerifierTr, + data: &[B128], + index: usize, + ctx: &FriContext, + commitment: &FriCommitment, + ) -> Result<(), FriBiniusError> { + let tree_depth = ctx.fri_params.rs_code().log_len(); + + self.merkle_prover + .scheme() + .verify_opening( + index, + data, + 0, + tree_depth, + &[commitment.digest.into()], + &mut verifier_transcript.message(), + ) + .map_err(|e| FriBiniusError::Merkle(e.to_string())) + } +} + +// Helper conversions at bottom +fn lift_small_to_large_field(small_field_elms: &[F]) -> Vec +where + F: Field, + FE: Field + ExtensionField, +{ + small_field_elms.iter().map(|&elm| FE::from(elm)).collect() +} + +fn large_field_mle_to_small_field(large_field_mle: &[FE]) -> Vec +where + F: Field, + FE: Field + ExtensionField, +{ + large_field_mle + .iter() + .flat_map(|elm| ExtensionField::::iter_bases(elm)) + .collect() +} diff --git a/fri/src/encoding.rs b/fri/src/encoding.rs new file mode 100644 index 00000000..8b9ba018 --- /dev/null +++ b/fri/src/encoding.rs @@ -0,0 +1,102 @@ +use crate::error::FriBiniusError; +use binius_field::{ExtensionField, PackedField}; +use binius_math::FieldBuffer; +use binius_verifier::config::B1; +use core::marker::PhantomData; + +const BYTES_PER_ELEMENT: usize = 16; // 128 bits +const BITS_PER_ELEMENT: usize = 128; + +pub struct BytesEncoder

<P> {
+	log_scalar_bit_width: usize,
+	_p: PhantomData<P>
, +} + +pub struct PackedMLE

<P>
+where
+	P: PackedField + ExtensionField<B1>,
+	P::Scalar: From<u128> + ExtensionField<B1>,
+{
+	pub packed_mle: FieldBuffer<P>
, + pub packed_values: Vec, + pub total_n_vars: usize, +} + +impl

<P> BytesEncoder<P>
+where + P: PackedField + ExtensionField, + P::Scalar: From + ExtensionField, +{ + pub fn new() -> Self { + Self { + log_scalar_bit_width: >::LOG_DEGREE, + _p: PhantomData, + } + } + + pub fn bytes_to_packed_mle(&self, data: &[u8]) -> Result, FriBiniusError> { + if data.is_empty() { + return Err(FriBiniusError::InvalidInput("input data must be non-empty")); + } + + // Number of 128-bit field elements needed + let num_elements = (data.len() * 8).div_ceil(BITS_PER_ELEMENT); + let padded_size = num_elements.next_power_of_two(); + + let big_field_n_vars = padded_size.ilog2() as usize; + log::debug!("N vars (big field): {big_field_n_vars}"); + + let packed_size = 1 << big_field_n_vars; + log::debug!("Packed size: {packed_size}"); + + #[cfg(feature = "parallel")] + let mut packed_values: Vec = { + use rayon::prelude::*; + data.par_chunks(BYTES_PER_ELEMENT) + .map(|chunk| { + let mut bytes_array = [0u8; BYTES_PER_ELEMENT]; + bytes_array[..chunk.len()].copy_from_slice(chunk); + P::Scalar::from(u128::from_le_bytes(bytes_array)) + }) + .collect() + }; + + #[cfg(not(feature = "parallel"))] + let mut packed_values: Vec = { + let mut values = Vec::with_capacity(num_elements); + for chunk in data.chunks(BYTES_PER_ELEMENT) { + let mut bytes_array = [0u8; BYTES_PER_ELEMENT]; + bytes_array[..chunk.len()].copy_from_slice(chunk); + let scalar = P::Scalar::from(u128::from_le_bytes(bytes_array)); + values.push(scalar); + } + values + }; + + log::debug!("Packed values before padding: {}", packed_values.len()); + packed_values.resize(packed_size, P::Scalar::zero()); + log::debug!("Packed values after padding: {}", packed_values.len()); + + let packed_mle = FieldBuffer::

<P>::from_values(&packed_values)
+			.map_err(|e| FriBiniusError::Encoding(e.to_string()))?;
+
+		let big_field_n_vars = packed_mle.log_len();
+		let total_n_vars = big_field_n_vars + self.log_scalar_bit_width;
+
+		Ok(PackedMLE::<P>
{ + packed_mle, + packed_values, + total_n_vars, + }) + } +} + +impl

<P> Default for BytesEncoder<P>
+where + P: PackedField + ExtensionField, + P::Scalar: From + ExtensionField, +{ + fn default() -> Self { + Self::new() + } +} diff --git a/fri/src/error.rs b/fri/src/error.rs new file mode 100644 index 00000000..c332339c --- /dev/null +++ b/fri/src/error.rs @@ -0,0 +1,35 @@ +use core::fmt; + +#[derive(Debug)] +pub enum FriBiniusError { + ReedSolomonInit(String), + FriParamsInit(String), + DomainInit(String), + Commitment(String), + Proof(String), + Verification(String), + Merkle(String), + Encoding(String), + Transcript(String), + Reconstruction(String), + InvalidInput(&'static str), +} + +impl fmt::Display for FriBiniusError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use FriBiniusError::*; + match self { + ReedSolomonInit(e) => write!(f, "Reed-Solomon init error: {e}"), + FriParamsInit(e) => write!(f, "FRI params init error: {e}"), + DomainInit(e) => write!(f, "Domain init error: {e}"), + Commitment(e) => write!(f, "Commitment error: {e}"), + Proof(e) => write!(f, "Proof error: {e}"), + Verification(e) => write!(f, "Verification error: {e}"), + Merkle(e) => write!(f, "Merkle error: {e}"), + Encoding(e) => write!(f, "Encoding error: {e}"), + Transcript(e) => write!(f, "Transcript error: {e}"), + Reconstruction(e) => write!(f, "Reconstruction error: {e}"), + InvalidInput(msg) => write!(f, "Invalid input: {msg}"), + } + } +} diff --git a/fri/src/lib.rs b/fri/src/lib.rs new file mode 100644 index 00000000..28e4420f --- /dev/null +++ b/fri/src/lib.rs @@ -0,0 +1,93 @@ +mod config; +mod core; +mod encoding; +mod error; +#[cfg(feature = "testing")] +mod sampling; +mod transcript; + +#[cfg(test)] +mod tests; + +pub use config::FriParamsConfig; +pub use core::{FriBiniusPCS, FriCommitment, FriContext, FriProof}; +pub use encoding::{BytesEncoder, PackedMLE}; +pub use error::FriBiniusError; +#[cfg(feature = "testing")] +pub use sampling::reconstruct_codeword_naive; +pub use transcript::{VerifierTr, transcript_from_bytes, transcript_to_bytes}; + +#[cfg(test)] +pub mod e2e_helpers { + use crate::core::FriCommitOutput; + + use super::*; + use binius_verifier::config::B128; + use rand::{CryptoRng, RngCore}; + + /// Prepare everything from raw bytes up to a commitment: + /// - bytes -> PackedMLE + /// - derive `n_vars` from the data + /// - build PCS + context + /// - commit + pub fn commit_bytes( + mut cfg: FriParamsConfig, + data: &[u8], + ) -> Result< + ( + FriBiniusPCS, + FriContext, + PackedMLE, + FriCommitOutput, + FriCommitment, + ), + FriBiniusError, + > { + // 1. bytes -> packed MLE + let encoder = BytesEncoder::::new(); + let packed = encoder.bytes_to_packed_mle(data)?; + + // 2. fill in n_vars from data (UX: user doesn't have to know it) + cfg.n_vars = packed.total_n_vars; + + // 3. PCS + FRI context + let pcs = FriBiniusPCS::new(cfg); + let ctx = pcs.initialize_fri_context(&packed.packed_mle)?; + + // 4. Commit + let commit_output = pcs.commit::(&packed.packed_mle, &ctx)?; + let digest: [u8; 32] = commit_output + .commitment + .as_slice() + .try_into() + .expect("Binius commitment is 32 bytes"); + + let commitment = FriCommitment { digest }; + + Ok((pcs, ctx, packed, commit_output, commitment)) + } + + /// Convenience: commit + one evaluation proof + verification. + pub fn commit_prove_verify_bytes( + cfg: FriParamsConfig, + data: &[u8], + rng: &mut R, + ) -> Result<(), FriBiniusError> { + let (pcs, ctx, packed, commit_output, _commitment) = commit_bytes(cfg, data)?; + + // 1. Sample evaluation point + let eval_point = pcs.sample_evaluation_point(rng); + + // 2. 
Generate proof + let proof = pcs.prove::( + &packed.packed_values, + &packed.packed_mle, + &ctx, + &commit_output, + &eval_point, + )?; + + // 3. Verify + pcs.verify(&proof, &ctx) + } +} diff --git a/fri/src/sampling.rs b/fri/src/sampling.rs new file mode 100644 index 00000000..c0e0fac4 --- /dev/null +++ b/fri/src/sampling.rs @@ -0,0 +1,90 @@ +use crate::error::FriBiniusError; +use binius_field::{Field, PackedField}; +use binius_verifier::config::B128; + +#[cfg(feature = "parallel")] +use rayon::prelude::*; + +/// Extremely naive Reed–Solomon erasure reconstruction for testing only. +pub fn reconstruct_codeword_naive( + corrupted_codeword: &mut [B128], + corrupted_indices: &[usize], +) -> Result<(), FriBiniusError> { + let n = corrupted_codeword.len(); + if corrupted_indices.is_empty() || n == 0 { + return Ok(()); + } + + let domain: Vec = (0..n).map(|i| B128::from(i as u128)).collect(); + + let known: Vec<(B128, B128)> = (0..n) + .filter(|i| !corrupted_indices.contains(i)) + .map(|i| (domain[i], corrupted_codeword[i])) + .collect(); + + let k = known.len(); + if k == 0 { + return Err(FriBiniusError::Reconstruction( + "no known points available".into(), + )); + } + + #[cfg(feature = "parallel")] + let reconstructed: Vec<(usize, B128)> = corrupted_indices + .par_iter() + .map(|&missing| { + log::debug!("Reconstructing index {missing}"); + let x_e = domain[missing]; + let mut value = B128::zero(); + + for j in 0..k { + let (x_j, y_j) = known[j]; + let mut l_j = B128::ONE; + for (m, _) in known.iter().enumerate().take(k) { + if m == j { + continue; + } + let (x_m, _) = known[m]; + l_j = l_j * (x_e - x_m) * (x_j - x_m).invert().unwrap(); + } + value += y_j * l_j; + } + + (missing, value) + }) + .collect(); + + #[cfg(feature = "parallel")] + { + for (i, v) in reconstructed { + corrupted_codeword[i] = v; + } + Ok(()) + } + + #[cfg(not(feature = "parallel"))] + { + for &missing in corrupted_indices { + log::debug!("Reconstructing index {missing}"); + let x_e = domain[missing]; + let mut value = B128::zero(); + + for j in 0..k { + let (x_j, y_j) = known[j]; + let mut l_j = B128::ONE; + for m in 0..k { + if m == j { + continue; + } + let (x_m, _) = known[m]; + l_j = l_j * (x_e - x_m) * (x_j - x_m).invert().unwrap(); + } + value += y_j * l_j; + } + + corrupted_codeword[missing] = value; + } + + Ok(()) + } +} diff --git a/fri/src/tests.rs b/fri/src/tests.rs new file mode 100644 index 00000000..8297ca91 --- /dev/null +++ b/fri/src/tests.rs @@ -0,0 +1,105 @@ +#[cfg(test)] +mod e2e_tests { + use crate::e2e_helpers::*; + use crate::{FriBiniusError, FriParamsConfig}; + use rand::{SeedableRng, rngs::StdRng}; + + fn patterned_data(size: usize) -> Vec { + (0..size).map(|i| (i % 256) as u8).collect() + } + + #[test] + fn end_to_end_commit_prove_verify_small() -> Result<(), FriBiniusError> { + let data = patterned_data(1024); // 1 KiB + + let cfg = FriParamsConfig { + log_inv_rate: 1, + num_test_queries: 32, + log_num_shares: 8, + n_vars: 0, // will be filled from data + }; + + let mut rng = StdRng::from_seed([0u8; 32]); + + commit_prove_verify_bytes(cfg, &data, &mut rng) + } + + #[test] + fn end_to_end_commit_and_manual_prove_verify() -> Result<(), FriBiniusError> { + use binius_verifier::config::B128; + + let data = patterned_data(16 * 1024); // 16 KiB + + let cfg = FriParamsConfig { + log_inv_rate: 1, + num_test_queries: 128, + log_num_shares: 8, + n_vars: 0, // auto-filled from data + }; + + let mut rng = StdRng::from_seed([1u8; 32]); + + // --- 1. 
Commit from bytes --- + let (pcs, ctx, packed, commit_output, _commitment) = commit_bytes(cfg, &data)?; + + // --- 2. Sample evaluation point & compute claim --- + let eval_point = pcs.sample_evaluation_point(&mut rng); + let eval_claim = pcs.calculate_evaluation_claim(&packed.packed_values, &eval_point)?; + + // --- 3. Prove (full control) --- + let proof = pcs.prove::( + &packed.packed_values, + &packed.packed_mle, + &ctx, + &commit_output, + &eval_point, + )?; + + // --- 4. Verify --- + pcs.verify(&proof, &ctx)?; + + // Sanity: proof carries same claim we computed locally + assert_eq!(proof.evaluation_claim, eval_claim); + + Ok(()) + } + + #[test] + fn end_to_end_inclusion_proofs() -> Result<(), FriBiniusError> { + use binius_verifier::config::B128; + use rand::seq::SliceRandom; + + let data = patterned_data(8 * 1024); + + let cfg = FriParamsConfig { + log_inv_rate: 1, + num_test_queries: 64, + log_num_shares: 8, + n_vars: 0, + }; + + let mut rng = StdRng::from_seed([2u8; 32]); + + let (pcs, ctx, _packed, commit_output, commitment) = commit_bytes(cfg, &data)?; + + let codeword_len = commit_output.codeword.len(); + assert!(codeword_len > 0); + + // sample a few random indices + let mut indices: Vec = (0..codeword_len).collect(); + indices.shuffle(&mut rng); + let indices = &indices[..usize::min(16, codeword_len)]; + + for &idx in indices { + let value = commit_output.codeword[idx]; + + // create Merkle inclusion proof + let mut transcript = pcs.inclusion_proof::(&commit_output.committed, idx)?; + + // verify it + pcs.verify_inclusion_proof(&mut transcript, &[value], idx, &ctx, &commitment)?; + } + + Ok(()) + } +} diff --git a/fri/src/transcript.rs b/fri/src/transcript.rs new file mode 100644 index 00000000..eb2d6c99 --- /dev/null +++ b/fri/src/transcript.rs @@ -0,0 +1,38 @@ +use crate::error::FriBiniusError; +use binius_transcript::Buf; +use binius_transcript::VerifierTranscript; +use binius_verifier::config::StdChallenger; + +pub type Challenger = StdChallenger; +pub type VerifierTr = VerifierTranscript; + +pub fn transcript_to_bytes(transcript: &VerifierTr) -> Vec { + // NOTE: this is inherently stateful / consumptive; + // cloning is fine on verifier side. 
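+	//
+	// Illustrative round-trip (not part of the API surface): calling
+	// `transcript_from_bytes(transcript_to_bytes(&vt))` rebuilds a transcript that
+	// replays the same unread message bytes with a fresh default challenger.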
+ let mut cloned = transcript.clone(); + let mut message_reader = cloned.message(); + let buffer = message_reader.buffer(); + let remaining = buffer.remaining(); + + if remaining == 0 { + return Vec::new(); + } + + let mut bytes = vec![0u8; remaining]; + buffer.copy_to_slice(&mut bytes); + bytes +} + +pub fn transcript_from_bytes(bytes: Vec) -> VerifierTr { + VerifierTr::new(Challenger::default(), bytes) +} + +#[allow(dead_code)] +pub fn extract_commitment_from_transcript( + transcript: &mut VerifierTr, +) -> Result, FriBiniusError> { + transcript + .message() + .read() + .map_err(|e| FriBiniusError::Transcript(e.to_string())) +} From f9a6f2750f6188ab5487fb9a3e8caafd972f09f0 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Fri, 28 Nov 2025 22:43:26 +0530 Subject: [PATCH 02/16] enhance test coverage for FRI sampling --- fri/src/core.rs | 12 ++-- fri/src/lib.rs | 2 +- fri/src/tests.rs | 159 ++++++++++++++++++++++++++++++++++++++++++++++- 3 files changed, 164 insertions(+), 9 deletions(-) diff --git a/fri/src/core.rs b/fri/src/core.rs index f65a5963..05ae0fe3 100644 --- a/fri/src/core.rs +++ b/fri/src/core.rs @@ -4,18 +4,18 @@ use crate::transcript::{Challenger, VerifierTr}; use binius_field::{ExtensionField, Field, PackedExtension, PackedField}; use binius_math::{ - BinarySubspace, FieldBuffer, ReedSolomonCode, inner_product::inner_product, multilinear::eq::eq_ind_partial_eval, ntt::{ - NeighborsLastMultiThread, domain_context::{self, GenericPreExpanded}, + NeighborsLastMultiThread, }, + BinarySubspace, FieldBuffer, ReedSolomonCode, }; use binius_prover::{ fri::CommitOutput, hash::parallel_compression::ParallelCompressionAdaptor, - merkle_tree::{MerkleTreeProver, prover::BinaryMerkleTreeProver}, + merkle_tree::{prover::BinaryMerkleTreeProver, MerkleTreeProver}, pcs::OneBitPCSProver, }; use binius_transcript::ProverTranscript; @@ -27,10 +27,10 @@ use binius_verifier::{ pcs::verify as fri_verify, }; -#[cfg(test)] -use rand::{CryptoRng, RngCore}; #[cfg(test)] use binius_field::Random; +#[cfg(test)] +use rand::{CryptoRng, RngCore}; // Concrete merkle prover type we’ll use everywhere. 
pub type DefaultMerkleProver = @@ -118,7 +118,7 @@ impl FriBiniusPCS { Ok(FriContext { fri_params, ntt }) } - #[cfg(test)] + #[cfg(test)] pub fn sample_evaluation_point(&self, rng: &mut R) -> Vec { let mut point = Vec::with_capacity(self.cfg.n_vars); for _ in 0..self.cfg.n_vars { diff --git a/fri/src/lib.rs b/fri/src/lib.rs index 28e4420f..278b714d 100644 --- a/fri/src/lib.rs +++ b/fri/src/lib.rs @@ -15,7 +15,7 @@ pub use encoding::{BytesEncoder, PackedMLE}; pub use error::FriBiniusError; #[cfg(feature = "testing")] pub use sampling::reconstruct_codeword_naive; -pub use transcript::{VerifierTr, transcript_from_bytes, transcript_to_bytes}; +pub use transcript::{transcript_from_bytes, transcript_to_bytes, VerifierTr}; #[cfg(test)] pub mod e2e_helpers { diff --git a/fri/src/tests.rs b/fri/src/tests.rs index 8297ca91..9eab2682 100644 --- a/fri/src/tests.rs +++ b/fri/src/tests.rs @@ -1,8 +1,10 @@ #[cfg(test)] mod e2e_tests { - use crate::e2e_helpers::*; + use crate::core::FriCommitOutput; + use crate::{e2e_helpers::*, FriBiniusPCS, FriCommitment, FriContext}; use crate::{FriBiniusError, FriParamsConfig}; - use rand::{SeedableRng, rngs::StdRng}; + use binius_verifier::config::B128; + use rand::{rngs::StdRng, Rng, SeedableRng}; fn patterned_data(size: usize) -> Vec { (0..size).map(|i| (i % 256) as u8).collect() @@ -102,4 +104,157 @@ mod e2e_tests { Ok(()) } + + #[test] + fn multiple_blobs_da_sampling_succeeds() -> Result<(), FriBiniusError> { + // Simulate a block with multiple blobs of different sizes + let blob_sizes = [1024usize, 8 * 1024, 64 * 1024]; // 1KB, 8KB, 64KB + + // Base FRI config; n_vars will be filled per-blob based on data size + let base_cfg = FriParamsConfig { + log_inv_rate: 1, + num_test_queries: 64, + log_num_shares: 8, + n_vars: 0, + }; + + let mut rng = StdRng::seed_from_u64(42); + + // "Block producer" side: compute commitments for each blob + struct BlobState { + #[allow(dead_code)] + data: Vec, // original blob bytes (node-only) + pcs: FriBiniusPCS, // PCS instance (could be shared across blobs in future) + ctx: FriContext, // FRI context for this blob (depends on length) + commit_output: FriCommitOutput, // full commit output (codeword + merkle) + commitment: FriCommitment, // what goes into the header + } + + let mut blobs = Vec::new(); + + for &size in &blob_sizes { + let data = patterned_data(size); + let cfg = base_cfg; // copy the base config + + let (pcs, ctx, _packed, commit_output, commitment) = commit_bytes(cfg, &data)?; + + blobs.push(BlobState { + data, + pcs, + ctx, + commit_output, + commitment, + }); + } + + // "Light client" side: + // - sees the per-blob commitments + blob sizes (Either from the header or from SummaryTxPostInherent) + // - wants to randomly sample cells in each blob's codeword + // and verify Merkle inclusion proofs. 
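+		// (Sketch) `initialize_fri_context` only looks at the padded length and the
+		// `FriParamsConfig`, so a real light client could rebuild each blob's FRI
+		// context from `size_bytes` alone, without downloading the blob data.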
+ + for (blob_idx, blob) in blobs.iter().enumerate() { + let codeword_len = blob.commit_output.codeword.len(); + assert!(codeword_len > 0, "blob {blob_idx} has empty codeword"); + + // randomly sampling 10 cells per blob, same as current lc + let samples = usize::min(10, codeword_len); + let mut sampled_indices = Vec::with_capacity(samples); + + // Sample distinct indices at random + while sampled_indices.len() < samples { + let idx = rng.random_range(0..codeword_len); + if !sampled_indices.contains(&idx) { + sampled_indices.push(idx); + } + } + + for &idx in &sampled_indices { + // Node side: provide (value, inclusion proof) for this index + let value = blob.commit_output.codeword[idx]; + let mut proof_transcript = blob + .pcs + .inclusion_proof::(&blob.commit_output.committed, idx)?; + + // Light client side: verify inclusion using only: + // - value + // - index + // - commitment from header + // - per-blob FRI context (which it can reconstruct from size + config) + blob.pcs.verify_inclusion_proof( + &mut proof_transcript, + &[value], + idx, + &blob.ctx, + &blob.commitment, + )?; + } + } + + Ok(()) + } + + #[test] + fn da_sampling_detects_corrupted_data_and_commitment() -> Result<(), FriBiniusError> { + let data = patterned_data(16 * 1024); // 16KB + let cfg = FriParamsConfig { + log_inv_rate: 1, + num_test_queries: 64, + log_num_shares: 8, + n_vars: 0, + }; + + let mut rng = StdRng::seed_from_u64(99); + + let (pcs, ctx, _packed, commit_output, commitment) = commit_bytes(cfg, &data)?; + + let codeword_len = commit_output.codeword.len(); + assert!(codeword_len > 0); + + // Pick a random index to test + let idx = rng.random_range(0..codeword_len); + + let honest_value = commit_output.codeword[idx]; + + // --- 1) Honest proof should verify --- + { + let mut transcript = pcs.inclusion_proof::(&commit_output.committed, idx)?; + pcs.verify_inclusion_proof(&mut transcript, &[honest_value], idx, &ctx, &commitment)?; + } + + // --- 2) Corrupted value should fail --- + { + let mut transcript = pcs.inclusion_proof::(&commit_output.committed, idx)?; + let mut bad_value = honest_value; + // change the original value + bad_value += B128::from(1u128); + + let res = + pcs.verify_inclusion_proof(&mut transcript, &[bad_value], idx, &ctx, &commitment); + + assert!(res.is_err(), "verification should fail for corrupted value"); + } + + // --- 3) Corrupted commitment should fail --- + { + let mut transcript = pcs.inclusion_proof::(&commit_output.committed, idx)?; + + let mut bad_commitment = commitment.clone(); + bad_commitment.digest[0] ^= 0x42; // flip a byte + + let res = pcs.verify_inclusion_proof( + &mut transcript, + &[honest_value], + idx, + &ctx, + &bad_commitment, + ); + + assert!( + res.is_err(), + "verification should fail for corrupted commitment" + ); + } + + Ok(()) + } } From ce6c1c9048b71df4bfa0a50b18f88efa0eae4e95 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Mon, 1 Dec 2025 15:44:37 +0530 Subject: [PATCH 03/16] Updated HeaderExtension to support both KZG & Fri PCS --- core/src/asdr.rs | 1 - core/src/header/extension/fri_v1.rs | 66 ++++ core/src/header/extension/mod.rs | 187 +++++++---- core/src/header/mod.rs | 474 ++++++++++++++-------------- 4 files changed, 429 insertions(+), 299 deletions(-) create mode 100644 core/src/header/extension/fri_v1.rs diff --git a/core/src/asdr.rs b/core/src/asdr.rs index 10052050..d16c96d9 100644 --- a/core/src/asdr.rs +++ b/core/src/asdr.rs @@ -572,7 +572,6 @@ mod tests { } } - type Ex = AppUncheckedExtrinsic; type CEx = CheckedExtrinsic; diff --git 
a/core/src/header/extension/fri_v1.rs b/core/src/header/extension/fri_v1.rs new file mode 100644 index 00000000..4c21d8f0 --- /dev/null +++ b/core/src/header/extension/fri_v1.rs @@ -0,0 +1,66 @@ +use codec::{Decode, Encode}; +use primitive_types::H256; +use scale_info::TypeInfo; +use sp_std::vec::Vec; + +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; +#[cfg(feature = "runtime")] +use sp_debug_derive::RuntimeDebug; + +/// Version of Fri/Binius parameters used to interpret size_bytes into +/// codeword length and sampling domain. +#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, Default, TypeInfo)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "runtime", derive(RuntimeDebug))] +pub struct FriParamsVersion(pub u8); + +/// Metadata needed for DA sampling + PCS verification of one blob. +#[derive(Clone, PartialEq, Eq, Encode, Decode, Default, TypeInfo)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "runtime", derive(RuntimeDebug))] +pub struct FriBlobCommitment { + /// Original blob size in bytes. + pub size_bytes: u64, + + /// Fri PCS commitment (Merkle root of the blob codeword). + pub commitment: H256, +} + +/// DA commitment extension — input to LC sampling & verification. +/// Replaces KZG’s KateCommitment format. +#[derive(Clone, PartialEq, Eq, Encode, Decode, Default, TypeInfo)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +#[cfg_attr(feature = "runtime", derive(RuntimeDebug))] +pub struct HeaderExtension { + /// All blob commitments in canonical block order. + pub blobs: Vec, + + /// Dataroot to be used for bridge & blob inclusion proofs + pub data_root: H256, + + /// Parameter set identifier to decode sampling domain / FRI params. + pub params_version: FriParamsVersion, +} + +impl HeaderExtension { + pub fn data_root(&self) -> H256 { + self.data_root + } + + pub fn get_empty_header(data_root: H256) -> Self { + HeaderExtension { + data_root, + ..Default::default() + } + } + + pub fn get_faulty_header(data_root: H256) -> Self { + // TODO: Differentiate b/w empty & faulty_header + HeaderExtension { + data_root, + ..Default::default() + } + } +} diff --git a/core/src/header/extension/mod.rs b/core/src/header/extension/mod.rs index 2f2ed85e..b2dc7e1f 100644 --- a/core/src/header/extension/mod.rs +++ b/core/src/header/extension/mod.rs @@ -1,4 +1,3 @@ -use crate::{DataLookup, HeaderVersion}; use codec::{Decode, Encode}; use primitive_types::H256; use scale_info::TypeInfo; @@ -6,97 +5,163 @@ use scale_info::TypeInfo; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; #[cfg(feature = "runtime")] -use {sp_debug_derive::RuntimeDebug, sp_runtime_interface::pass_by::PassByCodec}; +use sp_debug_derive::RuntimeDebug; -pub mod v3; +pub mod fri_v1; +// basically only supported kzg header currently pub mod v4; -/// Header extension data. -#[derive(PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "runtime", derive(PassByCodec, RuntimeDebug))] -#[repr(u8)] -pub enum HeaderExtension { - V3(v3::HeaderExtension) = 2, - V4(v4::HeaderExtension) = 3, -} +pub mod kzg { + use super::*; + + /// Versioning for KZG header formats. 
+ #[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, TypeInfo)] + #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] + pub enum KzgHeaderVersion { + V4, + } -/// It forwards the call to the inner version of the header. Any invalid version will return the -/// default value or execute an empty block. -macro_rules! forward_to_version { - ($self:ident, $function:ident) => {{ - match $self { - HeaderExtension::V3(ext) => ext.$function(), - HeaderExtension::V4(ext) => ext.$function(), + #[derive(PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] + #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] + #[cfg_attr(feature = "runtime", derive(RuntimeDebug))] + #[cfg_attr(not(feature = "runtime"), derive(Debug))] + pub enum KzgHeader { + V4(v4::HeaderExtension), + } + + impl KzgHeader { + pub fn data_root(&self) -> H256 { + match self { + KzgHeader::V4(ext) => ext.data_root(), + } } - }}; - ($self:ident, $function:ident, $arg:expr) => {{ - match $self { - HeaderExtension::V3(ext) => ext.$function($arg), - HeaderExtension::V4(ext) => ext.$function($arg), + pub fn version(&self) -> KzgHeaderVersion { + match self { + KzgHeader::V4(_) => KzgHeaderVersion::V4, + } } - }}; -} -impl HeaderExtension { - pub fn data_root(&self) -> H256 { - forward_to_version!(self, data_root) + pub fn get_empty_header(data_root: H256, version: KzgHeaderVersion) -> Self { + match version { + KzgHeaderVersion::V4 => v4::HeaderExtension::get_empty_header(data_root).into(), + } + } + + pub fn get_faulty_header(data_root: H256, version: KzgHeaderVersion) -> Self { + match version { + KzgHeaderVersion::V4 => v4::HeaderExtension::get_faulty_header(data_root).into(), + } + } } - pub fn app_lookup(&self) -> DataLookup { - match self { - HeaderExtension::V3(ext) => DataLookup::from(&ext.app_lookup), - HeaderExtension::V4(ext) => ext.app_lookup.clone(), + impl From for KzgHeader { + #[inline] + fn from(ext: v4::HeaderExtension) -> Self { + KzgHeader::V4(ext) } } +} + +pub mod fri_header { + use super::*; - pub fn rows(&self) -> u16 { - forward_to_version!(self, rows) + /// Versioning for Fri/Binius header formats. 
+ #[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, TypeInfo)] + #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] + pub enum FriHeaderVersion { + V1, } - pub fn cols(&self) -> u16 { - forward_to_version!(self, cols) + #[derive(PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] + #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] + #[cfg_attr(feature = "runtime", derive(RuntimeDebug))] + #[cfg_attr(not(feature = "runtime"), derive(Debug))] + pub enum FriHeader { + V1(fri_v1::HeaderExtension), } - pub fn get_empty_header(data_root: H256, version: HeaderVersion) -> HeaderExtension { - match version { - HeaderVersion::V3 => v3::HeaderExtension::get_empty_header(data_root).into(), - HeaderVersion::V4 => v4::HeaderExtension::get_empty_header(data_root).into(), + impl FriHeader { + pub fn data_root(&self) -> H256 { + match self { + FriHeader::V1(ext) => ext.data_root(), + } + } + + pub fn version(&self) -> FriHeaderVersion { + match self { + FriHeader::V1(_) => FriHeaderVersion::V1, + } + } + + pub fn get_empty_header(data_root: H256, version: FriHeaderVersion) -> Self { + match version { + FriHeaderVersion::V1 => fri_v1::HeaderExtension::get_empty_header(data_root).into(), + } + } + + pub fn get_faulty_header(data_root: H256, version: FriHeaderVersion) -> Self { + match version { + FriHeaderVersion::V1 => { + fri_v1::HeaderExtension::get_faulty_header(data_root).into() + }, + } } } - pub fn get_faulty_header(data_root: H256, version: HeaderVersion) -> HeaderExtension { - match version { - HeaderVersion::V3 => v3::HeaderExtension::get_faulty_header(data_root).into(), - HeaderVersion::V4 => v4::HeaderExtension::get_faulty_header(data_root).into(), + impl From for FriHeader { + #[inline] + fn from(ext: fri_v1::HeaderExtension) -> Self { + FriHeader::V1(ext) } } +} + +/// header extension: *which PCS + which version inside*. 
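+/// Illustrative dispatch (mirrors the `data_root()` helper below):
+/// `match ext { HeaderExtension::Kzg(h) => h.data_root(), HeaderExtension::Fri(h) => h.data_root() }`.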
+#[derive(PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "runtime", derive(RuntimeDebug))] +#[cfg_attr(not(feature = "runtime"), derive(Debug))] +pub enum HeaderExtension { + Kzg(kzg::KzgHeader), + Fri(fri_header::FriHeader), +} - pub fn get_header_version(&self) -> HeaderVersion { +impl HeaderExtension { + pub fn data_root(&self) -> H256 { match self { - HeaderExtension::V3(_) => HeaderVersion::V3, - HeaderExtension::V4(_) => HeaderVersion::V4, + HeaderExtension::Kzg(h) => h.data_root(), + HeaderExtension::Fri(h) => h.data_root(), } } -} -impl Default for HeaderExtension { - fn default() -> Self { - v3::HeaderExtension::default().into() + pub fn is_kzg(&self) -> bool { + matches!(self, HeaderExtension::Kzg(_)) + } + + pub fn is_fri(&self) -> bool { + matches!(self, HeaderExtension::Fri(_)) } -} -impl From for HeaderExtension { - #[inline] - fn from(ext: v3::HeaderExtension) -> Self { - Self::V3(ext) + pub fn get_empty_kzg(data_root: H256, version: kzg::KzgHeaderVersion) -> Self { + HeaderExtension::Kzg(kzg::KzgHeader::get_empty_header(data_root, version)) + } + + pub fn get_empty_fri(data_root: H256, version: fri_header::FriHeaderVersion) -> Self { + HeaderExtension::Fri(fri_header::FriHeader::get_empty_header(data_root, version)) + } + + pub fn get_faulty_kzg(data_root: H256, version: kzg::KzgHeaderVersion) -> Self { + HeaderExtension::Kzg(kzg::KzgHeader::get_faulty_header(data_root, version)) + } + + pub fn get_faulty_fri(data_root: H256, version: fri_header::FriHeaderVersion) -> Self { + HeaderExtension::Fri(fri_header::FriHeader::get_faulty_header(data_root, version)) } } -impl From for HeaderExtension { - #[inline] - fn from(ext: v4::HeaderExtension) -> Self { - Self::V4(ext) +impl Default for HeaderExtension { + fn default() -> Self { + HeaderExtension::Fri(fri_header::FriHeader::V1(fri_v1::HeaderExtension::default())) } } diff --git a/core/src/header/mod.rs b/core/src/header/mod.rs index 69910e4b..7566ac1f 100644 --- a/core/src/header/mod.rs +++ b/core/src/header/mod.rs @@ -284,240 +284,240 @@ where } } -#[cfg(all(test, feature = "runtime"))] -mod tests { - use codec::Error; - use hex_literal::hex; - use primitive_types::H256; - use sp_runtime::{traits::BlakeTwo256, DigestItem}; - use test_case::test_case; - - use super::*; - use crate::{kate_commitment::v3, AppId, V3DataLookup::DataLookup}; - - type THeader = Header; - - #[test] - fn should_serialize_numbers() { - fn serialize(num: u128) -> String { - let mut v = vec![]; - { - let mut ser = serde_json::Serializer::new(std::io::Cursor::new(&mut v)); - number_serde::serialize(&num, &mut ser).unwrap(); - } - String::from_utf8(v).unwrap() - } - - assert_eq!(serialize(0), "\"0x0\"".to_owned()); - assert_eq!(serialize(1), "\"0x1\"".to_owned()); - assert_eq!( - serialize(u64::max_value() as u128), - "\"0xffffffffffffffff\"".to_owned() - ); - assert_eq!( - serialize(u64::max_value() as u128 + 1), - "\"0x10000000000000000\"".to_owned() - ); - } - - #[test] - fn should_deserialize_number() { - fn deserialize(num: &str) -> u128 { - let mut der = serde_json::Deserializer::new(serde_json::de::StrRead::new(num)); - number_serde::deserialize(&mut der).unwrap() - } - - assert_eq!(deserialize("\"0x0\""), 0); - assert_eq!(deserialize("\"0x1\""), 1); - assert_eq!( - deserialize("\"0xffffffffffffffff\""), - u64::max_value() as u128 - ); - assert_eq!( - deserialize("\"0x10000000000000000\""), - u64::max_value() as u128 + 1 - ); - } - - /// The `commitment.data_root 
is none`. - fn header_v3() -> THeader { - let commitment = v3::KateCommitment { - commitment: hex!("80e949ebdaf5c13e09649c587c6b1905fb770b4a6843abaac6b413e3a7405d9825ac764db2341db9b7965965073e975980e949ebdaf5c13e09649c587c6b1905fb770b4a6843abaac6b413e3a7405d9825ac764db2341db9b7965965073e9759").to_vec(), - ..Default::default() - }; - let extension = extension::v3::HeaderExtension { - commitment, - ..Default::default() - }; - - THeader { - extension: extension.into(), - ..Default::default() - } - } - - /// It creates a corrupted V3 header and the associated error on decodification. - fn corrupted_header() -> (Vec, Error) { - let mut encoded = header_v3().encode(); - encoded.remove(110); - - let error = THeader::decode(&mut encoded.as_slice()).unwrap_err(); - - (encoded, error) - } - - #[test_case( header_v3().encode().as_ref() => Ok(header_v3()) ; "Decode V3 header")] - #[test_case( corrupted_header().0.as_ref() => Err(corrupted_header().1) ; "Decode corrupted header")] - fn header_decoding(mut encoded_header: &[u8]) -> Result { - Header::decode(&mut encoded_header) - } - - fn header_serde_encode(header: Header) -> String - where - H::Output: TypeInfo, - { - serde_json::to_string(&header).unwrap_or_default() - } - - #[test_case(header_serde_encode(header_v3()) => Ok(header_v3()) ; "Serde V3 header")] - fn header_serde(json_header: String) -> Result { - serde_json::from_str(&json_header).map_err(|serde_err| format!("{}", serde_err)) - } - - fn header() -> (THeader, H256) { - let commitment = v3::KateCommitment { - rows:1, - cols:4, - data_root: hex!("0000000000000000000000000000000000000000000000000000000000000000").into(), - commitment: hex!("ace5bc6a21eef8b28987eb878e0b97b5ae3c8b8e05efe957802dc0008b23327b349f62ec96bcee48bdc30f6bb670f3d1ace5bc6a21eef8b28987eb878e0b97b5ae3c8b8e05efe957802dc0008b23327b349f62ec96bcee48bdc30f6bb670f3d1").into() - }; - let extension = extension::v3::HeaderExtension { - commitment, - app_lookup: DataLookup::from_id_and_len_iter([(AppId(0), 1)].into_iter()) - .expect("Valid DataLookup .qed"), - }; - let digest = Digest { - logs: vec![ - DigestItem::PreRuntime( - hex!("42414245").into(), - hex!("0201000000aa23040500000000").into()), - DigestItem::Seal( - hex!("42414245").into(), - hex!("82a0c0a19f4548adcd575cdc37555b3aeaaae4048a6d39013b98f412420977752459afdc5295d026a4d3476d4d8d3d5e55c3c109235350d9242b4e3132db7e88").into(), - ), - ] - }; - - let header = THeader { - parent_hash: hex!("84a90eef1c4a75c3cbfdf5095450725f924f1a2696946f6d9cf8401f6db99128") - .into(), - number: 368726, - state_root: hex!("586140044543d7bb7471781322bcc2d7e4290716fbac7267e001843162f151d8") - .into(), - extrinsics_root: hex!( - "9ea39eed403afde19c6688785530654a601bb62f0c178c78563933e303e001b6" - ) - .into(), - extension: extension.into(), - digest, - }; - let hash = header.hash(); - - // Check `hash` is what we have in the testnet. 
- assert_eq!( - hash, - H256(hex!( - "c9941af1cb862db9f2e4c0c94f457d1217b363ecf6e6cc0dbeb5cbfeb35fbc12" - )) - ); - - (header, hash) - } - - fn corrupted_kate_commitment(header_and_hash: (THeader, H256)) -> (THeader, H256) { - let (mut header, hash) = header_and_hash; - - match header.extension { - extension::HeaderExtension::V3(ref mut ext) => { - ext.commitment.commitment = b"invalid commitment v3".to_vec(); - }, - extension::HeaderExtension::V4(ref mut ext) => { - ext.commitment.commitment = b"invalid commitment v4".to_vec(); - }, - }; - - (header, hash) - } - - fn corrupted_kate_data_root(header_and_hash: (THeader, H256)) -> (THeader, H256) { - let (mut header, hash) = header_and_hash; - - match header.extension { - extension::HeaderExtension::V3(ref mut ext) => { - ext.commitment.data_root = H256::repeat_byte(2u8); - }, - extension::HeaderExtension::V4(ref mut ext) => { - ext.commitment.data_root = H256::repeat_byte(2u8); - }, - }; - - (header, hash) - } - - fn corrupted_kate_cols(header_and_hash: (THeader, H256)) -> (THeader, H256) { - let (mut header, hash) = header_and_hash; - - match header.extension { - extension::HeaderExtension::V3(ref mut ext) => { - ext.commitment.cols += 2; - }, - extension::HeaderExtension::V4(ref mut ext) => { - ext.commitment.cols += 2; - }, - }; - - (header, hash) - } - - fn corrupted_kate_rows(header_and_hash: (THeader, H256)) -> (THeader, H256) { - let (mut header, hash) = header_and_hash; - - match header.extension { - extension::HeaderExtension::V3(ref mut ext) => { - ext.commitment.rows += 2; - }, - extension::HeaderExtension::V4(ref mut ext) => { - ext.commitment.rows += 2; - }, - }; - - (header, hash) - } - - fn corrupted_number(mut header_and_hash: (THeader, H256)) -> (THeader, H256) { - header_and_hash.0.number += 1; - header_and_hash - } - - fn corrupted_state_root(mut header_and_hash: (THeader, H256)) -> (THeader, H256) { - header_and_hash.0.state_root.0[0] ^= 0xFFu8; - header_and_hash - } - fn corrupted_parent(mut header_and_hash: (THeader, H256)) -> (THeader, H256) { - header_and_hash.0.parent_hash.0[0] ^= 0xFFu8; - header_and_hash - } - - #[test_case( header() => true ; "Valid header hash")] - #[test_case( corrupted_kate_commitment(header()) => false; "Corrupted commitment in kate")] - #[test_case( corrupted_kate_data_root(header()) => false; "Corrupted data root in kate")] - #[test_case( corrupted_kate_cols(header()) => false; "Corrupted cols in kate")] - #[test_case( corrupted_kate_rows(header()) => false; "Corrupted rows in kate")] - #[test_case( corrupted_number(header()) => false )] - #[test_case( corrupted_state_root(header()) => false )] - #[test_case( corrupted_parent(header()) => false )] - fn header_corruption(header_and_hash: (THeader, H256)) -> bool { - let (header, hash) = header_and_hash; - header.hash() == hash - } -} +// #[cfg(all(test, feature = "runtime"))] +// mod tests { +// use codec::Error; +// use hex_literal::hex; +// use primitive_types::H256; +// use sp_runtime::{traits::BlakeTwo256, DigestItem}; +// use test_case::test_case; + +// use super::*; +// use crate::{kate_commitment::v3, AppId, V3DataLookup::DataLookup}; + +// type THeader = Header; + +// #[test] +// fn should_serialize_numbers() { +// fn serialize(num: u128) -> String { +// let mut v = vec![]; +// { +// let mut ser = serde_json::Serializer::new(std::io::Cursor::new(&mut v)); +// number_serde::serialize(&num, &mut ser).unwrap(); +// } +// String::from_utf8(v).unwrap() +// } + +// assert_eq!(serialize(0), "\"0x0\"".to_owned()); +// 
assert_eq!(serialize(1), "\"0x1\"".to_owned()); +// assert_eq!( +// serialize(u64::max_value() as u128), +// "\"0xffffffffffffffff\"".to_owned() +// ); +// assert_eq!( +// serialize(u64::max_value() as u128 + 1), +// "\"0x10000000000000000\"".to_owned() +// ); +// } + +// #[test] +// fn should_deserialize_number() { +// fn deserialize(num: &str) -> u128 { +// let mut der = serde_json::Deserializer::new(serde_json::de::StrRead::new(num)); +// number_serde::deserialize(&mut der).unwrap() +// } + +// assert_eq!(deserialize("\"0x0\""), 0); +// assert_eq!(deserialize("\"0x1\""), 1); +// assert_eq!( +// deserialize("\"0xffffffffffffffff\""), +// u64::max_value() as u128 +// ); +// assert_eq!( +// deserialize("\"0x10000000000000000\""), +// u64::max_value() as u128 + 1 +// ); +// } + +// /// The `commitment.data_root is none`. +// fn header_v3() -> THeader { +// let commitment = v3::KateCommitment { +// commitment: hex!("80e949ebdaf5c13e09649c587c6b1905fb770b4a6843abaac6b413e3a7405d9825ac764db2341db9b7965965073e975980e949ebdaf5c13e09649c587c6b1905fb770b4a6843abaac6b413e3a7405d9825ac764db2341db9b7965965073e9759").to_vec(), +// ..Default::default() +// }; +// let extension = extension::v3::HeaderExtension { +// commitment, +// ..Default::default() +// }; + +// THeader { +// extension: extension.into(), +// ..Default::default() +// } +// } + +// /// It creates a corrupted V3 header and the associated error on decodification. +// fn corrupted_header() -> (Vec, Error) { +// let mut encoded = header_v3().encode(); +// encoded.remove(110); + +// let error = THeader::decode(&mut encoded.as_slice()).unwrap_err(); + +// (encoded, error) +// } + +// #[test_case( header_v3().encode().as_ref() => Ok(header_v3()) ; "Decode V3 header")] +// #[test_case( corrupted_header().0.as_ref() => Err(corrupted_header().1) ; "Decode corrupted header")] +// fn header_decoding(mut encoded_header: &[u8]) -> Result { +// Header::decode(&mut encoded_header) +// } + +// fn header_serde_encode(header: Header) -> String +// where +// H::Output: TypeInfo, +// { +// serde_json::to_string(&header).unwrap_or_default() +// } + +// #[test_case(header_serde_encode(header_v3()) => Ok(header_v3()) ; "Serde V3 header")] +// fn header_serde(json_header: String) -> Result { +// serde_json::from_str(&json_header).map_err(|serde_err| format!("{}", serde_err)) +// } + +// fn header() -> (THeader, H256) { +// let commitment = v3::KateCommitment { +// rows:1, +// cols:4, +// data_root: hex!("0000000000000000000000000000000000000000000000000000000000000000").into(), +// commitment: hex!("ace5bc6a21eef8b28987eb878e0b97b5ae3c8b8e05efe957802dc0008b23327b349f62ec96bcee48bdc30f6bb670f3d1ace5bc6a21eef8b28987eb878e0b97b5ae3c8b8e05efe957802dc0008b23327b349f62ec96bcee48bdc30f6bb670f3d1").into() +// }; +// let extension = extension::v3::HeaderExtension { +// commitment, +// app_lookup: DataLookup::from_id_and_len_iter([(AppId(0), 1)].into_iter()) +// .expect("Valid DataLookup .qed"), +// }; +// let digest = Digest { +// logs: vec![ +// DigestItem::PreRuntime( +// hex!("42414245").into(), +// hex!("0201000000aa23040500000000").into()), +// DigestItem::Seal( +// hex!("42414245").into(), +// hex!("82a0c0a19f4548adcd575cdc37555b3aeaaae4048a6d39013b98f412420977752459afdc5295d026a4d3476d4d8d3d5e55c3c109235350d9242b4e3132db7e88").into(), +// ), +// ] +// }; + +// let header = THeader { +// parent_hash: hex!("84a90eef1c4a75c3cbfdf5095450725f924f1a2696946f6d9cf8401f6db99128") +// .into(), +// number: 368726, +// state_root: 
hex!("586140044543d7bb7471781322bcc2d7e4290716fbac7267e001843162f151d8") +// .into(), +// extrinsics_root: hex!( +// "9ea39eed403afde19c6688785530654a601bb62f0c178c78563933e303e001b6" +// ) +// .into(), +// extension: extension.into(), +// digest, +// }; +// let hash = header.hash(); + +// // Check `hash` is what we have in the testnet. +// assert_eq!( +// hash, +// H256(hex!( +// "c9941af1cb862db9f2e4c0c94f457d1217b363ecf6e6cc0dbeb5cbfeb35fbc12" +// )) +// ); + +// (header, hash) +// } + +// fn corrupted_kate_commitment(header_and_hash: (THeader, H256)) -> (THeader, H256) { +// let (mut header, hash) = header_and_hash; + +// match header.extension { +// extension::HeaderExtension::V3(ref mut ext) => { +// ext.commitment.commitment = b"invalid commitment v3".to_vec(); +// }, +// extension::HeaderExtension::V4(ref mut ext) => { +// ext.commitment.commitment = b"invalid commitment v4".to_vec(); +// }, +// }; + +// (header, hash) +// } + +// fn corrupted_kate_data_root(header_and_hash: (THeader, H256)) -> (THeader, H256) { +// let (mut header, hash) = header_and_hash; + +// match header.extension { +// extension::HeaderExtension::V3(ref mut ext) => { +// ext.commitment.data_root = H256::repeat_byte(2u8); +// }, +// extension::HeaderExtension::V4(ref mut ext) => { +// ext.commitment.data_root = H256::repeat_byte(2u8); +// }, +// }; + +// (header, hash) +// } + +// fn corrupted_kate_cols(header_and_hash: (THeader, H256)) -> (THeader, H256) { +// let (mut header, hash) = header_and_hash; + +// match header.extension { +// extension::HeaderExtension::V3(ref mut ext) => { +// ext.commitment.cols += 2; +// }, +// extension::HeaderExtension::V4(ref mut ext) => { +// ext.commitment.cols += 2; +// }, +// }; + +// (header, hash) +// } + +// fn corrupted_kate_rows(header_and_hash: (THeader, H256)) -> (THeader, H256) { +// let (mut header, hash) = header_and_hash; + +// match header.extension { +// extension::HeaderExtension::V3(ref mut ext) => { +// ext.commitment.rows += 2; +// }, +// extension::HeaderExtension::V4(ref mut ext) => { +// ext.commitment.rows += 2; +// }, +// }; + +// (header, hash) +// } + +// fn corrupted_number(mut header_and_hash: (THeader, H256)) -> (THeader, H256) { +// header_and_hash.0.number += 1; +// header_and_hash +// } + +// fn corrupted_state_root(mut header_and_hash: (THeader, H256)) -> (THeader, H256) { +// header_and_hash.0.state_root.0[0] ^= 0xFFu8; +// header_and_hash +// } +// fn corrupted_parent(mut header_and_hash: (THeader, H256)) -> (THeader, H256) { +// header_and_hash.0.parent_hash.0[0] ^= 0xFFu8; +// header_and_hash +// } + +// #[test_case( header() => true ; "Valid header hash")] +// #[test_case( corrupted_kate_commitment(header()) => false; "Corrupted commitment in kate")] +// #[test_case( corrupted_kate_data_root(header()) => false; "Corrupted data root in kate")] +// #[test_case( corrupted_kate_cols(header()) => false; "Corrupted cols in kate")] +// #[test_case( corrupted_kate_rows(header()) => false; "Corrupted rows in kate")] +// #[test_case( corrupted_number(header()) => false )] +// #[test_case( corrupted_state_root(header()) => false )] +// #[test_case( corrupted_parent(header()) => false )] +// fn header_corruption(header_and_hash: (THeader, H256)) -> bool { +// let (header, hash) = header_and_hash; +// header.hash() == hash +// } +// } From 8bc18ff3b519f42b822865bb7354c7314a714e42 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Mon, 1 Dec 2025 16:54:28 +0530 Subject: [PATCH 04/16] move fri_config to avail-core --- Cargo.lock | 1 + 
core/src/fri_config.rs | 41 +++++++++++++++++++++++++++++ core/src/header/extension/fri_v1.rs | 8 +----- core/src/lib.rs | 3 +++ fri/Cargo.toml | 6 ++++- fri/src/config.rs | 12 --------- fri/src/core.rs | 2 +- fri/src/lib.rs | 3 +-- 8 files changed, 53 insertions(+), 23 deletions(-) create mode 100644 core/src/fri_config.rs delete mode 100644 fri/src/config.rs diff --git a/Cargo.lock b/Cargo.lock index 17128be1..fb615f07 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1741,6 +1741,7 @@ dependencies = [ name = "fri" version = "0.1.0" dependencies = [ + "avail-core", "binius-field", "binius-math", "binius-prover", diff --git a/core/src/fri_config.rs b/core/src/fri_config.rs new file mode 100644 index 00000000..3865f3e3 --- /dev/null +++ b/core/src/fri_config.rs @@ -0,0 +1,41 @@ +use codec::{Decode, Encode}; +use scale_info::TypeInfo; + +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; +#[cfg(feature = "runtime")] +use sp_debug_derive::RuntimeDebug; +/// Parameters that Avail config / node code will set. +#[derive(Clone, Copy, Debug)] +pub struct FriParamsConfig { + /// log2(1 / Reed–Solomon code rate). + pub log_inv_rate: usize, + /// Number of FRI test queries (soundness parameter). + pub num_test_queries: usize, + /// log2(number of “shares” / repetitions). + pub log_num_shares: usize, + /// Number of multilinear variables (depends on data size). + pub n_vars: usize, +} + +/// Version of Fri/Binius parameters used to interpret size_bytes into +/// codeword length and sampling domain. +#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, Default, TypeInfo)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "runtime", derive(RuntimeDebug))] +pub struct FriParamsVersion(u8); + +impl FriParamsVersion { + /// Map this version to a FriParamsConfig, given `n_vars` + pub fn to_config(self, n_vars: usize) -> FriParamsConfig { + match self.0 { + 0 => FriParamsConfig { + log_inv_rate: 1, + num_test_queries: 128, + log_num_shares: 80, + n_vars, + }, + _ => panic!("Unsupported FriParamsVersion {}", self.0), + } + } +} diff --git a/core/src/header/extension/fri_v1.rs b/core/src/header/extension/fri_v1.rs index 4c21d8f0..9484e649 100644 --- a/core/src/header/extension/fri_v1.rs +++ b/core/src/header/extension/fri_v1.rs @@ -1,3 +1,4 @@ +use crate::FriParamsVersion; use codec::{Decode, Encode}; use primitive_types::H256; use scale_info::TypeInfo; @@ -8,13 +9,6 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "runtime")] use sp_debug_derive::RuntimeDebug; -/// Version of Fri/Binius parameters used to interpret size_bytes into -/// codeword length and sampling domain. -#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, Default, TypeInfo)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "runtime", derive(RuntimeDebug))] -pub struct FriParamsVersion(pub u8); - /// Metadata needed for DA sampling + PCS verification of one blob. 
#[derive(Clone, PartialEq, Eq, Encode, Decode, Default, TypeInfo)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] diff --git a/core/src/lib.rs b/core/src/lib.rs index a416a866..875303ee 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -45,6 +45,9 @@ pub mod traits; pub mod keccak256; pub use keccak256::Keccak256; +pub mod fri_config; +pub use fri_config::*; + pub mod data_proof; pub use data_proof::DataProof; diff --git a/fri/Cargo.toml b/fri/Cargo.toml index 082dcb6e..833e4b96 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -10,8 +10,9 @@ binius-math = { workspace = true } binius-field = { workspace = true } binius-transcript = { workspace = true } -rayon = { version = "1.10", optional = true } +avail-core = { path = "../core", default-features = false } log = { workspace = true, optional = true } +rayon = { version = "1.10", optional = true } [dev-dependencies] rand = { version = "0.9.1", default-features = false, features = ["std", "std_rng"] } @@ -21,6 +22,7 @@ default = ["std"] # This crate is node-side only currently std = [ + "avail-core/std", "log", ] @@ -28,3 +30,5 @@ std = [ parallel = ["std", "rayon"] testing = [] +serde = ["avail-core/serde"] +runtime = ["avail-core/runtime"] diff --git a/fri/src/config.rs b/fri/src/config.rs deleted file mode 100644 index dcb5cd7d..00000000 --- a/fri/src/config.rs +++ /dev/null @@ -1,12 +0,0 @@ -/// Parameters that Avail config / node code will set. -#[derive(Clone, Copy, Debug)] -pub struct FriParamsConfig { - /// log2(1 / Reed–Solomon code rate). - pub log_inv_rate: usize, - /// Number of FRI test queries (soundness parameter). - pub num_test_queries: usize, - /// log2(number of “shares” / repetitions). - pub log_num_shares: usize, - /// Number of multilinear variables (depends on data size). 
- pub n_vars: usize, -} diff --git a/fri/src/core.rs b/fri/src/core.rs index 05ae0fe3..fa9fb640 100644 --- a/fri/src/core.rs +++ b/fri/src/core.rs @@ -1,6 +1,6 @@ -use crate::config::FriParamsConfig; use crate::error::FriBiniusError; use crate::transcript::{Challenger, VerifierTr}; +use avail_core::FriParamsConfig; use binius_field::{ExtensionField, Field, PackedExtension, PackedField}; use binius_math::{ diff --git a/fri/src/lib.rs b/fri/src/lib.rs index 278b714d..c7b4f304 100644 --- a/fri/src/lib.rs +++ b/fri/src/lib.rs @@ -1,4 +1,3 @@ -mod config; mod core; mod encoding; mod error; @@ -9,7 +8,7 @@ mod transcript; #[cfg(test)] mod tests; -pub use config::FriParamsConfig; +pub use avail_core::FriParamsConfig; pub use core::{FriBiniusPCS, FriCommitment, FriContext, FriProof}; pub use encoding::{BytesEncoder, PackedMLE}; pub use error::FriBiniusError; From 25ef6317d542e911ecca3265ef4e955d4496d045 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Mon, 1 Dec 2025 17:52:10 +0530 Subject: [PATCH 05/16] update fri tests --- Cargo.lock | 2 + core/src/fri_config.rs | 2 +- fri/Cargo.toml | 16 +++---- fri/src/tests.rs | 104 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 115 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fb615f07..ddf6c215 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1748,6 +1748,8 @@ dependencies = [ "binius-transcript", "binius-verifier", "log", + "parity-scale-codec", + "primitive-types", "rand 0.9.2", "rayon", ] diff --git a/core/src/fri_config.rs b/core/src/fri_config.rs index 3865f3e3..03f7e83b 100644 --- a/core/src/fri_config.rs +++ b/core/src/fri_config.rs @@ -23,7 +23,7 @@ pub struct FriParamsConfig { #[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, Default, TypeInfo)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "runtime", derive(RuntimeDebug))] -pub struct FriParamsVersion(u8); +pub struct FriParamsVersion(pub u8); impl FriParamsVersion { /// Map this version to a FriParamsConfig, given `n_vars` diff --git a/fri/Cargo.toml b/fri/Cargo.toml index 833e4b96..77063834 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -15,20 +15,20 @@ log = { workspace = true, optional = true } rayon = { version = "1.10", optional = true } [dev-dependencies] +avail-core = { path = "../core", default-features = false, features = ["runtime"]} +codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive", "max-encoded-len"] } rand = { version = "0.9.1", default-features = false, features = ["std", "std_rng"] } +primitive-types = { workspace = true } [features] -default = ["std"] +default = [ "std" ] # This crate is node-side only currently -std = [ - "avail-core/std", - "log", -] +std = [ "avail-core/std", "log" ] # Enable rayon-based parallel code paths -parallel = ["std", "rayon"] +parallel = [ "rayon", "std" ] testing = [] -serde = ["avail-core/serde"] -runtime = ["avail-core/runtime"] +serde = [ "avail-core/serde" ] +runtime = [ "avail-core/runtime" ] diff --git a/fri/src/tests.rs b/fri/src/tests.rs index 9eab2682..decbeaab 100644 --- a/fri/src/tests.rs +++ b/fri/src/tests.rs @@ -1,9 +1,18 @@ #[cfg(test)] mod e2e_tests { use crate::core::FriCommitOutput; + pub use crate::encoding::BytesEncoder; use crate::{e2e_helpers::*, FriBiniusPCS, FriCommitment, FriContext}; use crate::{FriBiniusError, FriParamsConfig}; + use avail_core::header::extension::{ + fri_header::FriHeader, + fri_v1::{FriBlobCommitment, HeaderExtension as FriV1HeaderExtension}, + HeaderExtension as 
CoreHeaderExtension, + }; + use avail_core::FriParamsVersion; use binius_verifier::config::B128; + use codec::{Decode, Encode}; + use primitive_types::H256; use rand::{rngs::StdRng, Rng, SeedableRng}; fn patterned_data(size: usize) -> Vec { @@ -105,6 +114,18 @@ mod e2e_tests { Ok(()) } + #[test] + fn fri_params_version_zero_maps_to_expected_config() { + let v = FriParamsVersion(0); + let n_vars = 17; + let cfg = v.to_config(n_vars); + + assert_eq!(cfg.log_inv_rate, 1); + assert_eq!(cfg.num_test_queries, 128); + assert_eq!(cfg.log_num_shares, 80); + assert_eq!(cfg.n_vars, n_vars); + } + #[test] fn multiple_blobs_da_sampling_succeeds() -> Result<(), FriBiniusError> { // Simulate a block with multiple blobs of different sizes @@ -257,4 +278,87 @@ mod e2e_tests { Ok(()) } + + #[test] + fn fri_header_drives_fri_pcs_end_to_end() { + let blob_size = 1024 * 1024; // 1 MiB + let blob_bytes: Vec = (0..blob_size).map(|i| (i % 251) as u8).collect(); + + let packed = BytesEncoder::::new() + .bytes_to_packed_mle(&blob_bytes) + .expect("bytes_to_packed_mle must succeed"); + let n_vars = packed.total_n_vars; + + let params_version = FriParamsVersion(0); + let cfg = params_version.to_config(n_vars); + + let pcs = FriBiniusPCS::new(cfg); + let ctx = pcs + .initialize_fri_context(&packed.packed_mle) + .expect("initialize_fri_context must succeed"); + + let commit_output = pcs + .commit(&packed.packed_mle, &ctx) + .expect("commit must succeed"); + + // Turn Merkle root into H256 for header storage + let commitment_bytes: [u8; 32] = commit_output + .commitment + .as_slice() + .try_into() + .expect("commitment should be 32 bytes"); + let real_commitment = H256(commitment_bytes); + + // In the real node, data_root would be merkle root of raw blobs; + // here we just fake one for testing. 
+ let data_root = H256::repeat_byte(0xAB); + + let blob_meta = FriBlobCommitment { + size_bytes: blob_size as u64, + commitment: real_commitment, + }; + + let fri_v1_header = FriV1HeaderExtension { + blobs: vec![blob_meta.clone()], + data_root, + params_version, + }; + + // Wrap in versioned FriHeader + top-level HeaderExtension + let core_header = CoreHeaderExtension::Fri(FriHeader::V1(fri_v1_header.clone())); + + let encoded = core_header.encode(); + let decoded = + CoreHeaderExtension::decode(&mut &encoded[..]).expect("SCALE decode must succeed"); + + assert!(decoded.is_fri()); + assert_eq!(decoded.data_root(), data_root); + + // Extract inner Fri v1 header again + let inner = match decoded { + CoreHeaderExtension::Fri(FriHeader::V1(h)) => h, + _ => panic!("expected Fri V1 header"), + }; + + assert_eq!(inner.params_version.0, 0); + assert_eq!(inner.blobs.len(), 1); + assert_eq!(inner.blobs[0].size_bytes, blob_size as u64); + assert_eq!(inner.blobs[0].commitment, real_commitment); + + let mut rng = StdRng::from_seed([7u8; 32]); + let eval_point = pcs.sample_evaluation_point(&mut rng); + + let proof = pcs + .prove( + &packed.packed_values, + &packed.packed_mle, + &ctx, + &commit_output, + &eval_point, + ) + .expect("prove must succeed"); + + pcs.verify(&proof, &ctx) + .expect("Fri evaluation proof must verify"); + } } From b98f93a377109fc901eec8809356062f873b91a3 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Mon, 1 Dec 2025 19:52:52 +0530 Subject: [PATCH 06/16] impl PassBy on HeaderExtension --- core/src/header/extension/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/header/extension/mod.rs b/core/src/header/extension/mod.rs index b2dc7e1f..b0c64a15 100644 --- a/core/src/header/extension/mod.rs +++ b/core/src/header/extension/mod.rs @@ -5,7 +5,7 @@ use scale_info::TypeInfo; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; #[cfg(feature = "runtime")] -use sp_debug_derive::RuntimeDebug; +use {sp_debug_derive::RuntimeDebug, sp_runtime_interface::pass_by::PassByCodec}; pub mod fri_v1; // basically only supported kzg header currently @@ -120,7 +120,7 @@ pub mod fri_header { /// header extension: *which PCS + which version inside*. 
#[derive(PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "runtime", derive(RuntimeDebug))] +#[cfg_attr(feature = "runtime", derive(PassByCodec, RuntimeDebug))] #[cfg_attr(not(feature = "runtime"), derive(Debug))] pub enum HeaderExtension { Kzg(kzg::KzgHeader), From f83a70449713dc7995f0078bd9b232df4d45b12e Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Mon, 1 Dec 2025 19:59:35 +0530 Subject: [PATCH 07/16] update crate name to avail-fri --- Cargo.lock | 34 +++++++++++++++++----------------- fri/Cargo.toml | 2 +- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ddf6c215..41552413 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -547,6 +547,23 @@ dependencies = [ "trybuild", ] +[[package]] +name = "avail-fri" +version = "0.1.0" +dependencies = [ + "avail-core", + "binius-field", + "binius-math", + "binius-prover", + "binius-transcript", + "binius-verifier", + "log", + "parity-scale-codec", + "primitive-types", + "rand 0.9.2", + "rayon", +] + [[package]] name = "backtrace" version = "0.3.69" @@ -1737,23 +1754,6 @@ dependencies = [ "syn 2.0.96", ] -[[package]] -name = "fri" -version = "0.1.0" -dependencies = [ - "avail-core", - "binius-field", - "binius-math", - "binius-prover", - "binius-transcript", - "binius-verifier", - "log", - "parity-scale-codec", - "primitive-types", - "rand 0.9.2", - "rayon", -] - [[package]] name = "fs-err" version = "2.11.0" diff --git a/fri/Cargo.toml b/fri/Cargo.toml index 77063834..39a846d9 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "fri" +name = "avail-fri" version = "0.1.0" edition = "2021" From bbe220646e40cbbc1c939db833922a923c4e37f9 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Tue, 2 Dec 2025 15:12:41 +0530 Subject: [PATCH 08/16] fix impls & exports --- build_test.sh | 7 ++++++- core/src/fri_config.rs | 4 ++-- core/src/header/extension/mod.rs | 2 ++ fri/Cargo.toml | 4 ++-- fri/src/core.rs | 7 +++++-- fri/src/lib.rs | 10 +++++----- 6 files changed, 22 insertions(+), 12 deletions(-) diff --git a/build_test.sh b/build_test.sh index 77a327d0..a112af04 100755 --- a/build_test.sh +++ b/build_test.sh @@ -38,4 +38,9 @@ cargo check --target wasm32-unknown-unknown --no-default-features --features "se # Fri cd ../fri cargo check -cargo check --no-default-features --features "std" \ No newline at end of file +cargo check --no-default-features +cargo check --no-default-features --features "serde" +cargo check --no-default-features --features "std" +cargo check --no-default-features --features "std, serde" +cargo check --target wasm32-unknown-unknown --no-default-features +cargo check --target wasm32-unknown-unknown --no-default-features --features "serde" \ No newline at end of file diff --git a/core/src/fri_config.rs b/core/src/fri_config.rs index 03f7e83b..8b4faf1c 100644 --- a/core/src/fri_config.rs +++ b/core/src/fri_config.rs @@ -4,7 +4,7 @@ use scale_info::TypeInfo; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; #[cfg(feature = "runtime")] -use sp_debug_derive::RuntimeDebug; +use {sp_debug_derive::RuntimeDebug, sp_runtime_interface::pass_by::PassByCodec}; /// Parameters that Avail config / node code will set. #[derive(Clone, Copy, Debug)] pub struct FriParamsConfig { @@ -22,7 +22,7 @@ pub struct FriParamsConfig { /// codeword length and sampling domain. 
#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, Default, TypeInfo)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "runtime", derive(RuntimeDebug))] +#[cfg_attr(feature = "runtime", derive(PassByCodec, RuntimeDebug))] pub struct FriParamsVersion(pub u8); impl FriParamsVersion { diff --git a/core/src/header/extension/mod.rs b/core/src/header/extension/mod.rs index b0c64a15..8d885136 100644 --- a/core/src/header/extension/mod.rs +++ b/core/src/header/extension/mod.rs @@ -17,6 +17,7 @@ pub mod kzg { /// Versioning for KZG header formats. #[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, TypeInfo)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] + #[cfg_attr(feature = "runtime", derive(PassByCodec, RuntimeDebug))] pub enum KzgHeaderVersion { V4, } @@ -69,6 +70,7 @@ pub mod fri_header { /// Versioning for Fri/Binius header formats. #[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, TypeInfo)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] + #[cfg_attr(feature = "runtime", derive(PassByCodec, RuntimeDebug))] pub enum FriHeaderVersion { V1, } diff --git a/fri/Cargo.toml b/fri/Cargo.toml index 39a846d9..87c88285 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -11,7 +11,7 @@ binius-field = { workspace = true } binius-transcript = { workspace = true } avail-core = { path = "../core", default-features = false } -log = { workspace = true, optional = true } +log = { workspace = true, default-features = false } rayon = { version = "1.10", optional = true } [dev-dependencies] @@ -24,7 +24,7 @@ primitive-types = { workspace = true } default = [ "std" ] # This crate is node-side only currently -std = [ "avail-core/std", "log" ] +std = [ "avail-core/std", "log/std" ] # Enable rayon-based parallel code paths parallel = [ "rayon", "std" ] diff --git a/fri/src/core.rs b/fri/src/core.rs index fa9fb640..05d85141 100644 --- a/fri/src/core.rs +++ b/fri/src/core.rs @@ -1,6 +1,5 @@ use crate::error::FriBiniusError; use crate::transcript::{Challenger, VerifierTr}; -use avail_core::FriParamsConfig; use binius_field::{ExtensionField, Field, PackedExtension, PackedField}; use binius_math::{ @@ -20,13 +19,17 @@ use binius_prover::{ }; use binius_transcript::ProverTranscript; use binius_verifier::{ - config::{B1, B128}, + config::B1, fri::FRIParams, hash::{StdCompression, StdDigest}, merkle_tree::MerkleTreeScheme, pcs::verify as fri_verify, }; +// TODO: re-export some of the common types to be sued by downstream +pub use binius_verifier::config::B128; +pub use avail_core::{FriParamsConfig, FriParamsVersion}; + #[cfg(test)] use binius_field::Random; #[cfg(test)] diff --git a/fri/src/lib.rs b/fri/src/lib.rs index c7b4f304..4e967ac4 100644 --- a/fri/src/lib.rs +++ b/fri/src/lib.rs @@ -1,9 +1,9 @@ -mod core; -mod encoding; -mod error; +pub mod core; +pub mod encoding; +pub mod error; #[cfg(feature = "testing")] -mod sampling; -mod transcript; +pub mod sampling; +pub mod transcript; #[cfg(test)] mod tests; From 92d64d4ea0d98a69532dccb692d779dd0ad38d68 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Tue, 2 Dec 2025 22:49:01 +0530 Subject: [PATCH 09/16] refactor: change fri commitment type from H256 to Vec --- core/src/header/extension/fri_v1.rs | 2 +- core/src/header/extension/mod.rs | 27 ++++++++++++++++++++------- fri/src/tests.rs | 13 ++++--------- 3 files changed, 25 insertions(+), 17 deletions(-) diff --git a/core/src/header/extension/fri_v1.rs b/core/src/header/extension/fri_v1.rs index 9484e649..9c3c525f 100644 --- 
a/core/src/header/extension/fri_v1.rs +++ b/core/src/header/extension/fri_v1.rs @@ -18,7 +18,7 @@ pub struct FriBlobCommitment { pub size_bytes: u64, /// Fri PCS commitment (Merkle root of the blob codeword). - pub commitment: H256, + pub commitment: Vec, } /// DA commitment extension — input to LC sampling & verification. diff --git a/core/src/header/extension/mod.rs b/core/src/header/extension/mod.rs index 8d885136..c17d73fc 100644 --- a/core/src/header/extension/mod.rs +++ b/core/src/header/extension/mod.rs @@ -64,7 +64,7 @@ pub mod kzg { } } -pub mod fri_header { +pub mod fri { use super::*; /// Versioning for Fri/Binius header formats. @@ -119,6 +119,12 @@ pub mod fri_header { } } +#[derive(Clone, Copy, Eq, PartialEq, Debug, Encode, Decode, TypeInfo)] +pub enum CommitmentScheme { + Kzg, + Fri, +} + /// header extension: *which PCS + which version inside*. #[derive(PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -126,7 +132,7 @@ pub mod fri_header { #[cfg_attr(not(feature = "runtime"), derive(Debug))] pub enum HeaderExtension { Kzg(kzg::KzgHeader), - Fri(fri_header::FriHeader), + Fri(fri::FriHeader), } impl HeaderExtension { @@ -145,25 +151,32 @@ impl HeaderExtension { matches!(self, HeaderExtension::Fri(_)) } + pub fn commitment_scheme(&self) -> CommitmentScheme { + match self { + HeaderExtension::Fri(_) => CommitmentScheme::Fri, + HeaderExtension::Kzg(_) => CommitmentScheme::Kzg, + } + } + pub fn get_empty_kzg(data_root: H256, version: kzg::KzgHeaderVersion) -> Self { HeaderExtension::Kzg(kzg::KzgHeader::get_empty_header(data_root, version)) } - pub fn get_empty_fri(data_root: H256, version: fri_header::FriHeaderVersion) -> Self { - HeaderExtension::Fri(fri_header::FriHeader::get_empty_header(data_root, version)) + pub fn get_empty_fri(data_root: H256, version: fri::FriHeaderVersion) -> Self { + HeaderExtension::Fri(fri::FriHeader::get_empty_header(data_root, version)) } pub fn get_faulty_kzg(data_root: H256, version: kzg::KzgHeaderVersion) -> Self { HeaderExtension::Kzg(kzg::KzgHeader::get_faulty_header(data_root, version)) } - pub fn get_faulty_fri(data_root: H256, version: fri_header::FriHeaderVersion) -> Self { - HeaderExtension::Fri(fri_header::FriHeader::get_faulty_header(data_root, version)) + pub fn get_faulty_fri(data_root: H256, version: fri::FriHeaderVersion) -> Self { + HeaderExtension::Fri(fri::FriHeader::get_faulty_header(data_root, version)) } } impl Default for HeaderExtension { fn default() -> Self { - HeaderExtension::Fri(fri_header::FriHeader::V1(fri_v1::HeaderExtension::default())) + HeaderExtension::Fri(fri::FriHeader::V1(fri_v1::HeaderExtension::default())) } } diff --git a/fri/src/tests.rs b/fri/src/tests.rs index decbeaab..6f0c0915 100644 --- a/fri/src/tests.rs +++ b/fri/src/tests.rs @@ -5,7 +5,7 @@ mod e2e_tests { use crate::{e2e_helpers::*, FriBiniusPCS, FriCommitment, FriContext}; use crate::{FriBiniusError, FriParamsConfig}; use avail_core::header::extension::{ - fri_header::FriHeader, + fri::FriHeader, fri_v1::{FriBlobCommitment, HeaderExtension as FriV1HeaderExtension}, HeaderExtension as CoreHeaderExtension, }; @@ -302,12 +302,7 @@ mod e2e_tests { .expect("commit must succeed"); // Turn Merkle root into H256 for header storage - let commitment_bytes: [u8; 32] = commit_output - .commitment - .as_slice() - .try_into() - .expect("commitment should be 32 bytes"); - let real_commitment = H256(commitment_bytes); + let commitment_bytes = commit_output.commitment.clone(); // In the real node, 
data_root would be merkle root of raw blobs; // here we just fake one for testing. @@ -315,7 +310,7 @@ mod e2e_tests { let blob_meta = FriBlobCommitment { size_bytes: blob_size as u64, - commitment: real_commitment, + commitment: commitment_bytes.clone(), }; let fri_v1_header = FriV1HeaderExtension { @@ -343,7 +338,7 @@ mod e2e_tests { assert_eq!(inner.params_version.0, 0); assert_eq!(inner.blobs.len(), 1); assert_eq!(inner.blobs[0].size_bytes, blob_size as u64); - assert_eq!(inner.blobs[0].commitment, real_commitment); + assert_eq!(inner.blobs[0].commitment, commitment_bytes); let mut rng = StdRng::from_seed([7u8; 32]); let eval_point = pcs.sample_evaluation_point(&mut rng); From 26a798096db0317361c785fc6ee5c68d37910d38 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Thu, 4 Dec 2025 08:32:46 +0530 Subject: [PATCH 10/16] fmt --- core/src/header/extension/mod.rs | 4 ++-- fri/src/core.rs | 16 ++++++++-------- fri/src/tests.rs | 17 ++++++----------- 3 files changed, 16 insertions(+), 21 deletions(-) diff --git a/core/src/header/extension/mod.rs b/core/src/header/extension/mod.rs index c17d73fc..c515187a 100644 --- a/core/src/header/extension/mod.rs +++ b/core/src/header/extension/mod.rs @@ -121,8 +121,8 @@ pub mod fri { #[derive(Clone, Copy, Eq, PartialEq, Debug, Encode, Decode, TypeInfo)] pub enum CommitmentScheme { - Kzg, - Fri, + Kzg, + Fri, } /// header extension: *which PCS + which version inside*. diff --git a/fri/src/core.rs b/fri/src/core.rs index 05d85141..eb0deb08 100644 --- a/fri/src/core.rs +++ b/fri/src/core.rs @@ -27,8 +27,8 @@ use binius_verifier::{ }; // TODO: re-export some of the common types to be sued by downstream -pub use binius_verifier::config::B128; pub use avail_core::{FriParamsConfig, FriParamsVersion}; +pub use binius_verifier::config::B128; #[cfg(test)] use binius_field::Random; @@ -49,13 +49,13 @@ pub type MerkleCommitted = = CommitOutput, MerkleCommitted<

::Scalar>>; -/// Commitment object that we can serialize. +/// Commitment #[derive(Clone, Debug)] pub struct FriCommitment { pub digest: [u8; 32], } -/// Evaluation proof object that we can propagate. +/// Evaluation proof #[derive(Clone, Debug)] pub struct FriProof { pub commitment: FriCommitment, @@ -177,10 +177,10 @@ impl FriBiniusPCS { where P: PackedField + PackedExtension + PackedExtension, { - // 1) Compute evaluation claim from scalar values + // Compute evaluation claim from scalar values let evaluation_claim = self.calculate_evaluation_claim(values, evaluation_point)?; - // 2) Set up PCS prover and transcript + // Set up PCS prover and transcript let pcs = OneBitPCSProver::new(&ctx.ntt, &self.merkle_prover, &ctx.fri_params); let mut prover_transcript = ProverTranscript::new(Challenger::default()); @@ -189,7 +189,7 @@ impl FriBiniusPCS { .message() .write_bytes(&commit_output.commitment); - // 3) Run FRI proof generation + // Run FRI proof generation pcs.prove( &commit_output.codeword, &commit_output.committed, @@ -199,11 +199,11 @@ impl FriBiniusPCS { ) .map_err(|e| FriBiniusError::Proof(e.to_string()))?; - // 4) Turn prover transcript into verifier transcript and serialize it + // Turn prover transcript into verifier transcript and serialize it let verifier_transcript: VerifierTr = prover_transcript.into_verifier(); let transcript_bytes = crate::transcript::transcript_to_bytes(&verifier_transcript); - // 5) Extract commitment digest as [u8; 32] + // Extract commitment digest as [u8; 32] let digest: [u8; 32] = commit_output .commitment .as_slice() diff --git a/fri/src/tests.rs b/fri/src/tests.rs index 6f0c0915..3550699f 100644 --- a/fri/src/tests.rs +++ b/fri/src/tests.rs @@ -45,19 +45,15 @@ mod e2e_tests { log_inv_rate: 1, num_test_queries: 128, log_num_shares: 8, - n_vars: 0, // auto-filled from data + n_vars: 0, }; let mut rng = StdRng::from_seed([1u8; 32]); - - // --- 1. Commit from bytes --- let (pcs, ctx, packed, commit_output, _commitment) = commit_bytes(cfg, &data)?; - // --- 2. Sample evaluation point & compute claim --- let eval_point = pcs.sample_evaluation_point(&mut rng); let eval_claim = pcs.calculate_evaluation_claim(&packed.packed_values, &eval_point)?; - // --- 3. Prove (full control) --- let proof = pcs.prove::( &packed.packed_values, &packed.packed_mle, @@ -66,7 +62,6 @@ mod e2e_tests { &eval_point, )?; - // --- 4. Verify --- pcs.verify(&proof, &ctx)?; // Sanity: proof carries same claim we computed locally @@ -141,7 +136,7 @@ mod e2e_tests { let mut rng = StdRng::seed_from_u64(42); - // "Block producer" side: compute commitments for each blob + // Block producer side: compute commitments for each blob struct BlobState { #[allow(dead_code)] data: Vec, // original blob bytes (node-only) @@ -168,7 +163,7 @@ mod e2e_tests { }); } - // "Light client" side: + // Light client side: // - sees the per-blob commitments + blob sizes (Either from the header or from SummaryTxPostInherent) // - wants to randomly sample cells in each blob's codeword // and verify Merkle inclusion proofs. 
@@ -236,13 +231,13 @@ mod e2e_tests { let honest_value = commit_output.codeword[idx]; - // --- 1) Honest proof should verify --- + // Honest proof should verify { let mut transcript = pcs.inclusion_proof::(&commit_output.committed, idx)?; pcs.verify_inclusion_proof(&mut transcript, &[honest_value], idx, &ctx, &commitment)?; } - // --- 2) Corrupted value should fail --- + // Corrupted value should fail { let mut transcript = pcs.inclusion_proof::(&commit_output.committed, idx)?; let mut bad_value = honest_value; @@ -255,7 +250,7 @@ mod e2e_tests { assert!(res.is_err(), "verification should fail for corrupted value"); } - // --- 3) Corrupted commitment should fail --- + // Corrupted commitment should fail { let mut transcript = pcs.inclusion_proof::(&commit_output.committed, idx)?; From aaa537b8b5ea1ef43f8e13d5b67c0282fbc80808 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Thu, 4 Dec 2025 22:39:59 +0530 Subject: [PATCH 11/16] Update initialize_fri_context interface --- fri/src/core.rs | 12 +++++++----- fri/src/lib.rs | 16 ++++++++-------- fri/src/tests.rs | 2 +- 3 files changed, 16 insertions(+), 14 deletions(-) diff --git a/fri/src/core.rs b/fri/src/core.rs index eb0deb08..f43a9270 100644 --- a/fri/src/core.rs +++ b/fri/src/core.rs @@ -87,21 +87,23 @@ impl FriBiniusPCS { pub fn initialize_fri_context

<P>(
		&self,
-		packed_buffer: &FieldBuffer<P>
, + mle_log_len: usize, ) -> Result where P: PackedField + PackedExtension + PackedExtension, { - let committed_rs_code = - ReedSolomonCode::::new(packed_buffer.log_len(), self.cfg.log_inv_rate) - .map_err(|e| FriBiniusError::ReedSolomonInit(e.to_string()))?; + // Reed–Solomon code over B128; parameterized only by log-length + inv-rate. + let committed_rs_code = ReedSolomonCode::::new(mle_log_len, self.cfg.log_inv_rate) + .map_err(|e| FriBiniusError::ReedSolomonInit(e.to_string()))?; let fri_log_batch_size = 0; + // FRI arities depend on packing width and log-length, not the data itself. let fri_arities = if P::LOG_WIDTH == 2 { + // small-width special case vec![2, 2] } else { - vec![2; packed_buffer.log_len() / 2] + vec![2; mle_log_len / 2] }; let fri_params = FRIParams::new( diff --git a/fri/src/lib.rs b/fri/src/lib.rs index 4e967ac4..6c39b7b0 100644 --- a/fri/src/lib.rs +++ b/fri/src/lib.rs @@ -42,18 +42,18 @@ pub mod e2e_helpers { ), FriBiniusError, > { - // 1. bytes -> packed MLE + // bytes -> packed MLE let encoder = BytesEncoder::::new(); let packed = encoder.bytes_to_packed_mle(data)?; - // 2. fill in n_vars from data (UX: user doesn't have to know it) + // fill in n_vars from data (UX: user doesn't have to know it) cfg.n_vars = packed.total_n_vars; - // 3. PCS + FRI context + // PCS + FRI context let pcs = FriBiniusPCS::new(cfg); - let ctx = pcs.initialize_fri_context(&packed.packed_mle)?; + let ctx = pcs.initialize_fri_context::(packed.packed_mle.log_len())?; - // 4. Commit + // Commit let commit_output = pcs.commit::(&packed.packed_mle, &ctx)?; let digest: [u8; 32] = commit_output .commitment @@ -74,10 +74,10 @@ pub mod e2e_helpers { ) -> Result<(), FriBiniusError> { let (pcs, ctx, packed, commit_output, _commitment) = commit_bytes(cfg, data)?; - // 1. Sample evaluation point + // Sample evaluation point let eval_point = pcs.sample_evaluation_point(rng); - // 2. Generate proof + // Generate proof let proof = pcs.prove::( &packed.packed_values, &packed.packed_mle, @@ -86,7 +86,7 @@ pub mod e2e_helpers { &eval_point, )?; - // 3. 
Verify + // Verify pcs.verify(&proof, &ctx) } } diff --git a/fri/src/tests.rs b/fri/src/tests.rs index 3550699f..69dc5e22 100644 --- a/fri/src/tests.rs +++ b/fri/src/tests.rs @@ -289,7 +289,7 @@ mod e2e_tests { let pcs = FriBiniusPCS::new(cfg); let ctx = pcs - .initialize_fri_context(&packed.packed_mle) + .initialize_fri_context::(packed.packed_mle.log_len()) .expect("initialize_fri_context must succeed"); let commit_output = pcs From b81bf90954ed22b74ad7f6fc3449f14ab21ec7f8 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Tue, 9 Dec 2025 14:33:37 +0530 Subject: [PATCH 12/16] Add benchmarking for fri-pcs --- Cargo.lock | 108 ++++++++++++-- Cargo.toml | 10 +- fri/Cargo.toml | 8 ++ fri/benches/fri_benches.rs | 287 +++++++++++++++++++++++++++++++++++++ fri/src/core.rs | 6 +- fri/src/lib.rs | 2 +- 6 files changed, 403 insertions(+), 18 deletions(-) create mode 100644 fri/benches/fri_benches.rs diff --git a/Cargo.lock b/Cargo.lock index 41552413..ed013a5b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -115,6 +115,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "anstyle" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" + [[package]] name = "anyhow" version = "1.0.79" @@ -451,6 +457,15 @@ version = "6.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f840fb7195bcfc5e17ea40c26e5ce6d5b9ce5d584466e17703209657e459ae0" +[[package]] +name = "array-util" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e509844de8f09b90a2c3444684a2b6695f4071360e13d2fda0af9f749cc2ed6" +dependencies = [ + "arrayvec 0.7.4", +] + [[package]] name = "arrayref" version = "0.3.7" @@ -557,6 +572,7 @@ dependencies = [ "binius-prover", "binius-transcript", "binius-verifier", + "divan", "log", "parity-scale-codec", "primitive-types", @@ -639,7 +655,7 @@ dependencies = [ [[package]] name = "binius-core" version = "0.1.0" -source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +source = "git+https://github.com/binius-zk/binius64.git?rev=41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896#41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" dependencies = [ "binius-utils", "bytemuck", @@ -650,7 +666,7 @@ dependencies = [ [[package]] name = "binius-field" version = "0.1.0" -source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +source = "git+https://github.com/binius-zk/binius64.git?rev=41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896#41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" dependencies = [ "binius-utils", "bytemuck", @@ -664,7 +680,7 @@ dependencies = [ [[package]] name = "binius-math" version = "0.1.0" -source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +source = "git+https://github.com/binius-zk/binius64.git?rev=41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896#41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" dependencies = [ "binius-field", "binius-utils", @@ -680,7 +696,7 @@ dependencies = [ [[package]] name = "binius-prover" version = "0.1.0" -source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +source = 
"git+https://github.com/binius-zk/binius64.git?rev=41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896#41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" dependencies = [ "binius-core", "binius-field", @@ -703,7 +719,7 @@ dependencies = [ [[package]] name = "binius-transcript" version = "0.1.0" -source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +source = "git+https://github.com/binius-zk/binius64.git?rev=41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896#41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" dependencies = [ "auto_impl", "binius-field", @@ -717,8 +733,9 @@ dependencies = [ [[package]] name = "binius-utils" version = "0.1.0" -source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +source = "git+https://github.com/binius-zk/binius64.git?rev=41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896#41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" dependencies = [ + "array-util", "bytemuck", "bytes", "cfg-if", @@ -732,7 +749,7 @@ dependencies = [ [[package]] name = "binius-verifier" version = "0.1.0" -source = "git+https://github.com/binius-zk/binius64.git?rev=fe3e14a87dd59de1baf68505f98dc9cf7bf8595c#fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" +source = "git+https://github.com/binius-zk/binius64.git?rev=41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896#41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" dependencies = [ "binius-core", "binius-field", @@ -1025,11 +1042,31 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ "bitflags 1.3.2", - "clap_lex", + "clap_lex 0.2.4", "indexmap 1.9.3", "textwrap", ] +[[package]] +name = "clap" +version = "4.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0956a43b323ac1afaffc053ed5c4b7c1f1800bacd1683c353aabbb752515dd3" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d72166dd41634086d5803a47eb71ae740e61d84709c36f3c34110173db3961b" +dependencies = [ + "anstyle", + "clap_lex 0.7.6", + "terminal_size", +] + [[package]] name = "clap_lex" version = "0.2.4" @@ -1039,6 +1076,12 @@ dependencies = [ "os_str_bytes", ] +[[package]] +name = "clap_lex" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" + [[package]] name = "common" version = "0.1.0" @@ -1060,6 +1103,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2382f75942f4b3be3690fe4f86365e9c853c1587d6ee58212cebf6e2a9ccd101" +[[package]] +name = "condtype" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf0a07a401f374238ab8e2f11a104d2851bf9ce711ec69804834de8af45c7af" + [[package]] name = "const-oid" version = "0.9.6" @@ -1156,7 +1205,7 @@ dependencies = [ "atty", "cast", "ciborium", - "clap", + "clap 3.2.25", "criterion-plot", "itertools 0.10.5", "lazy_static", @@ -1371,6 +1420,31 @@ dependencies = [ "subtle", ] +[[package]] +name = "divan" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a405457ec78b8fe08b0e32b4a3570ab5dff6dd16eb9e76a5ee0a9d9cbd898933" +dependencies = [ + "cfg-if", + "clap 4.5.18", + "condtype", + "divan-macros", + "libc", + "regex-lite", +] + +[[package]] 
+name = "divan-macros" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9556bc800956545d6420a640173e5ba7dfa82f38d3ea5a167eb555bc69ac3323" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + [[package]] name = "dleq_vrf" version = "0.0.2" @@ -3158,6 +3232,12 @@ dependencies = [ "regex-syntax 0.8.2", ] +[[package]] +name = "regex-lite" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d942b98df5e658f56f20d592c7f868833fe38115e65c33003d8cd224b0155da" + [[package]] name = "regex-syntax" version = "0.6.29" @@ -4150,6 +4230,16 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "terminal_size" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" +dependencies = [ + "rustix 0.38.31", + "windows-sys 0.48.0", +] + [[package]] name = "test-case" version = "1.2.3" diff --git a/Cargo.toml b/Cargo.toml index 421b6f45..d8c48540 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,11 +51,11 @@ poly-multiproof = { git = "https://github.com/availproject/poly-multiproof", rev hash-db = { version = "0.16.0", default-features = false } # fri -binius-prover = { git = "https://github.com/binius-zk/binius64.git", rev = "fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" } -binius-verifier = { git = "https://github.com/binius-zk/binius64.git", rev = "fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" } -binius-math = { git = "https://github.com/binius-zk/binius64.git", rev = "fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" } -binius-field = { git = "https://github.com/binius-zk/binius64.git", rev = "fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" } -binius-transcript = { git = "https://github.com/binius-zk/binius64.git", rev = "fe3e14a87dd59de1baf68505f98dc9cf7bf8595c" } +binius-prover = { git = "https://github.com/binius-zk/binius64.git", rev = "41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" } +binius-verifier = { git = "https://github.com/binius-zk/binius64.git", rev = "41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" } +binius-math = { git = "https://github.com/binius-zk/binius64.git", rev = "41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" } +binius-field = { git = "https://github.com/binius-zk/binius64.git", rev = "41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" } +binius-transcript = { git = "https://github.com/binius-zk/binius64.git", rev = "41cda4a3eeb3fcb57bcd324e20a0ffe0b653f896" } # Others rayon = "1.5.2" diff --git a/fri/Cargo.toml b/fri/Cargo.toml index 87c88285..3fc10a6f 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -13,10 +13,12 @@ binius-transcript = { workspace = true } avail-core = { path = "../core", default-features = false } log = { workspace = true, default-features = false } rayon = { version = "1.10", optional = true } +rand = { version = "0.9.1", default-features = false, features = ["std", "std_rng"], optional = true} [dev-dependencies] avail-core = { path = "../core", default-features = false, features = ["runtime"]} codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive", "max-encoded-len"] } +divan = { version = "0.1"} rand = { version = "0.9.1", default-features = false, features = ["std", "std_rng"] } primitive-types = { workspace = true } @@ -29,6 +31,12 @@ std = [ "avail-core/std", "log/std" ] # Enable rayon-based parallel code paths parallel = [ "rayon", "std" ] +bench = [ "std", "rand" ] testing = [] serde = [ "avail-core/serde" ] runtime = [ 
"avail-core/runtime" ] + +[[bench]] +name = "fri_benches" +harness = false +required-features = ["bench"] diff --git a/fri/benches/fri_benches.rs b/fri/benches/fri_benches.rs new file mode 100644 index 00000000..af28d4ab --- /dev/null +++ b/fri/benches/fri_benches.rs @@ -0,0 +1,287 @@ +#![allow(clippy::needless_pass_by_value)] + +use std::sync::Arc; +use avail_core::FriParamsVersion; +use avail_fri::{ + core::FriCommitOutput, e2e_helpers::commit_bytes, FriBiniusPCS, FriCommitment, FriContext, + PackedMLE, +}; +use binius_verifier::config::B128; +use divan::{black_box, Bencher}; +use rand::{rngs::StdRng, Rng, SeedableRng}; + +/// Blob sizes in MiB. +const SIZES_MB: &[usize] = &[2, 4, 8, 16, 32]; + +fn patterned_data(size: usize) -> Vec { + (0..size).map(|i| (i % 251) as u8).collect() +} + +/// Helper: setup PCS, context, packed MLE, commit_output, commitment for a given byte size. +fn setup_for_size( + size_bytes: usize, +) -> ( + FriBiniusPCS, + FriContext, + PackedMLE, + FriCommitOutput, + FriCommitment, +) { + let data = patterned_data(size_bytes); + + let params_version = FriParamsVersion(0); + let cfg = params_version.to_config(0); + + commit_bytes(cfg, &data).expect("commit_bytes must succeed") +} + +// commitment generation +fn fri_commit_for_size(bencher: Bencher, mb: usize) { + let size_bytes = mb * 1024 * 1024; + + bencher.bench_local(|| { + let data = patterned_data(size_bytes); + + let params_version = FriParamsVersion(0); + let cfg = params_version.to_config(0); + + let _ = commit_bytes(cfg, &data).expect("commit_bytes must succeed"); + black_box(()); + }); +} + +#[divan::bench(name = "fri_commit_2_mib")] +fn fri_commit_2_mib(bencher: Bencher) { + fri_commit_for_size(bencher, 2); +} + +#[divan::bench(name = "fri_commit_4_mib")] +fn fri_commit_4_mib(bencher: Bencher) { + fri_commit_for_size(bencher, 4); +} + +#[divan::bench(name = "fri_commit_8_mib")] +fn fri_commit_8_mib(bencher: Bencher) { + fri_commit_for_size(bencher, 8); +} + +#[divan::bench(name = "fri_commit_16_mib")] +fn fri_commit_16_mib(bencher: Bencher) { + fri_commit_for_size(bencher, 16); +} + +#[divan::bench(name = "fri_commit_32_mib")] +fn fri_commit_32_mib(bencher: Bencher) { + fri_commit_for_size(bencher, 32); +} + +// sampling prrof generation + +fn fri_sampling_proof_for_size(bencher: Bencher, mb: usize) { + let size_bytes = mb * 1024 * 1024; + + let (pcs, ctx, _packed, commit_output, _commitment) = setup_for_size(size_bytes); + let codeword_len = commit_output.codeword.len(); + let idx = codeword_len / 2; // middle cell + + bencher.bench_local(|| { + let mut transcript = pcs + .inclusion_proof::(&commit_output.committed, idx) + .expect("inclusion_proof must succeed"); + + black_box(&mut transcript); + black_box(&ctx); + }); +} + +#[divan::bench(name = "fri_sampling_proof_2_mib")] +fn fri_sampling_proof_2_mib(bencher: Bencher) { + fri_sampling_proof_for_size(bencher, 2); +} + +#[divan::bench(name = "fri_sampling_proof_4_mib")] +fn fri_sampling_proof_4_mib(bencher: Bencher) { + fri_sampling_proof_for_size(bencher, 4); +} + +#[divan::bench(name = "fri_sampling_proof_8_mib")] +fn fri_sampling_proof_8_mib(bencher: Bencher) { + fri_sampling_proof_for_size(bencher, 8); +} + +#[divan::bench(name = "fri_sampling_proof_16_mib")] +fn fri_sampling_proof_16_mib(bencher: Bencher) { + fri_sampling_proof_for_size(bencher, 16); +} + +#[divan::bench(name = "fri_sampling_proof_32_mib")] +fn fri_sampling_proof_32_mib(bencher: Bencher) { + fri_sampling_proof_for_size(bencher, 32); +} + +// sampling proof verification + +fn 
fri_sampling_verify_for_size(bencher: Bencher, mb: usize) {
+	let size_bytes = mb * 1024 * 1024;
+
+	let (pcs, ctx, _packed, commit_output, commitment) = setup_for_size(size_bytes);
+	let codeword_len = commit_output.codeword.len();
+	let idx = codeword_len / 2;
+	let value = commit_output.codeword[idx];
+
+	let base_transcript = pcs
+		.inclusion_proof::(&commit_output.committed, idx)
+		.expect("proof");
+
+	bencher.bench_local(|| {
+		// clone transcript to avoid re-proving inside loop
+		let mut tr = base_transcript.clone();
+		pcs.verify_inclusion_proof(&mut tr, &[value], idx, &ctx, &commitment)
+			.expect("verify");
+		black_box(tr);
+	});
+}
+
+#[divan::bench(name = "fri_sampling_verify_2_mib")]
+fn fri_sampling_verify_2_mib(bencher: Bencher) {
+	fri_sampling_verify_for_size(bencher, 2);
+}
+
+#[divan::bench(name = "fri_sampling_verify_4_mib")]
+fn fri_sampling_verify_4_mib(bencher: Bencher) {
+	fri_sampling_verify_for_size(bencher, 4);
+}
+
+#[divan::bench(name = "fri_sampling_verify_8_mib")]
+fn fri_sampling_verify_8_mib(bencher: Bencher) {
+	fri_sampling_verify_for_size(bencher, 8);
+}
+
+#[divan::bench(name = "fri_sampling_verify_16_mib")]
+fn fri_sampling_verify_16_mib(bencher: Bencher) {
+	fri_sampling_verify_for_size(bencher, 16);
+}
+
+#[divan::bench(name = "fri_sampling_verify_32_mib")]
+fn fri_sampling_verify_32_mib(bencher: Bencher) {
+	fri_sampling_verify_for_size(bencher, 32);
+}
+
+// evaluation proof generation
+
+fn fri_eval_prove_for_size(bencher: Bencher, mb: usize) {
+	let size_bytes = mb * 1024 * 1024;
+
+	let (pcs, ctx, packed, commit_output, _commitment) = setup_for_size(size_bytes);
+	let mut rng = StdRng::from_seed([7u8; 32]);
+	let eval_point = pcs.sample_evaluation_point(&mut rng);
+
+	let packed_values = Arc::new(packed.packed_values);
+	let packed_mle = Arc::new(packed.packed_mle);
+
+	bencher.bench_local(|| {
+		let proof = pcs
+			.prove::(
+				&packed_values,
+				&packed_mle,
+				&ctx,
+				&commit_output,
+				&eval_point,
+			)
+			.expect("prove");
+		black_box(proof);
+	});
+}
+
+#[divan::bench(name = "fri_eval_prove_2_mib", max_time = 30)]
+fn fri_eval_prove_2_mib(bencher: Bencher) {
+	fri_eval_prove_for_size(bencher, 2);
+}
+
+#[divan::bench(name = "fri_eval_prove_4_mib", max_time = 30)]
+fn fri_eval_prove_4_mib(bencher: Bencher) {
+	fri_eval_prove_for_size(bencher, 4);
+}
+
+#[divan::bench(name = "fri_eval_prove_8_mib", max_time = 30)]
+fn fri_eval_prove_8_mib(bencher: Bencher) {
+	fri_eval_prove_for_size(bencher, 8);
+}
+
+#[divan::bench(name = "fri_eval_prove_16_mib", max_time = 30)]
+fn fri_eval_prove_16_mib(bencher: Bencher) {
+	fri_eval_prove_for_size(bencher, 16);
+}
+
+#[divan::bench(name = "fri_eval_prove_32_mib", max_time = 30)]
+fn fri_eval_prove_32_mib(bencher: Bencher) {
+	fri_eval_prove_for_size(bencher, 32);
+}
+
+// evaluation proof verification
+
+fn fri_eval_verify_for_size(bencher: Bencher, mb: usize) {
+	let size_bytes = mb * 1024 * 1024;
+
+	let (pcs, ctx, packed, commit_output, _commitment) = setup_for_size(size_bytes);
+	let mut rng = StdRng::from_seed([8u8; 32]);
+	let eval_point = pcs.sample_evaluation_point(&mut rng);
+
+	let proof = pcs
+		.prove::(
+			&packed.packed_values,
+			&packed.packed_mle,
+			&ctx,
+			&commit_output,
+			&eval_point,
+		)
+		.expect("prove");
+
+	// Compute approximate proof size.
+ let fp_size = std::mem::size_of::(); + let eval_point_bytes = proof.evaluation_point.len() * fp_size; + let eval_claim_bytes = fp_size; + let commitment_bytes = proof.commitment.digest.len(); + let transcript_bytes = proof.transcript_bytes.len(); + let total_bytes = commitment_bytes + eval_point_bytes + eval_claim_bytes + transcript_bytes; + + println!( + "FRI eval proof size for {:>2} MiB blob ≈ {} bytes \ + (commitment={}, eval_point={}, claim={}, transcript={})", + mb, total_bytes, commitment_bytes, eval_point_bytes, eval_claim_bytes, transcript_bytes + ); + + bencher.bench_local(|| { + pcs.verify(&proof, &ctx).expect("verify"); + black_box(&proof); + }); +} + +#[divan::bench(name = "fri_eval_verify_2_mib")] +fn fri_eval_verify_2_mib(bencher: Bencher) { + fri_eval_verify_for_size(bencher, 2); +} + +#[divan::bench(name = "fri_eval_verify_4_mib")] +fn fri_eval_verify_4_mib(bencher: Bencher) { + fri_eval_verify_for_size(bencher, 4); +} + +#[divan::bench(name = "fri_eval_verify_8_mib")] +fn fri_eval_verify_8_mib(bencher: Bencher) { + fri_eval_verify_for_size(bencher, 8); +} + +#[divan::bench(name = "fri_eval_verify_16_mib")] +fn fri_eval_verify_16_mib(bencher: Bencher) { + fri_eval_verify_for_size(bencher, 16); +} + +#[divan::bench(name = "fri_eval_verify_32_mib")] +fn fri_eval_verify_32_mib(bencher: Bencher) { + fri_eval_verify_for_size(bencher, 32); +} + +fn main() { + divan::main(); +} diff --git a/fri/src/core.rs b/fri/src/core.rs index f43a9270..357bf365 100644 --- a/fri/src/core.rs +++ b/fri/src/core.rs @@ -30,9 +30,9 @@ use binius_verifier::{ pub use avail_core::{FriParamsConfig, FriParamsVersion}; pub use binius_verifier::config::B128; -#[cfg(test)] +#[cfg(any(test, feature = "bench"))] use binius_field::Random; -#[cfg(test)] +#[cfg(any(test, feature = "bench"))] use rand::{CryptoRng, RngCore}; // Concrete merkle prover type we’ll use everywhere. 
@@ -123,7 +123,7 @@ impl FriBiniusPCS { Ok(FriContext { fri_params, ntt }) } - #[cfg(test)] + #[cfg(any(test, feature = "bench"))] pub fn sample_evaluation_point(&self, rng: &mut R) -> Vec { let mut point = Vec::with_capacity(self.cfg.n_vars); for _ in 0..self.cfg.n_vars { diff --git a/fri/src/lib.rs b/fri/src/lib.rs index 6c39b7b0..de682f19 100644 --- a/fri/src/lib.rs +++ b/fri/src/lib.rs @@ -16,7 +16,7 @@ pub use error::FriBiniusError; pub use sampling::reconstruct_codeword_naive; pub use transcript::{transcript_from_bytes, transcript_to_bytes, VerifierTr}; -#[cfg(test)] +#[cfg(any(test, feature = "bench"))] pub mod e2e_helpers { use crate::core::FriCommitOutput; From 01553deecc2fbd83f18d7db7287b805a8e2d1b19 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Tue, 9 Dec 2025 23:10:09 +0530 Subject: [PATCH 13/16] Update evaluation proof & verify interface --- Cargo.toml | 3 + fri/benches/fri_benches.rs | 130 +++++++++++++++++++++++-------------- fri/src/core.rs | 56 +++++++--------- fri/src/lib.rs | 48 ++++++-------- fri/src/tests.rs | 28 +++----- 5 files changed, 139 insertions(+), 126 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index d8c48540..4687220a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,6 +2,9 @@ resolver = "2" members = ["core", "kate/recovery", "kate", "fri"] +[profile.release] +lto = "thin" + [workspace.dependencies] # Parity codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive", "max-encoded-len"] } diff --git a/fri/benches/fri_benches.rs b/fri/benches/fri_benches.rs index af28d4ab..475c3e46 100644 --- a/fri/benches/fri_benches.rs +++ b/fri/benches/fri_benches.rs @@ -1,6 +1,5 @@ #![allow(clippy::needless_pass_by_value)] -use std::sync::Arc; use avail_core::FriParamsVersion; use avail_fri::{ core::FriCommitOutput, e2e_helpers::commit_bytes, FriBiniusPCS, FriCommitment, FriContext, @@ -8,10 +7,7 @@ use avail_fri::{ }; use binius_verifier::config::B128; use divan::{black_box, Bencher}; -use rand::{rngs::StdRng, Rng, SeedableRng}; - -/// Blob sizes in MiB. 
-const SIZES_MB: &[usize] = &[2, 4, 8, 16, 32]; +use rand::{rngs::StdRng, SeedableRng}; fn patterned_data(size: usize) -> Vec { (0..size).map(|i| (i % 251) as u8).collect() @@ -50,27 +46,27 @@ fn fri_commit_for_size(bencher: Bencher, mb: usize) { }); } -#[divan::bench(name = "fri_commit_2_mib")] +#[divan::bench(max_time = 10)] fn fri_commit_2_mib(bencher: Bencher) { fri_commit_for_size(bencher, 2); } -#[divan::bench(name = "fri_commit_4_mib")] +#[divan::bench(max_time = 10)] fn fri_commit_4_mib(bencher: Bencher) { fri_commit_for_size(bencher, 4); } -#[divan::bench(name = "fri_commit_8_mib")] +#[divan::bench(max_time = 10)] fn fri_commit_8_mib(bencher: Bencher) { fri_commit_for_size(bencher, 8); } -#[divan::bench(name = "fri_commit_16_mib")] +#[divan::bench(max_time = 10)] fn fri_commit_16_mib(bencher: Bencher) { fri_commit_for_size(bencher, 16); } -#[divan::bench(name = "fri_commit_32_mib")] +#[divan::bench(max_time = 10)] fn fri_commit_32_mib(bencher: Bencher) { fri_commit_for_size(bencher, 32); } @@ -142,27 +138,27 @@ fn fri_sampling_verify_for_size(bencher: Bencher, mb: usize) { }); } -#[divan::bench(name = "fri_sampling_verify_2_mib")] +#[divan::bench(max_time = 10)] fn fri_sampling_verify_2_mib(bencher: Bencher) { fri_sampling_verify_for_size(bencher, 2); } -#[divan::bench(name = "fri_sampling_verify_4_mib")] +#[divan::bench(max_time = 10)] fn fri_sampling_verify_4_mib(bencher: Bencher) { fri_sampling_verify_for_size(bencher, 4); } -#[divan::bench(name = "fri_sampling_verify_8_mib")] +#[divan::bench(max_time = 10)] fn fri_sampling_verify_8_mib(bencher: Bencher) { fri_sampling_verify_for_size(bencher, 8); } -#[divan::bench(name = "fri_sampling_verify_16_mib")] +#[divan::bench(max_time = 10)] fn fri_sampling_verify_16_mib(bencher: Bencher) { fri_sampling_verify_for_size(bencher, 16); } -#[divan::bench(name = "fri_sampling_verify_32_mib")] +#[divan::bench(max_time = 10)] fn fri_sampling_verify_32_mib(bencher: Bencher) { fri_sampling_verify_for_size(bencher, 32); } @@ -176,44 +172,35 @@ fn fri_eval_prove_for_size(bencher: Bencher, mb: usize) { let mut rng = StdRng::from_seed([7u8; 32]); let eval_point = pcs.sample_evaluation_point(&mut rng); - let packed_values = Arc::new(packed.packed_values); - let packed_mle = Arc::new(packed.packed_mle); - bencher.bench_local(|| { let proof = pcs - .prove::( - &packed_values, - &packed_mle, - &ctx, - &commit_output, - &eval_point, - ) + .prove::(packed.packed_mle.clone(), &ctx, &commit_output, &eval_point) .expect("prove"); black_box(proof); }); } -#[divan::bench(name = "fri_eval_prove_2_mib", max_time = 30)] +#[divan::bench(max_time = 10)] fn fri_eval_prove_2_mib(bencher: Bencher) { fri_eval_prove_for_size(bencher, 2); } -#[divan::bench(name = "fri_eval_prove_4_mib", max_time = 30)] +#[divan::bench(max_time = 10)] fn fri_eval_prove_4_mib(bencher: Bencher) { fri_eval_prove_for_size(bencher, 4); } -#[divan::bench(name = "fri_eval_prove_8_mib", max_time = 30)] +#[divan::bench(max_time = 10)] fn fri_eval_prove_8_mib(bencher: Bencher) { fri_eval_prove_for_size(bencher, 8); } -#[divan::bench(name = "fri_eval_prove_16_mib", max_time = 30)] +#[divan::bench(max_time = 10)] fn fri_eval_prove_16_mib(bencher: Bencher) { fri_eval_prove_for_size(bencher, 16); } -#[divan::bench(name = "fri_eval_prove_32_mib", max_time = 30)] +#[divan::bench(max_time = 10)] fn fri_eval_prove_32_mib(bencher: Bencher) { fri_eval_prove_for_size(bencher, 32); } @@ -227,21 +214,24 @@ fn fri_eval_verify_for_size(bencher: Bencher, mb: usize) { let mut rng = StdRng::from_seed([8u8; 32]); 
let eval_point = pcs.sample_evaluation_point(&mut rng); + // Heavy part done once: claim + proof + let eval_claim = pcs + .calculate_evaluation_claim(&packed.packed_values, &eval_point) + .expect("claim"); + let proof = pcs - .prove::( - &packed.packed_values, - &packed.packed_mle, - &ctx, - &commit_output, - &eval_point, - ) + .prove::(packed.packed_mle.clone(), &ctx, &commit_output, &eval_point) .expect("prove"); - // Compute approximate proof size. - let fp_size = std::mem::size_of::(); - let eval_point_bytes = proof.evaluation_point.len() * fp_size; + // Approximate proof size "over the wire": + // - commitment: 32 bytes (already in header) + // - eval_point: n_vars * sizeof(B128) + // - eval_claim: sizeof(B128) + // - transcript: proof.transcript_bytes.len() + let fp_size = core::mem::size_of::(); + let eval_point_bytes = eval_point.len() * fp_size; let eval_claim_bytes = fp_size; - let commitment_bytes = proof.commitment.digest.len(); + let commitment_bytes = 32; let transcript_bytes = proof.transcript_bytes.len(); let total_bytes = commitment_bytes + eval_point_bytes + eval_claim_bytes + transcript_bytes; @@ -252,36 +242,82 @@ fn fri_eval_verify_for_size(bencher: Bencher, mb: usize) { ); bencher.bench_local(|| { - pcs.verify(&proof, &ctx).expect("verify"); + pcs.verify(&proof, eval_claim, &eval_point, &ctx) + .expect("verify"); black_box(&proof); }); } -#[divan::bench(name = "fri_eval_verify_2_mib")] +#[divan::bench(max_time = 10)] fn fri_eval_verify_2_mib(bencher: Bencher) { fri_eval_verify_for_size(bencher, 2); } -#[divan::bench(name = "fri_eval_verify_4_mib")] +#[divan::bench(max_time = 10)] fn fri_eval_verify_4_mib(bencher: Bencher) { fri_eval_verify_for_size(bencher, 4); } -#[divan::bench(name = "fri_eval_verify_8_mib")] +#[divan::bench(max_time = 10)] fn fri_eval_verify_8_mib(bencher: Bencher) { fri_eval_verify_for_size(bencher, 8); } -#[divan::bench(name = "fri_eval_verify_16_mib")] +#[divan::bench(max_time = 10)] fn fri_eval_verify_16_mib(bencher: Bencher) { fri_eval_verify_for_size(bencher, 16); } -#[divan::bench(name = "fri_eval_verify_32_mib")] +#[divan::bench(max_time = 10)] fn fri_eval_verify_32_mib(bencher: Bencher) { fri_eval_verify_for_size(bencher, 32); } +// evaluation claim (p(z)) computation +fn fri_eval_claim_for_size(bencher: Bencher, mb: usize) { + let size_bytes = mb * 1024 * 1024; + + let (pcs, _ctx, packed, _commit_output, _commitment) = setup_for_size(size_bytes); + + // Deterministic evaluation point. + let mut rng = StdRng::from_seed([42u8; 32]); + let eval_point = pcs.sample_evaluation_point(&mut rng); + + bencher.bench_local(|| { + let claim = pcs + .calculate_evaluation_claim(&packed.packed_values, &eval_point) + .expect("evaluation_claim must succeed"); + + // Prevent the optimizer from throwing this away. 
+ black_box(claim); + }); +} + +#[divan::bench(max_time = 20)] +fn fri_eval_claim_2_mib(bencher: Bencher) { + fri_eval_claim_for_size(bencher, 2); +} + +#[divan::bench(max_time = 20)] +fn fri_eval_claim_4_mib(bencher: Bencher) { + fri_eval_claim_for_size(bencher, 4); +} + +#[divan::bench(max_time = 20)] +fn fri_eval_claim_8_mib(bencher: Bencher) { + fri_eval_claim_for_size(bencher, 8); +} + +#[divan::bench(max_time = 20)] +fn fri_eval_claim_16_mib(bencher: Bencher) { + fri_eval_claim_for_size(bencher, 16); +} + +#[divan::bench(max_time = 30)] +fn fri_eval_claim_32_mib(bencher: Bencher) { + fri_eval_claim_for_size(bencher, 32); +} + fn main() { divan::main(); } diff --git a/fri/src/core.rs b/fri/src/core.rs index 357bf365..f68e281f 100644 --- a/fri/src/core.rs +++ b/fri/src/core.rs @@ -58,9 +58,6 @@ pub struct FriCommitment { /// Evaluation proof #[derive(Clone, Debug)] pub struct FriProof { - pub commitment: FriCommitment, - pub evaluation_point: Vec, - pub evaluation_claim: B128, pub transcript_bytes: Vec, } @@ -71,8 +68,8 @@ pub struct FriContext { } pub struct FriBiniusPCS { - cfg: FriParamsConfig, - merkle_prover: DefaultMerkleProver, + pub(crate) cfg: FriParamsConfig, + pub(crate) merkle_prover: DefaultMerkleProver, } impl FriBiniusPCS { @@ -170,8 +167,7 @@ impl FriBiniusPCS { /// Generate a FRI evaluation proof. pub fn prove

<P>( &self, - values: &[B128], - packed_mle: &FieldBuffer<P>, + packed_mle: FieldBuffer<P>, ctx: &FriContext, commit_output: &FriCommitOutput<P>
, evaluation_point: &[B128], @@ -179,49 +175,45 @@ impl FriBiniusPCS { where P: PackedField + PackedExtension + PackedExtension, { - // Compute evaluation claim from scalar values - let evaluation_claim = self.calculate_evaluation_claim(values, evaluation_point)?; - - // Set up PCS prover and transcript let pcs = OneBitPCSProver::new(&ctx.ntt, &self.merkle_prover, &ctx.fri_params); let mut prover_transcript = ProverTranscript::new(Challenger::default()); - // First write commitment bytes into transcript + // Write commitment bytes to transcript. prover_transcript .message() .write_bytes(&commit_output.commitment); - // Run FRI proof generation + // Generate FRI proof. pcs.prove( &commit_output.codeword, &commit_output.committed, - packed_mle.clone(), + packed_mle, evaluation_point.to_vec(), &mut prover_transcript, ) .map_err(|e| FriBiniusError::Proof(e.to_string()))?; - // Turn prover transcript into verifier transcript and serialize it let verifier_transcript: VerifierTr = prover_transcript.into_verifier(); let transcript_bytes = crate::transcript::transcript_to_bytes(&verifier_transcript); - // Extract commitment digest as [u8; 32] - let digest: [u8; 32] = commit_output - .commitment - .as_slice() - .try_into() - .expect("commitment is 32 bytes by construction"); - - Ok(FriProof { - commitment: FriCommitment { digest }, - evaluation_point: evaluation_point.to_vec(), - evaluation_claim, - transcript_bytes, - }) + Ok(FriProof { transcript_bytes }) } /// Verify a proof produced by `prove`. - pub fn verify(&self, proof: &FriProof, ctx: &FriContext) -> Result<(), FriBiniusError> { + /// + /// Caller supplies: + /// - `evaluation_claim`: f(z) + /// - `evaluation_point`: z + /// - `ctx`: FRI parameters + NTT context + /// + /// Commitment is read from the transcript. 
+ pub fn verify( + &self, + proof: &FriProof, + evaluation_claim: B128, + evaluation_point: &[B128], + ctx: &FriContext, + ) -> Result<(), FriBiniusError> { // Reconstruct transcript from bytes let mut transcript = crate::transcript::transcript_from_bytes(proof.transcript_bytes.clone()); @@ -233,11 +225,10 @@ impl FriBiniusPCS { let merkle_scheme = self.merkle_prover.scheme().clone(); - // Call the Binius FRI verification routine fri_verify( &mut transcript, - proof.evaluation_claim, - &proof.evaluation_point, + evaluation_claim, + evaluation_point, retrieved_commitment, &ctx.fri_params, &merkle_scheme, @@ -287,7 +278,6 @@ impl FriBiniusPCS { } } -// Helper conversions at bottom fn lift_small_to_large_field(small_field_elms: &[F]) -> Vec where F: Field, diff --git a/fri/src/lib.rs b/fri/src/lib.rs index de682f19..2eb5c710 100644 --- a/fri/src/lib.rs +++ b/fri/src/lib.rs @@ -5,30 +5,28 @@ pub mod error; pub mod sampling; pub mod transcript; +pub use crate::core::{ + DefaultMerkleProver, FriBiniusPCS, FriCommitOutput, FriCommitment, FriContext, FriParamsConfig, + FriParamsVersion, FriProof, B128, +}; +pub use crate::encoding::{BytesEncoder, PackedMLE}; +pub use crate::error::FriBiniusError; + #[cfg(test)] mod tests; - -pub use avail_core::FriParamsConfig; -pub use core::{FriBiniusPCS, FriCommitment, FriContext, FriProof}; -pub use encoding::{BytesEncoder, PackedMLE}; -pub use error::FriBiniusError; #[cfg(feature = "testing")] pub use sampling::reconstruct_codeword_naive; pub use transcript::{transcript_from_bytes, transcript_to_bytes, VerifierTr}; #[cfg(any(test, feature = "bench"))] pub mod e2e_helpers { - use crate::core::FriCommitOutput; - - use super::*; - use binius_verifier::config::B128; - use rand::{CryptoRng, RngCore}; + use crate::core::{FriBiniusPCS, FriCommitOutput, FriCommitment, FriContext, B128}; + use crate::encoding::{BytesEncoder, PackedMLE}; + use crate::FriBiniusError; + use avail_core::FriParamsConfig; + use rand::{CryptoRng, Rng}; - /// Prepare everything from raw bytes up to a commitment: - /// - bytes -> PackedMLE - /// - derive `n_vars` from the data - /// - build PCS + context - /// - commit + /// Commit-only helper used in tests/benches. pub fn commit_bytes( mut cfg: FriParamsConfig, data: &[u8], @@ -46,7 +44,7 @@ pub mod e2e_helpers { let encoder = BytesEncoder::::new(); let packed = encoder.bytes_to_packed_mle(data)?; - // fill in n_vars from data (UX: user doesn't have to know it) + // Fill n_vars from data cfg.n_vars = packed.total_n_vars; // PCS + FRI context @@ -66,27 +64,23 @@ pub mod e2e_helpers { Ok((pcs, ctx, packed, commit_output, commitment)) } - /// Convenience: commit + one evaluation proof + verification. - pub fn commit_prove_verify_bytes( + /// Full commit+prove+verify helper. 
+ pub fn commit_prove_verify_bytes( cfg: FriParamsConfig, data: &[u8], rng: &mut R, ) -> Result<(), FriBiniusError> { let (pcs, ctx, packed, commit_output, _commitment) = commit_bytes(cfg, data)?; - // Sample evaluation point + // Sample evaluation point and compute claim let eval_point = pcs.sample_evaluation_point(rng); + let eval_claim = pcs.calculate_evaluation_claim(&packed.packed_values, &eval_point)?; // Generate proof - let proof = pcs.prove::( - &packed.packed_values, - &packed.packed_mle, - &ctx, - &commit_output, - &eval_point, - )?; + let proof = + pcs.prove::(packed.packed_mle.clone(), &ctx, &commit_output, &eval_point)?; // Verify - pcs.verify(&proof, &ctx) + pcs.verify(&proof, eval_claim, &eval_point, &ctx) } } diff --git a/fri/src/tests.rs b/fri/src/tests.rs index 69dc5e22..79bacc49 100644 --- a/fri/src/tests.rs +++ b/fri/src/tests.rs @@ -54,18 +54,11 @@ mod e2e_tests { let eval_point = pcs.sample_evaluation_point(&mut rng); let eval_claim = pcs.calculate_evaluation_claim(&packed.packed_values, &eval_point)?; - let proof = pcs.prove::( - &packed.packed_values, - &packed.packed_mle, - &ctx, - &commit_output, - &eval_point, - )?; + let proof = + pcs.prove::(packed.packed_mle.clone(), &ctx, &commit_output, &eval_point)?; - pcs.verify(&proof, &ctx)?; - - // Sanity: proof carries same claim we computed locally - assert_eq!(proof.evaluation_claim, eval_claim); + // Verify using the explicit claim + evaluation point + pcs.verify(&proof, eval_claim, &eval_point, &ctx)?; Ok(()) } @@ -337,18 +330,15 @@ mod e2e_tests { let mut rng = StdRng::from_seed([7u8; 32]); let eval_point = pcs.sample_evaluation_point(&mut rng); + let eval_claim = pcs + .calculate_evaluation_claim(&packed.packed_values, &eval_point) + .expect("claim must succeed"); let proof = pcs - .prove( - &packed.packed_values, - &packed.packed_mle, - &ctx, - &commit_output, - &eval_point, - ) + .prove::(packed.packed_mle.clone(), &ctx, &commit_output, &eval_point) .expect("prove must succeed"); - pcs.verify(&proof, &ctx) + pcs.verify(&proof, eval_claim, &eval_point, &ctx) .expect("Fri evaluation proof must verify"); } } From c6473ecc51bf3fbe7a04d98feadec10f0e88334e Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Wed, 10 Dec 2025 12:05:36 +0530 Subject: [PATCH 14/16] Add parallel processing to calculate evaluation claim --- fri/src/core.rs | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/fri/src/core.rs b/fri/src/core.rs index f68e281f..3852c177 100644 --- a/fri/src/core.rs +++ b/fri/src/core.rs @@ -138,9 +138,34 @@ impl FriBiniusPCS { let small_mle = large_field_mle_to_small_field::(values); let lifted = lift_small_to_large_field::(&small_mle); - let eq_vals = eq_ind_partial_eval(evaluation_point).as_ref().to_vec(); + let eq_vals = eq_ind_partial_eval(evaluation_point); + let eq_slice: &[B128] = eq_vals.as_ref(); + + if lifted.len() != eq_slice.len() { + return Err(FriBiniusError::Verification(format!( + "calculate_evaluation_claim: mismatched lengths: lifted={}, eq_slice={}", + lifted.len(), + eq_slice.len() + ))); + } + + #[cfg(feature = "parallel")] + { + use rayon::prelude::*; + + let acc = lifted + .par_iter() + .zip(eq_slice.par_iter()) + .map(|(a, b)| *a * *b) + .reduce(|| B128::ZERO, |x, y| x + y); - Ok(inner_product::(lifted, eq_vals)) + Ok(acc) + } + + #[cfg(not(feature = "parallel"))] + { + Ok(inner_product::(lifted, eq_slice.to_vec())) + } } pub fn commit

( From 027074df3accd2211ee12ed6336129b775888b80 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Thu, 11 Dec 2025 20:12:52 +0530 Subject: [PATCH 15/16] add fri_eval utils --- Cargo.lock | 2 + fri/Cargo.toml | 6 ++- fri/src/core.rs | 7 +++ fri/src/encoding.rs | 4 +- fri/src/error.rs | 6 ++- fri/src/eval_utils.rs | 121 ++++++++++++++++++++++++++++++++++++++++++ fri/src/lib.rs | 1 + 7 files changed, 143 insertions(+), 4 deletions(-) create mode 100644 fri/src/eval_utils.rs diff --git a/Cargo.lock b/Cargo.lock index ed013a5b..f1863ad3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -572,11 +572,13 @@ dependencies = [ "binius-prover", "binius-transcript", "binius-verifier", + "blake2b_simd", "divan", "log", "parity-scale-codec", "primitive-types", "rand 0.9.2", + "rand_chacha 0.3.1", "rayon", ] diff --git a/fri/Cargo.toml b/fri/Cargo.toml index 3fc10a6f..fd7a3828 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -11,13 +11,15 @@ binius-field = { workspace = true } binius-transcript = { workspace = true } avail-core = { path = "../core", default-features = false } +blake2b_simd = { workspace = true } +codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive", "max-encoded-len"] } log = { workspace = true, default-features = false } rayon = { version = "1.10", optional = true } +rand_chacha = { workspace = true, default-features = false } rand = { version = "0.9.1", default-features = false, features = ["std", "std_rng"], optional = true} [dev-dependencies] avail-core = { path = "../core", default-features = false, features = ["runtime"]} -codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive", "max-encoded-len"] } divan = { version = "0.1"} rand = { version = "0.9.1", default-features = false, features = ["std", "std_rng"] } primitive-types = { workspace = true } @@ -26,7 +28,7 @@ primitive-types = { workspace = true } default = [ "std" ] # This crate is node-side only currently -std = [ "avail-core/std", "log/std" ] +std = [ "avail-core/std", "log/std", "rand_chacha/std" ] # Enable rayon-based parallel code paths parallel = [ "rayon", "std" ] diff --git a/fri/src/core.rs b/fri/src/core.rs index 3852c177..71c29b1d 100644 --- a/fri/src/core.rs +++ b/fri/src/core.rs @@ -200,6 +200,13 @@ impl FriBiniusPCS { where P: PackedField + PackedExtension + PackedExtension, { + if evaluation_point.len() != self.cfg.n_vars { + return Err(FriBiniusError::InvalidEvaluationPoint( + self.cfg.n_vars, + evaluation_point.len(), + )); + } + let pcs = OneBitPCSProver::new(&ctx.ntt, &self.merkle_prover, &ctx.fri_params); let mut prover_transcript = ProverTranscript::new(Challenger::default()); diff --git a/fri/src/encoding.rs b/fri/src/encoding.rs index 8b9ba018..cca0b663 100644 --- a/fri/src/encoding.rs +++ b/fri/src/encoding.rs @@ -36,7 +36,9 @@ where pub fn bytes_to_packed_mle(&self, data: &[u8]) -> Result, FriBiniusError> { if data.is_empty() { - return Err(FriBiniusError::InvalidInput("input data must be non-empty")); + return Err(FriBiniusError::InvalidInput(format!( + "input data must be non-empty" + ))); } // Number of 128-bit field elements needed diff --git a/fri/src/error.rs b/fri/src/error.rs index c332339c..39b37181 100644 --- a/fri/src/error.rs +++ b/fri/src/error.rs @@ -12,7 +12,8 @@ pub enum FriBiniusError { Encoding(String), Transcript(String), Reconstruction(String), - InvalidInput(&'static str), + InvalidInput(String), + InvalidEvaluationPoint(usize, usize), } impl fmt::Display for FriBiniusError { @@ -30,6 
+31,9 @@ impl fmt::Display for FriBiniusError { Transcript(e) => write!(f, "Transcript error: {e}"), Reconstruction(e) => write!(f, "Reconstruction error: {e}"), InvalidInput(msg) => write!(f, "Invalid input: {msg}"), + InvalidEvaluationPoint(e, g) => { + write!(f, "Invalid evaluation point, expected: {e}, got {g}") + }, } } } diff --git a/fri/src/eval_utils.rs b/fri/src/eval_utils.rs new file mode 100644 index 00000000..e198287b --- /dev/null +++ b/fri/src/eval_utils.rs @@ -0,0 +1,121 @@ +use crate::error::FriBiniusError; +use binius_verifier::config::B128; +use blake2b_simd::Params as Blake2bParams; +use core::convert::TryInto; +use rand_chacha::rand_core::{RngCore, SeedableRng}; +use rand_chacha::ChaChaRng; + +const EVAL_POINT_SEED_DOMAIN: &[u8] = b"avail-fri-eval-point-seed:v1"; + +/// Derive a 32-byte seed from provided inputs. +fn derive_seed_from_inputs(rand_src: &[u8], blob_hash: &[u8]) -> [u8; 32] { + let mut hasher = Blake2bParams::new().hash_length(32).to_state(); + hasher.update(EVAL_POINT_SEED_DOMAIN); + hasher.update(rand_src); + hasher.update(blob_hash); + let digest = hasher.finalize(); + let bytes = digest.as_bytes(); + let mut seed = [0u8; 32]; + seed.copy_from_slice(&bytes[..32]); + seed +} + +/// Deterministically generate an evaluation point (n_vars coordinates), returning Vec. +/// - `rand_src` : arbitrary randomness (e.g. epoch randomness bytes) +/// - `blob_hash`: blob identifier (e.g. blob commitment or H256) +/// - `n_vars` : number of coordinates (from FriParams / packed MLE) +pub fn derive_evaluation_point(rand_src: &[u8], blob_hash: &[u8], n_vars: usize) -> Vec { + let seed = derive_seed_from_inputs(rand_src, blob_hash); + let mut rng = ChaChaRng::from_seed(seed); + + let mut out = Vec::with_capacity(n_vars); + let mut buf = [0u8; 16]; + + for _ in 0..n_vars { + rng.fill_bytes(&mut buf); + let v = u128::from_le_bytes(buf); + out.push(B128::from(v)); + } + + out +} + +/// Serialize evaluation point -> bytes (concatenate 16-byte LE representations). +pub fn eval_point_to_bytes(point: &[B128]) -> Vec { + let mut out = Vec::with_capacity(point.len() * 16); + for p in point { + // convert B128 -> u128; B128 likely has From and Into + let v: u128 = (*p).into(); + out.extend_from_slice(&v.to_le_bytes()); + } + out +} + +/// Deserialize bytes -> evaluation point (expects 16*N bytes). +pub fn eval_point_from_bytes(bytes: &[u8]) -> Result, FriBiniusError> { + if bytes.len() % 16 != 0 { + return Err(FriBiniusError::InvalidInput(format!( + "eval_point bytes length not multiple of 16: {}", + bytes.len() + ))); + } + let n = bytes.len() / 16; + let mut out = Vec::with_capacity(n); + for i in 0..n { + let chunk = &bytes[i * 16..(i + 1) * 16]; + let v = u128::from_le_bytes(chunk.try_into().unwrap()); + out.push(B128::from(v)); + } + Ok(out) +} + +/// Serialize evaluation claim (single B128) to 16 bytes. +pub fn eval_claim_to_bytes(claim: B128) -> [u8; 16] { + let v: u128 = claim.into(); + v.to_le_bytes() +} + +/// Deserialize evaluation claim from 16 bytes. 
+pub fn eval_claim_from_bytes(bytes: &[u8]) -> Result { + if bytes.len() != 16 { + return Err(FriBiniusError::InvalidInput(format!( + "Expected 16 bytes for evaluation claim, but got {}", + bytes.len() + ))); + } + let arr: [u8; 16] = bytes.try_into().expect("length checked above"); + let v = u128::from_le_bytes(arr); + Ok(B128::from(v)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn roundtrip_eval_point_bytes() { + let rand_src = [7u8; 32]; + let blob_hash = [11u8; 32]; + let n = 10; + let p = derive_evaluation_point(&rand_src, &blob_hash, n); + assert_eq!(p.len(), n); + let b = eval_point_to_bytes(&p); + let p2 = eval_point_from_bytes(&b).unwrap(); + assert_eq!(p.len(), p2.len()); + for (a, c) in p.into_iter().zip(p2.into_iter()) { + assert_eq!(u128::from(a), u128::from(c)); + } + } + + #[test] + fn claim_serialize_roundtrip() { + let mut rng = ChaChaRng::from_seed([1u8; 32]); + let mut buf = [0u8; 16]; + rng.fill_bytes(&mut buf); + let val = u128::from_le_bytes(buf); + let claim = B128::from(val); + let bytes = eval_claim_to_bytes(claim); + let claim2 = eval_claim_from_bytes(&bytes).unwrap(); + assert_eq!(u128::from(claim), u128::from(claim2)); + } +} diff --git a/fri/src/lib.rs b/fri/src/lib.rs index 2eb5c710..7072585f 100644 --- a/fri/src/lib.rs +++ b/fri/src/lib.rs @@ -1,6 +1,7 @@ pub mod core; pub mod encoding; pub mod error; +pub mod eval_utils; #[cfg(feature = "testing")] pub mod sampling; pub mod transcript; From d33781a3b7f6817105b88057b8754df86e69f385 Mon Sep 17 00:00:00 2001 From: Toufeeq Pasha Date: Wed, 17 Dec 2025 14:36:41 +0530 Subject: [PATCH 16/16] Refactor evaluation utilities: update seed derivation function and simplify evaluation point generation --- fri/src/encoding.rs | 67 +++++++++++++++++++++++++++++++++++++++++++ fri/src/eval_utils.rs | 13 ++++----- 2 files changed, 73 insertions(+), 7 deletions(-) diff --git a/fri/src/encoding.rs b/fri/src/encoding.rs index cca0b663..239a8dce 100644 --- a/fri/src/encoding.rs +++ b/fri/src/encoding.rs @@ -6,6 +6,7 @@ use core::marker::PhantomData; const BYTES_PER_ELEMENT: usize = 16; // 128 bits const BITS_PER_ELEMENT: usize = 128; +const LOG_SCALAR_BIT_WIDTH: usize = BITS_PER_ELEMENT.ilog2() as usize; // log2(128) = 7 pub struct BytesEncoder

{ log_scalar_bit_width: usize, @@ -102,3 +103,69 @@ where Self::new() } } + +/// Derive MLE dimensions from a blob size in bytes. +/// +/// This utility mirrors the logic in `BytesEncoder::bytes_to_packed_mle` and +/// computes: +/// +/// - `log_len`: the log₂ size of the *big-field* MLE (number of multilinear +/// variables over `B128`) +/// - `n_vars`: the *total* number of MLE variables after scalar-bit expansion +/// +/// # Returns +/// +/// `(log_len, n_vars)` where: +/// +/// - `log_len` = log₂(next_pow2(ceil(blob_size_bytes / 16))) +/// - `n_vars` = log_len + LOG_SCALAR_BIT_WIDTH +/// +/// # Explanation +/// +/// - Blob bytes are packed into 128-bit field elements (`B128`) +/// - Each element consumes 16 bytes +/// - The element count is padded to the next power of two +/// - The padded size determines the number of big-field MLE variables +/// - Each `B128` element expands into `LOG_SCALAR_BIT_WIDTH` scalar bits +/// +/// # Constants (for B128) +/// +/// - `BYTES_PER_ELEMENT = 16` +/// - `LOG_SCALAR_BIT_WIDTH = 7` (since 128 = 2⁷) +/// +/// # Panics +/// +/// Panics if `blob_size_bytes == 0`. +/// +/// # Example +/// +/// ```text +/// blob_size_bytes = 1_048_576 (1 MiB) +/// num_elements = 65_536 +/// padded_elements = 65_536 +/// log_len = 16 +/// n_vars = 16 + 7 = 23 +/// ``` +pub fn mle_dims_from_blob_size(blob_size_bytes: usize) -> (usize, usize) { + assert!(blob_size_bytes > 0, "blob must be non-empty"); + + // Number of 128-bit field elements + let num_elements = (blob_size_bytes + BYTES_PER_ELEMENT - 1) / BYTES_PER_ELEMENT; + + // Pad to power of two + let padded_elements = num_elements.next_power_of_two(); + + // Big-field MLE variables + let big_field_n_vars = padded_elements.ilog2() as usize; + + // (log_len, total_n_vars) + (big_field_n_vars, big_field_n_vars + LOG_SCALAR_BIT_WIDTH) +} + +#[test] +fn n_vars_matches_encoder() { + let blob_size = 1024 * 1024; // 1 MiB + let (log_len, n_vars) = mle_dims_from_blob_size(blob_size); + assert_eq!(n_vars, 23); + assert_eq!(log_len, 16); +} \ No newline at end of file diff --git a/fri/src/eval_utils.rs b/fri/src/eval_utils.rs index e198287b..9375321c 100644 --- a/fri/src/eval_utils.rs +++ b/fri/src/eval_utils.rs @@ -8,7 +8,9 @@ use rand_chacha::ChaChaRng; const EVAL_POINT_SEED_DOMAIN: &[u8] = b"avail-fri-eval-point-seed:v1"; /// Derive a 32-byte seed from provided inputs. -fn derive_seed_from_inputs(rand_src: &[u8], blob_hash: &[u8]) -> [u8; 32] { +/// - `rand_src` : arbitrary randomness (e.g. epoch randomness bytes) +/// - `blob_hash`: blob identifier (e.g. blob commitment or H256) +pub fn derive_seed_from_inputs(rand_src: &[u8], blob_hash: &[u8]) -> [u8; 32] { let mut hasher = Blake2bParams::new().hash_length(32).to_state(); hasher.update(EVAL_POINT_SEED_DOMAIN); hasher.update(rand_src); @@ -21,11 +23,9 @@ fn derive_seed_from_inputs(rand_src: &[u8], blob_hash: &[u8]) -> [u8; 32] { } /// Deterministically generate an evaluation point (n_vars coordinates), returning Vec. -/// - `rand_src` : arbitrary randomness (e.g. epoch randomness bytes) -/// - `blob_hash`: blob identifier (e.g. 
blob commitment or H256) +/// - `seed` : 32-byte seed /// - `n_vars` : number of coordinates (from FriParams / packed MLE) -pub fn derive_evaluation_point(rand_src: &[u8], blob_hash: &[u8], n_vars: usize) -> Vec { - let seed = derive_seed_from_inputs(rand_src, blob_hash); +pub fn derive_evaluation_point(seed: [u8; 32], n_vars: usize) -> Vec { let mut rng = ChaChaRng::from_seed(seed); let mut out = Vec::with_capacity(n_vars); @@ -95,9 +95,8 @@ mod tests { #[test] fn roundtrip_eval_point_bytes() { let rand_src = [7u8; 32]; - let blob_hash = [11u8; 32]; let n = 10; - let p = derive_evaluation_point(&rand_src, &blob_hash, n); + let p = derive_evaluation_point(rand_src, n); assert_eq!(p.len(), n); let b = eval_point_to_bytes(&p); let p2 = eval_point_from_bytes(&b).unwrap();