From 7b7f08fd904a7ef7f6f3a6ecb678f6a77d242a6d Mon Sep 17 00:00:00 2001 From: AlekseiVambol Date: Sun, 3 Mar 2024 00:55:40 +0200 Subject: [PATCH 1/6] LogUp integration --- halo2_proofs/Cargo.toml | 8 +- halo2_proofs/benches/lookups.rs | 239 ++++++++++ halo2_proofs/src/arithmetic.rs | 39 ++ halo2_proofs/src/dev.rs | 82 ++-- halo2_proofs/src/dev/failure.rs | 124 +++--- halo2_proofs/src/plonk.rs | 2 +- halo2_proofs/src/plonk/circuit.rs | 186 +++++--- halo2_proofs/src/plonk/evaluation.rs | 249 ++++++++--- halo2_proofs/src/plonk/keygen.rs | 2 +- halo2_proofs/src/plonk/mv_lookup.rs | 96 ++++ halo2_proofs/src/plonk/mv_lookup/prover.rs | 438 +++++++++++++++++++ halo2_proofs/src/plonk/mv_lookup/verifier.rs | 195 +++++++++ halo2_proofs/src/plonk/prover.rs | 14 +- halo2_proofs/src/plonk/verifier.rs | 7 +- halo2_proofs/src/poly.rs | 2 +- halo2_proofs/tests/plonk_api.rs | 428 ++---------------- 16 files changed, 1482 insertions(+), 629 deletions(-) create mode 100644 halo2_proofs/benches/lookups.rs create mode 100644 halo2_proofs/src/plonk/mv_lookup.rs create mode 100644 halo2_proofs/src/plonk/mv_lookup/prover.rs create mode 100644 halo2_proofs/src/plonk/mv_lookup/verifier.rs diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index e340407dd6..2b4b3dae9c 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -43,6 +43,10 @@ harness = false name = "dev_lookup" harness = false +[[bench]] +name = "lookups" +harness = false + [[bench]] name = "fft" harness = false @@ -80,7 +84,7 @@ serde_json = "1" getrandom = { version = "0.2", features = ["js"] } [features] -default = ["batch", "bits"] +default = ["batch", "bits", "logup_skip_inv"] dev-graph = ["plotters", "tabbycat"] test-dev-graph = [ "dev-graph", @@ -97,6 +101,8 @@ circuit-params = [] cost-estimator = ["serde", "serde_derive"] derive_serde = ["halo2curves/derive_serde"] +logup_skip_inv = [] + [lib] bench = false diff --git a/halo2_proofs/benches/lookups.rs b/halo2_proofs/benches/lookups.rs new file mode 100644 index 0000000000..bf488fb974 --- /dev/null +++ b/halo2_proofs/benches/lookups.rs @@ -0,0 +1,239 @@ +#[macro_use] +extern crate criterion; + +use ff::PrimeField; +use halo2_proofs::circuit::{Layouter, SimpleFloorPlanner, Value}; +use halo2_proofs::plonk::*; +use halo2_proofs::poly::kzg::multiopen::VerifierGWC; +use halo2_proofs::poly::{commitment::ParamsProver, Rotation}; +use halo2_proofs::transcript::{Blake2bRead, Blake2bWrite, Challenge255}; +use halo2curves::bn256::{Bn256, G1Affine}; +use halo2curves::pairing::Engine; +use rand_core::OsRng; + +use halo2_proofs::{ + poly:: + kzg::{ + commitment::{KZGCommitmentScheme, ParamsKZG}, + multiopen::ProverGWC, + strategy::SingleStrategy, + }, + transcript::{TranscriptReadBuffer, TranscriptWriterBuffer}, +}; + +use std::marker::PhantomData; + +use criterion::{BenchmarkId, Criterion}; + +fn criterion_benchmark(c: &mut Criterion) { + #[derive(Clone, Default)] + struct MyCircuit { + _marker: PhantomData, + } + + #[derive(Clone)] + struct MyConfig { + selector: Selector, + table: TableColumn, + advice: Column, + other_advice: Column, + } + + impl Circuit for MyCircuit { + type Config = MyConfig; + type FloorPlanner = SimpleFloorPlanner; + + fn without_witnesses(&self) -> Self { + Self::default() + } + + fn configure(meta: &mut ConstraintSystem) -> MyConfig { + let config = MyConfig { + selector: meta.complex_selector(), + table: meta.lookup_table_column(), + advice: meta.advice_column(), + other_advice: meta.advice_column(), + }; + + let dummy_selector = meta.complex_selector(); + 
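+
+            // The "degree 6 gate" below only forces the circuit's maximum gate degree
+            // up to 6 so the benchmark exercises the 8n extended domain; it constrains
+            // nothing useful. See the degree bookkeeping notes at the end of this
+            // `configure` implementation.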
+ meta.create_gate("degree 6 gate", |meta| { + let dummy_selector = meta.query_selector(dummy_selector); + let constraints = vec![dummy_selector.clone(); 4].iter() + .fold(dummy_selector.clone(), |acc, val| acc * val.clone()); + Constraints::with_selector(dummy_selector, Some(constraints)) + }); + + meta.lookup("lookup", |meta| { + let advice = meta.query_advice(config.advice, Rotation::cur()); + vec![(advice, config.table)] + }); + + meta.lookup("lookup", |meta| { + let advice = meta.query_advice(config.advice, Rotation::cur()); + vec![(advice, config.table)] + }); + + meta.lookup("lookup", |meta| { + let advice = meta.query_advice(config.advice, Rotation::cur()); + vec![(advice, config.table)] + }); + + meta.lookup("lookup", |meta| { + let advice = meta.query_advice(config.advice, Rotation::cur()); + vec![(advice, config.table)] + }); + + meta.lookup("lookup", |meta| { + let advice = meta.query_advice(config.advice, Rotation::cur()); + vec![(advice, config.table)] + }); + + /* + - We need degree at least 6 because 6 - 1 = 5 and we need to go to extended domain of 8n + - Our goal is to get to max degree of 9 because now 9 - 1 = 8 and that will fit into domain + + - base degree = table_deg + 2 + - if we put input_expression_degree = 1 + => degree = base + 1 = 3 + 1 = 4 + - we can batch one more with 5 more lookups + */ + + config + } + + fn synthesize( + &self, + config: MyConfig, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_table( + || "8-bit table", + |mut table| { + for row in 0u64..(1 << 8) { + table.assign_cell( + || format!("row {}", row), + config.table, + row as usize, + || Value::known(F::from(row)), + )?; + } + + Ok(()) + }, + )?; + + layouter.assign_region( + || "assign values", + |mut region| { + for offset in 0u64..(1 << 10) { + config.selector.enable(&mut region, offset as usize)?; + region.assign_advice( + || format!("offset {}", offset), + config.advice, + offset as usize, + || Value::known(F::from(offset % 256)), + )?; + } + for offset in 1u64..(1 << 10) { + config.selector.enable(&mut region, offset as usize)?; + region.assign_advice( + || format!("offset {}", offset), + config.other_advice, + offset as usize - 1, + || Value::known(F::from(offset % 256)), + )?; + } + Ok(()) + }, + ) + } + } + + fn keygen(k: u32) -> (ParamsKZG, ProvingKey) { + let params: ParamsKZG = ParamsKZG::new(k); + let empty_circuit: MyCircuit<::Fr> = MyCircuit { + _marker: PhantomData, + }; + let vk = keygen_vk(¶ms, &empty_circuit).expect("keygen_vk should not fail"); + let pk = keygen_pk(¶ms, vk, &empty_circuit).expect("keygen_pk should not fail"); + (params, pk) + } + + fn prover(_k: u32, params: &ParamsKZG, pk: &ProvingKey) -> Vec { + let rng = OsRng; + + let circuit: MyCircuit<::Fr> = MyCircuit { + _marker: PhantomData, + }; + + let mut transcript = Blake2bWrite::<_, _, Challenge255>::init(vec![]); + create_proof::, ProverGWC<'_, Bn256>, _, _, _, _>( + params, + pk, + &[circuit], + &[&[]], + rng, + &mut transcript, + ) + .expect("proof generation should not fail"); + transcript.finalize() + } + + fn verifier(params: &ParamsKZG, vk: &VerifyingKey, proof: &[u8]) { + let strategy = SingleStrategy::new(params); + let mut transcript = Blake2bRead::<_, _, Challenge255>::init(proof); + assert!(verify_proof::< + KZGCommitmentScheme, + VerifierGWC<'_, Bn256>, + Challenge255, + Blake2bRead<&[u8], G1Affine, Challenge255>, + SingleStrategy<'_, Bn256>, + >(params, vk, strategy, &[&[]], &mut transcript) + .is_ok()); + } + + let k_range = 16..=16; + + let mut keygen_group = 
c.benchmark_group("plonk-keygen"); + keygen_group.sample_size(10); + for k in k_range.clone() { + keygen_group.bench_with_input(BenchmarkId::from_parameter(k), &k, |b, &k| { + b.iter(|| keygen(k)); + }); + } + keygen_group.finish(); + + let mut prover_group = c.benchmark_group("plonk-prover"); + prover_group.sample_size(10); + for k in k_range.clone() { + let (params, pk) = keygen(k); + + prover_group.bench_with_input( + BenchmarkId::from_parameter(k), + &(k, ¶ms, &pk), + |b, &(k, params, pk)| { + b.iter(|| prover(k, params, pk)); + }, + ); + } + prover_group.finish(); + + let mut verifier_group = c.benchmark_group("plonk-verifier"); + for k in k_range { + let (params, pk) = keygen(k); + let proof = prover(k, ¶ms, &pk); + + verifier_group.bench_with_input( + BenchmarkId::from_parameter(k), + &(¶ms, pk.get_vk(), &proof[..]), + |b, &(params, vk, proof)| { + b.iter(|| verifier(params, vk, proof)); + }, + ); + } + verifier_group.finish(); +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); diff --git a/halo2_proofs/src/arithmetic.rs b/halo2_proofs/src/arithmetic.rs index 0163e355eb..24afe4f38d 100644 --- a/halo2_proofs/src/arithmetic.rs +++ b/halo2_proofs/src/arithmetic.rs @@ -381,6 +381,14 @@ where q } +/// Inverts multiplicatively each nonzero entry in the slice. Zero-valued +/// entries are not changed. The inversion process is parallelized. +pub fn par_invert(values: &mut [F]) { + parallelize(values, |values, _start| { + values.batch_invert(); + }); +} + /// This utility function will parallelize an operation that is to be /// performed over a mutable slice. pub fn parallelize(v: &mut [T], f: F) { @@ -433,6 +441,37 @@ pub fn parallelize(v: &mu }); } +/// This simple utility function will parallelize an operation that is to be +/// performed over a mutable slice. +/// This naive version will have all chunks except the last one of the same size. +/// !! This is important for the mv_lookup prover parallel scan implementation at the moment. !! 
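+/// For example, with `v.len() == 34` and 4 threads, `chunk == 34 / 4 == 8`, so the
+/// closure sees slices of lengths 8, 8, 8, 8 and 2, starting at 0, 8, 16, 24 and 32
+/// (the returned vector collects those start offsets). The prover's prefix scan
+/// recovers a segment's index as `start / chunk`, which is only sound under this
+/// "equal chunks except the last" layout.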
+pub(crate) fn parallelize_naive( + v: &mut [T], + f: F, +) -> Vec { + let n = v.len(); + let num_threads = multicore::current_num_threads(); + let mut chunk = n / num_threads; + if chunk < num_threads { + chunk = 1; + } + + multicore::scope(|scope| { + let mut chunk_starts = vec![]; + for (chunk_num, v) in v.chunks_mut(chunk).enumerate() { + let f = f.clone(); + scope.spawn(move |_| { + let start = chunk_num * chunk; + f(v, start); + }); + let start = chunk_num * chunk; + chunk_starts.push(start); + } + + chunk_starts + }) +} + fn log2_floor(num: usize) -> u32 { assert!(num > 0); diff --git a/halo2_proofs/src/dev.rs b/halo2_proofs/src/dev.rs index 7a3aca10cc..06f0147cd6 100644 --- a/halo2_proofs/src/dev.rs +++ b/halo2_proofs/src/dev.rs @@ -620,7 +620,7 @@ impl + Ord> MockProver { let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); #[cfg(not(feature = "circuit-params"))] let config = ConcreteCircuit::configure(&mut cs); - let cs = cs; + let cs = cs.chunk_lookups(); assert!( n >= cs.minimum_rows(), @@ -962,7 +962,9 @@ impl + Ord> MockProver { .iter() .enumerate() .flat_map(|(lookup_index, lookup)| { - assert!(lookup.table_expressions.len() == lookup.input_expressions.len()); + for input_expressions in lookup.inputs_expressions.iter() { + assert!(lookup.table_expressions.len() == input_expressions.len()); + } assert!(self.usable_rows.end > 0); // We optimize on the basis that the table might have been filled so that the last @@ -1009,43 +1011,49 @@ impl + Ord> MockProver { } let table = &cached_table; - let mut inputs: Vec<(Vec<_>, usize)> = lookup_input_row_ids - .clone() - .into_par_iter() - .filter_map(|input_row| { - let t = lookup - .input_expressions - .iter() - .map(move |c| load(c, input_row)) + lookup + .inputs_expressions + .iter() + .map(|input_expressions| { + let mut inputs: Vec<(Vec<_>, usize)> = lookup_input_row_ids + .clone() + .into_par_iter() + .filter_map(|input_row| { + let t = input_expressions + .iter() + .map(move |c| load(c, input_row)) + .collect(); + + if t != fill_row { + // Also keep track of the original input row, since we're going to sort. + Some((t, input_row)) + } else { + None + } + }) .collect(); - if t != fill_row { - // Also keep track of the original input row, since we're going to sort. 
- Some((t, input_row)) - } else { - None - } - }) - .collect(); - inputs.par_sort_unstable(); - - inputs - .par_iter() - .filter_map(move |(input, input_row)| { - if table.binary_search(input).is_err() { - Some(VerifyFailure::Lookup { - name: lookup.name.clone(), - lookup_index, - location: FailureLocation::find_expressions( - &self.cs, - &self.regions, - *input_row, - lookup.input_expressions.iter(), - ), + inputs.par_sort_unstable(); + + inputs + .par_iter() + .filter_map(move |(input, input_row)| { + if table.binary_search(input).is_err() { + Some(VerifyFailure::Lookup { + name: lookup.name.to_string(), + lookup_index, + location: FailureLocation::find_expressions( + &self.cs, + &self.regions, + *input_row, + input_expressions.iter(), + ), + }) + } else { + None + } }) - } else { - None - } + .collect::>() }) .collect::>() }); @@ -1161,7 +1169,7 @@ impl + Ord> MockProver { let mut errors: Vec<_> = iter::empty() .chain(selector_errors) .chain(gate_errors) - .chain(lookup_errors) + .chain(lookup_errors.flatten()) .chain(perm_errors) .chain(shuffle_errors) .collect(); diff --git a/halo2_proofs/src/dev/failure.rs b/halo2_proofs/src/dev/failure.rs index f9f5c27ded..d0414b63cd 100644 --- a/halo2_proofs/src/dev/failure.rs +++ b/halo2_proofs/src/dev/failure.rs @@ -501,7 +501,7 @@ fn render_constraint_not_satisfied( /// ``` fn render_lookup( prover: &MockProver, - name: &str, + _name: &str, lookup_index: usize, location: &FailureLocation, ) { @@ -589,8 +589,10 @@ fn render_lookup( eprintln!("error: lookup input does not exist in table"); eprint!(" ("); - for i in 0..lookup.input_expressions.len() { - eprint!("{}L{}", if i == 0 { "" } else { ", " }, i); + for input_expressions in lookup.inputs_expressions.iter() { + for i in 0..input_expressions.len() { + eprint!("{}L{}", if i == 0 { "" } else { ", " }, i); + } } eprint!(") ∉ ("); @@ -600,69 +602,65 @@ fn render_lookup( eprintln!(")"); eprintln!(); - eprintln!(" Lookup '{name}' inputs:"); - for (i, input) in lookup.input_expressions.iter().enumerate() { - // Fetch the cell values (since we don't store them in VerifyFailure::Lookup). - let cell_values = input.evaluate( - &|_| BTreeMap::default(), - &|_| panic!("virtual selectors are removed during optimization"), - &cell_value(&util::load(n, row, &cs.fixed_queries, &prover.fixed)), - &cell_value(&util::load(n, row, &cs.advice_queries, &prover.advice)), - &cell_value(&util::load_instance( - n, - row, - &cs.instance_queries, - &prover.instance, - )), - &|_| BTreeMap::default(), - &|a| a, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|mut a, mut b| { - a.append(&mut b); - a - }, - &|a, _| a, - ); - - // Collect the necessary rendering information: - // - The columns involved in this constraint. - // - How many cells are in each column. - // - The grid of cell values, indexed by rotation. - let mut columns = BTreeMap::::default(); - let mut layout = BTreeMap::>::default(); - for (i, (cell, _)) in cell_values.iter().enumerate() { - *columns.entry(cell.column).or_default() += 1; - layout - .entry(cell.rotation) - .or_default() - .entry(cell.column) - .or_insert(format!("x{i}")); - } + eprintln!(" Lookup inputs:"); + for input_expressions in lookup.inputs_expressions.iter() { + for (i, input) in input_expressions.iter().enumerate() { + // Fetch the cell values (since we don't store them in VerifyFailure::Lookup). 
+ let cell_values = input.evaluate( + &|_| BTreeMap::default(), + &|_| panic!("virtual selectors are removed during optimization"), + &cell_value(&util::load(n, row, &cs.fixed_queries, prover.fixed.as_slice())), + &cell_value(&util::load(n, row, &cs.advice_queries, &prover.advice)), + &cell_value(&util::load_instance(n, row, &cs.instance_queries, &prover.instance)), + &|_| BTreeMap::default(), + &|a| a, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|mut a, mut b| { + a.append(&mut b); + a + }, + &|a, _| a, + ); + + // Collect the necessary rendering information: + // - The columns involved in this constraint. + // - How many cells are in each column. + // - The grid of cell values, indexed by rotation. + let mut columns = BTreeMap::::default(); + let mut layout = BTreeMap::>::default(); + for (i, (cell, _)) in cell_values.iter().enumerate() { + *columns.entry(cell.column).or_default() += 1; + layout + .entry(cell.rotation) + .or_default() + .entry(cell.column) + .or_insert(format!("x{}", i)); + } + if i != 0 { + eprintln!(); + } - if i != 0 { - eprintln!(); - } - eprintln!( - " L{} = {}", - i, - emitter::expression_to_string(input, &layout) - ); - eprintln!(" ^"); + eprintln!( + " L{} = {}", + i, + emitter::expression_to_string(input, &layout) + ); + eprintln!(" ^"); - emitter::render_cell_layout(" | ", location, &columns, &layout, |_, rotation| { - if rotation == 0 { - eprint!(" <--{{ Lookup '{name}' inputs queried here"); + emitter::render_cell_layout(" | ", location, &columns, &layout, |_, rotation| { + if rotation == 0 { + eprint!(" <--{{ Lookup inputs queried here"); + } + }); + // Print the map from local variables to assigned values. + eprintln!(" |"); + eprintln!(" | Assigned cell values:"); + for (i, (_, value)) in cell_values.iter().enumerate() { + eprintln!(" | x{} = {}", i, value); } - }); - - // Print the map from local variables to assigned values. 
- eprintln!(" |"); - eprintln!(" | Assigned cell values:"); - for (i, (_, value)) in cell_values.iter().enumerate() { - eprintln!(" | x{i} = {value}"); } } } diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 78bfc21501..0e822d6c48 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -25,7 +25,7 @@ mod circuit; mod error; mod evaluation; mod keygen; -mod lookup; +mod mv_lookup; pub mod permutation; mod shuffle; mod vanishing; diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 5107554186..165693589d 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1,4 +1,5 @@ -use super::{lookup, permutation, shuffle, Assigned, Error}; +use std::collections::BTreeMap; +use super::{mv_lookup, permutation, shuffle, Assigned, Error}; use crate::circuit::layouter::SyncDeps; use crate::dev::metadata; use crate::{ @@ -479,7 +480,7 @@ impl Selector { } /// Query of fixed column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct FixedQuery { /// Query index pub(crate) index: Option, @@ -502,7 +503,7 @@ impl FixedQuery { } /// Query of advice column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct AdviceQuery { /// Query index pub(crate) index: Option, @@ -532,7 +533,7 @@ impl AdviceQuery { } /// Query of instance column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct InstanceQuery { /// Query index pub(crate) index: Option, @@ -1545,6 +1546,13 @@ impl Gate { } } +/// TODO doc +#[derive(Debug, Clone)] +pub struct LookupTracker { + pub(crate) table: Vec>, + pub(crate) inputs: Vec>>, +} + /// This is a description of the circuit environment, such as the gate, column and /// permutation arrangements. #[derive(Debug, Clone)] @@ -1580,9 +1588,12 @@ pub struct ConstraintSystem { // Permutation argument for performing equality constraints pub(crate) permutation: permutation::Argument, + /// Map from table expression to vec of vec of input expressions + pub lookups_map: BTreeMap>, + // Vector of lookup arguments, where each corresponds to a sequence of // input expressions and a sequence of table expressions involved in the lookup. - pub(crate) lookups: Vec>, + pub(crate) lookups: Vec>, // Vector of shuffle arguments, where each corresponds to a sequence of // input expressions and a sequence of shuffle expressions involved in the shuffle. 
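+    // `lookups_map` above is keyed by the concatenation of the table expressions'
+    // `identifier()`s, so every `lookup`/`lookup_any` call against the same table
+    // accumulates its input expressions under a single `LookupTracker`, and
+    // `chunk_lookups` later splits each tracker into `mv_lookup::Argument`s that
+    // stay within the degree bound.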
@@ -1613,7 +1624,7 @@ pub struct PinnedConstraintSystem<'a, F: Field> { instance_queries: &'a Vec<(Column, Rotation)>, fixed_queries: &'a Vec<(Column, Rotation)>, permutation: &'a permutation::Argument, - lookups: &'a Vec>, + lookups_map: &'a BTreeMap>, shuffles: &'a Vec>, constants: &'a Vec>, minimum_degree: &'a Option, @@ -1640,7 +1651,7 @@ impl<'a, F: Field> std::fmt::Debug for PinnedConstraintSystem<'a, F> { .field("instance_queries", self.instance_queries) .field("fixed_queries", self.fixed_queries) .field("permutation", self.permutation) - .field("lookups", self.lookups); + .field("lookups_map", self.lookups_map); if !self.shuffles.is_empty() { debug_struct.field("shuffles", self.shuffles); } @@ -1679,6 +1690,7 @@ impl Default for ConstraintSystem { num_advice_queries: Vec::new(), instance_queries: Vec::new(), permutation: permutation::Argument::new(), + lookups_map: BTreeMap::default(), lookups: Vec::new(), shuffles: Vec::new(), general_column_annotations: HashMap::new(), @@ -1706,7 +1718,7 @@ impl ConstraintSystem { advice_queries: &self.advice_queries, instance_queries: &self.instance_queries, permutation: &self.permutation, - lookups: &self.lookups, + lookups_map: &self.lookups_map, shuffles: &self.shuffles, constants: &self.constants, minimum_degree: &self.minimum_degree, @@ -1738,11 +1750,12 @@ impl ConstraintSystem { /// they need to match. pub fn lookup>( &mut self, - name: S, + // FIXME use name in debug messages + _name: S, table_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, TableColumn)>, - ) -> usize { + ) { let mut cells = VirtualCells::new(self); - let table_map = table_map(&mut cells) + let (input_expressions, table_expressions): (Vec<_>, Vec<_>) = table_map(&mut cells) .into_iter() .map(|(mut input, table)| { if input.contains_simple_selector() { @@ -1753,13 +1766,88 @@ impl ConstraintSystem { table.query_cells(&mut cells); (input, table) }) - .collect(); - let index = self.lookups.len(); + .unzip(); + let table_expressions_identifier = table_expressions + .iter() + .fold(String::new(), |string, expr| string + &expr.identifier()); + self.lookups_map + .entry(table_expressions_identifier) + .and_modify(|table_tracker| table_tracker.inputs.push(input_expressions.clone())) + .or_insert(LookupTracker { + table: table_expressions, + inputs: vec![input_expressions], + }); + } - self.lookups - .push(lookup::Argument::new(name.as_ref(), table_map)); + /// Chunk lookup arguments into pieces below a given degree bound + pub fn chunk_lookups(mut self) -> Self { + if self.lookups_map.is_empty() { + return self; + } - index + let max_gate_degree = self.max_gate_degree(); + let max_single_lookup_degree: usize = self + .lookups_map + .values() + .map(|v| { + let table_degree = v.table.iter().map(|expr| expr.degree()).max().unwrap(); + let base_lookup_degree = super::mv_lookup::base_degree(table_degree); + + let max_inputs_degree: usize = v + .inputs + .iter() + .map(|input| input.iter().map(|expr| expr.degree()).max().unwrap()) + .max() + .unwrap(); + + mv_lookup::degree_with_input(base_lookup_degree, max_inputs_degree) + }) + .max() + .unwrap(); + + let required_degree = std::cmp::max(max_gate_degree, max_single_lookup_degree); + let required_degree = (required_degree as u64 - 1).next_power_of_two() as usize; + + self.set_minimum_degree(required_degree + 1); + + // safe to unwrap here + let minimum_degree = self.minimum_degree.unwrap(); + + let mut lookups: Vec<_> = vec![]; + for v in self.lookups_map.values() { + let LookupTracker { table, inputs } = v; + let mut 
args = vec![super::mv_lookup::Argument::new( + "lookup", + table, + &[inputs[0].clone()], + )]; + + for input in inputs.iter().skip(1) { + let cur_input_degree = input.iter().map(|expr| expr.degree()).max().unwrap(); + let mut indicator = false; + for arg in args.iter_mut() { + // try to fit input in one of the args + let cur_argument_degree = arg.required_degree(); + let new_potential_degree = cur_argument_degree + cur_input_degree; + if new_potential_degree <= minimum_degree { + arg.inputs_expressions.push(input.clone()); + indicator = true; + break; + } + } + + if !indicator { + args.push(super::mv_lookup::Argument::new( + "dummy", + table, + &[input.clone()], + )) + } + } + lookups.append(&mut args); + } + self.lookups = lookups; + self } /// Add a lookup argument for some input expressions and table expressions. @@ -1768,30 +1856,25 @@ impl ConstraintSystem { /// they need to match. pub fn lookup_any>( &mut self, - name: S, + // FIXME use name in debug messages + _name: S, table_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, Expression)>, - ) -> usize { + ) { let mut cells = VirtualCells::new(self); - let table_map = table_map(&mut cells) - .into_iter() - .map(|(mut input, mut table)| { - if input.contains_simple_selector() { - panic!("expression containing simple selector supplied to lookup argument"); - } - if table.contains_simple_selector() { - panic!("expression containing simple selector supplied to lookup argument"); - } - input.query_cells(&mut cells); - table.query_cells(&mut cells); - (input, table) - }) - .collect(); - let index = self.lookups.len(); - - self.lookups - .push(lookup::Argument::new(name.as_ref(), table_map)); - - index + let table_map = table_map(&mut cells); + let (input_expressions, table_expressions): (Vec<_>, Vec<_>) = + table_map.into_iter().unzip(); + let table_expressions_identifier = table_expressions + .iter() + .fold(String::new(), |string, expr| string + &expr.identifier()); + + self.lookups_map + .entry(table_expressions_identifier) + .and_modify(|table_tracker| table_tracker.inputs.push(input_expressions.clone())) + .or_insert(LookupTracker { + table: table_expressions, + inputs: vec![input_expressions], + }); } /// Add a shuffle argument for some input expressions and table expressions. @@ -1923,7 +2006,9 @@ impl ConstraintSystem { /// larger amount than actually needed. This can be used, for example, to /// force the permutation argument to involve more columns in the same set. pub fn set_minimum_degree(&mut self, degree: usize) { - self.minimum_degree = Some(degree); + self.minimum_degree = self + .minimum_degree + .map_or(Some(degree), |min_degree| Some(max(min_degree, degree))); } /// Creates a new gate. @@ -2109,8 +2194,9 @@ impl ConstraintSystem { // lookup expressions for expr in self.lookups.iter_mut().flat_map(|lookup| { lookup - .input_expressions + .inputs_expressions .iter_mut() + .flatten() .chain(lookup.table_expressions.iter_mut()) }) { replace_selectors(expr, selector_replacements, true); @@ -2300,6 +2386,15 @@ impl ConstraintSystem { (0..=max_phase).map(sealed::Phase) } + /// Compute the maximum degree of gates in the constraint system + pub fn max_gate_degree(&self) -> usize { + self.gates + .iter() + .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) + .max() + .unwrap_or(0) + } + /// Compute the degree of the constraint system (the maximum degree of all /// constraints). 
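+    /// With chunked lookups this also reflects `minimum_degree`: `chunk_lookups`
+    /// sets it to `(d - 1).next_power_of_two() + 1`, where `d` is the larger of the
+    /// maximum gate degree and the largest single-lookup degree, so that every
+    /// packed `mv_lookup::Argument` fits the resulting extended domain.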
pub fn degree(&self) -> usize { @@ -2331,14 +2426,7 @@ impl ConstraintSystem { // Account for each gate to ensure our quotient polynomial is the // correct degree and that our extended domain is the right size. - degree = std::cmp::max( - degree, - self.gates - .iter() - .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) - .max() - .unwrap_or(0), - ); + degree = std::cmp::max(degree, self.max_gate_degree()); std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) } @@ -2453,7 +2541,7 @@ impl ConstraintSystem { } /// Returns lookup arguments - pub fn lookups(&self) -> &Vec> { + pub fn lookups(&self) -> &Vec> { &self.lookups } diff --git a/halo2_proofs/src/plonk/evaluation.rs b/halo2_proofs/src/plonk/evaluation.rs index 431c487c7e..b92327ea37 100644 --- a/halo2_proofs/src/plonk/evaluation.rs +++ b/halo2_proofs/src/plonk/evaluation.rs @@ -1,14 +1,19 @@ use crate::multicore; -use crate::plonk::{lookup, permutation, Any, ProvingKey}; +use crate::plonk::{mv_lookup, permutation, Any, ProvingKey}; use crate::poly::Basis; use crate::{ arithmetic::{parallelize, CurveAffine}, poly::{Coeff, ExtendedLagrangeCoeff, Polynomial, Rotation}, }; use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; - use super::{shuffle, ConstraintSystem, Expression}; +#[cfg(not(feature = "logup_skip_inv"))] +use rayon::{slice::ParallelSlice, prelude::{ParallelIterator, IntoParallelIterator}}; + +#[cfg(not(feature = "logup_skip_inv"))] +use crate::arithmetic::par_invert; + /// Return the index in the polynomial of size `isize` after rotation `rot`. fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize { (((idx as i32) + (rot * rot_scale)).rem_euclid(isize)) as usize @@ -168,7 +173,7 @@ pub struct Evaluator { /// Custom gates evalution pub custom_gates: GraphEvaluator, /// Lookups evalution - pub lookups: Vec>, + pub lookups: Vec<(Vec>, GraphEvaluator)>, /// Shuffle evalution pub shuffles: Vec>, } @@ -226,9 +231,12 @@ impl Evaluator { // Lookups for lookup in cs.lookups.iter() { - let mut graph = GraphEvaluator::default(); + let mut graph_table = GraphEvaluator::default(); + let mut graph_inputs: Vec<_> = (0..lookup.inputs_expressions.len()) + .map(|_| GraphEvaluator::default()) + .collect(); - let mut evaluate_lc = |expressions: &Vec>| { + let evaluate_lc = |graph: &mut GraphEvaluator, expressions: &Vec>| { let parts = expressions .iter() .map(|expr| graph.add_expression(expr)) @@ -240,22 +248,32 @@ impl Evaluator { )) }; - // Input coset - let compressed_input_coset = evaluate_lc(&lookup.input_expressions); + // Inputs cosets + for (input_expressions, graph_input) in lookup + .inputs_expressions + .iter() + .zip(graph_inputs.iter_mut()) + { + let compressed_input_coset = evaluate_lc(graph_input, input_expressions); + + graph_input.add_calculation(Calculation::Add( + compressed_input_coset, + ValueSource::Beta(), + )); + } + // table coset - let compressed_table_coset = evaluate_lc(&lookup.table_expressions); - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - let right_gamma = graph.add_calculation(Calculation::Add( + let compressed_table_coset = evaluate_lc(&mut graph_table, &lookup.table_expressions); + + graph_table.add_calculation(Calculation::Add( compressed_table_coset, - ValueSource::Gamma(), - )); - let lc = graph.add_calculation(Calculation::Add( - compressed_input_coset, ValueSource::Beta(), )); - graph.add_calculation(Calculation::Mul(lc, right_gamma)); - - ev.lookups.push(graph); + /* + a) f_i + beta + b) t + beta + */ + ev.lookups.push((graph_inputs.to_vec(), 
graph_table)); } // Shuffles @@ -306,7 +324,7 @@ impl Evaluator { beta: C::ScalarExt, gamma: C::ScalarExt, theta: C::ScalarExt, - lookups: &[Vec>], + lookups: &[Vec>], shuffles: &[Vec>], permutations: &[permutation::prover::Committed], ) -> Polynomial { @@ -464,30 +482,128 @@ impl Evaluator { }); } + + // For lookups, compute inputs_inv_sum = ∑ 1 / (f_i(X) + beta) + // The outer vector has capacity self.lookups.len() + #[cfg(not(feature = "logup_skip_inv"))] + let inputs_inv_sum: Vec> = self + .lookups + .iter() + .map(|lookup| { + let (inputs_lookup_evaluator, _) = lookup; + + let inputs_values_for_extended_domain: Vec> = (0..size) + .into_par_iter() + .map(|idx| { + let mut inputs_eval_data: Vec<_> = inputs_lookup_evaluator + .iter() + .map(|input_lookup_evaluator| { + input_lookup_evaluator.instance() + }) + .collect(); + + inputs_lookup_evaluator + .iter() + .zip(inputs_eval_data.iter_mut()) + .map(|(input_lookup_evaluator, input_eval_data)| { + input_lookup_evaluator.evaluate( + input_eval_data, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ) + }) + .collect() + }) + .collect(); + + let mut inputs_values_for_extended_domain: Vec = + inputs_values_for_extended_domain + .into_iter() + .flatten() + .collect(); + + par_invert(&mut inputs_values_for_extended_domain); + + let inputs_len = inputs_lookup_evaluator.len(); + + inputs_values_for_extended_domain + .par_chunks_exact(inputs_len) + .map(|values| values.iter().sum()) + .collect::>() + + }) + .collect(); + // Lookups for (n, lookup) in lookups.iter().enumerate() { // Polynomials required for this lookup. // Calculated here so these only have to be kept in memory for the short time // they are actually needed. 
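+            // When the `logup_skip_inv` feature is disabled, `inputs_inv_sum[n]`
+            // (computed above with one `par_invert` pass) already holds
+            // ∑_i 1/(φ_i(X)) for every extended-domain row, so the RHS in the loop
+            // below only needs multiplications; with the feature enabled the sum of
+            // inverses is avoided by clearing denominators as in formula (2).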
- let product_coset = pk.vk.domain.coeff_to_extended(lookup.product_poly.clone()); - let permuted_input_coset = pk - .vk - .domain - .coeff_to_extended(lookup.permuted_input_poly.clone()); - let permuted_table_coset = pk - .vk - .domain - .coeff_to_extended(lookup.permuted_table_poly.clone()); + + let phi_coset = pk.vk.domain.coeff_to_extended(lookup.phi_poly.clone()); + let m_coset = pk.vk.domain.coeff_to_extended(lookup.m_poly.clone()); // Lookup constraints + /* + φ_i(X) = f_i(X) + beta + τ(X) = t(X) + beta + LHS = τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X)) + RHS = τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X)))) (1) + = (τ(X) * Π(φ_i(X)) * ∑ 1/(φ_i(X))) - Π(φ_i(X)) * m(X) + = Π(φ_i(X)) * (τ(X) * ∑ 1/(φ_i(X)) - m(X)) + = ∑_i τ(X) * Π_{j != i} φ_j(X) - m(X) * Π(φ_i(X)) (2) + */ parallelize(&mut values, |values, start| { - let lookup_evaluator = &self.lookups[n]; - let mut eval_data = lookup_evaluator.instance(); + let (inputs_lookup_evaluator, table_lookup_evaluator) = &self.lookups[n]; + let mut inputs_eval_data: Vec<_> = inputs_lookup_evaluator + .iter() + .map(|input_lookup_evaluator| input_lookup_evaluator.instance()) + .collect(); + let mut table_eval_data = table_lookup_evaluator.instance(); + for (i, value) in values.iter_mut().enumerate() { let idx = start + i; + // f_i(X) + beta for i in expressions + let inputs_value: Vec = inputs_lookup_evaluator + .iter() + .zip(inputs_eval_data.iter_mut()) + .map(|(input_lookup_evaluator, input_eval_data)| { + input_lookup_evaluator.evaluate( + input_eval_data, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ) + }) + .collect(); + + // Π(φ_i(X)) + let inputs_prod: C::Scalar = inputs_value + .iter() + .fold(C::Scalar::ONE, |acc, input| acc * input); - let table_value = lookup_evaluator.evaluate( - &mut eval_data, + // t(X) + beta + let table_value = table_lookup_evaluator.evaluate( + &mut table_eval_data, fixed, advice, instance, @@ -503,38 +619,45 @@ impl Evaluator { ); let r_next = get_rotation_idx(idx, 1, rot_scale, isize); - let r_prev = get_rotation_idx(idx, -1, rot_scale, isize); - - let a_minus_s = permuted_input_coset[idx] - permuted_table_coset[idx]; - // l_0(X) * (1 - z(X)) = 0 - *value = *value * y + ((one - product_coset[idx]) * l0[idx]); - // l_last(X) * (z(X)^2 - z(X)) = 0 - *value = *value * y - + ((product_coset[idx] * product_coset[idx] - product_coset[idx]) - * l_last[idx]); - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) - // (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) - // ) = 0 - *value = *value * y - + ((product_coset[r_next] - * (permuted_input_coset[idx] + beta) - * (permuted_table_coset[idx] + gamma) - - product_coset[idx] * table_value) - * l_active_row[idx]); - // Check that the first values in the permuted input expression and permuted - // fixed expression are the same. - // l_0(X) * (a'(X) - s'(X)) = 0 - *value = *value * y + (a_minus_s * l0[idx]); - // Check that each value in the permuted lookup input expression is either - // equal to the value above it, or the value at the same index in the - // permuted table expression. 
- // (1 - (l_last + l_blind)) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 - *value = *value * y - + (a_minus_s - * (permuted_input_coset[idx] - permuted_input_coset[r_prev]) - * l_active_row[idx]); + let lhs = { + // τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X)) + table_value * inputs_prod * (phi_coset[r_next] - phi_coset[idx]) + }; + + #[cfg(feature = "logup_skip_inv")] + let rhs = { + // τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X)))) + // = ∑_i τ(X) * Π_{j != i} φ_j(X) - m(X) * Π(φ_i(X)) + let inputs = (0..inputs_value.len()) + .map(|i| { + inputs_value + .iter() + .enumerate() + .filter(|(j, _)| *j != i) + .fold(C::Scalar::ONE, |acc, (_, x)| acc * *x) + }) + .fold(C::Scalar::ZERO, |acc, x| acc + x); + inputs * table_value - inputs_prod * m_coset[idx] + }; + + #[cfg(not(feature = "logup_skip_inv"))] + let rhs = { + // ∑ 1 / (f_i(X) + beta) at ω^idx + let inv_sum: C::Scalar = inputs_inv_sum[n][idx]; + // τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X)))) + // = (τ(X) * Π(φ_i(X)) * ∑ 1/(φ_i(X))) - Π(φ_i(X)) * m(X) + // = Π(φ_i(X)) * (τ(X) * ∑ 1/(φ_i(X)) - m(X)) + inputs_prod * (table_value * inv_sum - m_coset[idx]) + }; + + // phi[0] = 0 + *value = *value * y + l0[idx] * phi_coset[idx]; + + // phi[u] = 0 + *value = *value * y + l_last[idx] * phi_coset[idx]; + + // q(X) = (1 - (l_last(X) + l_blind(X))) * (LHS - RHS) + *value = *value * y + (lhs - rhs) * l_active_row[idx]; } }); } diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index 984eecb9e8..2ee0469e90 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -40,7 +40,7 @@ where let config = ConcreteCircuit::configure_with_params(&mut cs, params); #[cfg(not(feature = "circuit-params"))] let config = ConcreteCircuit::configure(&mut cs); - + let cs = cs.chunk_lookups(); let degree = cs.degree(); let domain = EvaluationDomain::new(degree as u32, k); diff --git a/halo2_proofs/src/plonk/mv_lookup.rs b/halo2_proofs/src/plonk/mv_lookup.rs new file mode 100644 index 0000000000..b4fd2d825f --- /dev/null +++ b/halo2_proofs/src/plonk/mv_lookup.rs @@ -0,0 +1,96 @@ +use super::circuit::Expression; +use ff::Field; +use std::fmt::{self, Debug}; + +pub(crate) mod prover; +pub(crate) mod verifier; + +/// Degree of lookup without inputs +pub fn base_degree(table_degree: usize) -> usize { + // let lhs_degree = table_degree + inputs_expressions_degree + 1 + // let degree = lhs_degree + 1 + std::cmp::max(3, table_degree + 2) +} + +pub fn degree_with_input(base_degree: usize, input_expression_degree: usize) -> usize { + base_degree + input_expression_degree +} + +#[derive(Clone)] +pub struct Argument { + pub name: &'static str, + pub(crate) table_expressions: Vec>, + pub(crate) inputs_expressions: Vec>>, +} + +impl Debug for Argument { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Argument") + .field("table_expressions", &self.table_expressions) + .field("inputs_expressions", &self.inputs_expressions) + .finish() + } +} + +impl Argument { + /// Constructs a new lookup argument. 
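+    ///
+    /// `table` holds the table expressions and `input` holds one tuple of input
+    /// expressions per lookup folded into this argument; `required_degree` asserts
+    /// that every input tuple has the same arity as the table.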
+ pub fn new(name: &'static str, table: &[Expression], input: &[Vec>]) -> Self { + Self { + name, + table_expressions: table.to_owned(), + inputs_expressions: input.to_owned(), + } + } + + pub(crate) fn required_degree(&self) -> usize { + assert!(self + .inputs_expressions + .iter() + .all(|input| input.len() == self.table_expressions.len())); + + let expr_degree = |input_expressions: &Vec>| { + let mut input_degree = 0; + for expr in input_expressions.iter() { + input_degree = std::cmp::max(input_degree, expr.degree()); + } + + input_degree + }; + + let inputs_expressions_degree: usize = self + .inputs_expressions + .iter() + .map(|input_expressions| expr_degree(input_expressions)) + .sum(); + + let table_degree = expr_degree(&self.table_expressions); + + /* + φ_i(X) = f_i(X) + α + τ(X) = t(X) + α + LHS = τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X)) + = table_degree + sum(input_degree) + 1 + RHS = τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X)))) + + deg(q(X)) = (1 - (q_last + q_blind)) * (LHS - RHS) + = 1 + LHS + */ + + let lhs_degree = table_degree + inputs_expressions_degree + 1; + let degree = lhs_degree + 1; + + // 3 = phi + q_blind + table (where table is = 1) + // + 1 for each of inputs expressions + std::cmp::max(3 + self.inputs_expressions.len(), degree) + } + + /// Returns input of this argument + pub fn input_expressions(&self) -> &Vec>> { + &self.inputs_expressions + } + + /// Returns table of this argument + pub fn table_expressions(&self) -> &Vec> { + &self.table_expressions + } +} diff --git a/halo2_proofs/src/plonk/mv_lookup/prover.rs b/halo2_proofs/src/plonk/mv_lookup/prover.rs new file mode 100644 index 0000000000..96e1241d7b --- /dev/null +++ b/halo2_proofs/src/plonk/mv_lookup/prover.rs @@ -0,0 +1,438 @@ +use super::super::{ + circuit::Expression, ChallengeBeta, ChallengeTheta, ChallengeX, Error, + ProvingKey, +}; +use super::Argument; +use crate::plonk::evaluation::evaluate; +use crate::{ + arithmetic::{eval_polynomial, parallelize, parallelize_naive, CurveAffine, Field, par_invert}, + poly::{ + commitment::{Blind, Params}, + Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, + Rotation, + }, + transcript::{EncodedChallenge, TranscriptWrite}, +}; +use ff::WithSmallOrderMulGroup; +use group::Curve; +use rand_core::RngCore; +use std::{ + iter, + ops::{Mul, MulAssign}, +}; + +use rayon::prelude::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator, ParallelSliceMut}; + +#[derive(Debug)] +pub(in crate::plonk) struct Prepared { + compressed_inputs_expressions: Vec>, + compressed_table_expression: Polynomial, + m_values: Polynomial, +} + +#[derive(Debug)] +pub(in crate::plonk) struct Committed { + pub(in crate::plonk) m_poly: Polynomial, + pub(in crate::plonk) phi_poly: Polynomial, +} + +pub(in crate::plonk) struct Evaluated { + constructed: Committed, +} + +impl + Ord> Argument { + pub(in crate::plonk) fn prepare< + 'a, + 'params: 'a, + C, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + &self, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + #[allow(unused_mut)] mut rng: R, // in case we want to blind (do we actually need zk?) 
+ transcript: &mut T, + ) -> Result, Error> + where + C: CurveAffine, + C::Curve: Mul + MulAssign, + { + // Closure to get values of expressions and compress them + let compress_expressions = |expressions: &[Expression]| { + let compressed_expression = expressions + .iter() + .map(|expression| { + pk.vk.domain.lagrange_from_vec(evaluate( + expression, + params.n() as usize, + 1, + fixed_values, + advice_values, + instance_values, + challenges, + )) + }) + .fold(domain.empty_lagrange(), |acc, expression| { + acc * *theta + &expression + }); + compressed_expression + }; + + // Get values of input expressions involved in the lookup and compress them + let compressed_inputs_expressions: Vec<_> = self + .inputs_expressions + .iter() + .map(|input_expressions| compress_expressions(input_expressions)) + .collect(); + + // Get values of table expressions involved in the lookup and compress them + let compressed_table_expression = compress_expressions(&self.table_expressions); + + let blinding_factors = pk.vk.cs.blinding_factors(); + + // compute m(X) + let mut sorted_table_with_indices = compressed_table_expression + .iter() + .take(params.n() as usize - blinding_factors - 1) + .enumerate() + .map(|(i, t)| (t, i)) + .collect::>(); + sorted_table_with_indices.par_sort_by_key(|(&t, _)| t); + + let m_values: Vec = { + use std::sync::atomic::{AtomicU64, Ordering}; + let m_values: Vec = (0..params.n()).map(|_| AtomicU64::new(0)).collect(); + + for compressed_input_expression in compressed_inputs_expressions.iter() { + let _ = compressed_input_expression + .par_iter() + .take(params.n() as usize - blinding_factors - 1) + .try_for_each(|fi| -> Result<(), Error> { + let index = sorted_table_with_indices + .binary_search_by_key(&fi, |&(t, _)| t) + .map_err(|_| Error::ConstraintSystemFailure)?; + let index = sorted_table_with_indices[index].1; + + m_values[index].fetch_add(1, Ordering::Relaxed); + Ok(()) + }); + } + + m_values + .par_iter() + .map(|mi| F::from(mi.load(Ordering::Relaxed) as u64)) + .collect() + }; + let m_values = pk.vk.domain.lagrange_from_vec(m_values); + + #[cfg(feature = "sanity-checks")] + { + // check that m is zero after blinders + let invalid_ms = m_values + .iter() + .skip(params.n() as usize - blinding_factors) + .collect::>(); + assert_eq!(invalid_ms.len(), blinding_factors); + for mi in invalid_ms { + assert_eq!(*mi, C::Scalar::zero()); + } + + // check sums + let alpha = C::Scalar::random(&mut rng); + let cs_input_sum = + |compressed_input_expression: &Polynomial| { + let mut lhs_sum = C::Scalar::zero(); + for &fi in compressed_input_expression + .iter() + .take(params.n() as usize - blinding_factors - 1) + { + lhs_sum += (fi + alpha).invert().unwrap(); + } + + lhs_sum + }; + + let mut lhs_sum = C::Scalar::zero(); + + for compressed_input_expression in compressed_inputs_expressions.iter() { + lhs_sum += cs_input_sum(compressed_input_expression); + } + + let mut rhs_sum = C::Scalar::zero(); + for (&ti, &mi) in compressed_table_expression.iter().zip(m_values.iter()) { + rhs_sum += mi * (ti + alpha).invert().unwrap(); + } + + assert_eq!(lhs_sum, rhs_sum); + } + + // commit to m(X) + // TODO: should we use zero instead? 
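+        // At this point m(X) stores, for every table row, how many times its value
+        // occurs across all compressed input columns (counted via binary search into
+        // the sorted table). These multiplicities are what make the log-derivative
+        // identity ∑_j 1/(f_j + β) = ∑_i m_i / (t_i + β) hold, which the grand sum
+        // φ committed in `commit_grand_sum` accumulates.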
+ let blind = Blind(C::Scalar::random(rng)); + let m_commitment = params.commit_lagrange(&m_values, blind).to_affine(); + + // write commitment of m(X) to transcript + transcript.write_point(m_commitment)?; + + Ok(Prepared { + compressed_inputs_expressions, + compressed_table_expression, + m_values, + }) + } +} + +impl Prepared { + pub(in crate::plonk) fn commit_grand_sum< + 'params, + P: Params<'params, C>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + >( + self, + pk: &ProvingKey, + params: &P, + beta: ChallengeBeta, + mut rng: R, + transcript: &mut T, + ) -> Result, Error> { + /* + φ_i(X) = f_i(X) + beta + τ(X) = t(X) + beta + LHS = τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X)) + RHS = τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X)))) + */ + + // ∑ 1/(φ_i(X)) + let mut inputs_log_derivatives = vec![C::Scalar::ZERO; params.n() as usize]; + for compressed_input_expression in self.compressed_inputs_expressions.iter() { + let mut input_log_derivatives = vec![C::Scalar::ZERO; params.n() as usize]; + + parallelize( + &mut input_log_derivatives, + |input_log_derivatives, start| { + for (input_log_derivative, fi) in input_log_derivatives + .iter_mut() + .zip(compressed_input_expression[start..].iter()) + { + *input_log_derivative = *beta + fi; + } + }, + ); + par_invert(input_log_derivatives.as_mut_slice()); + + // TODO: remove last blinders from this + for i in 0..params.n() as usize { + inputs_log_derivatives[i] += input_log_derivatives[i]; + } + } + + // 1 / τ(X) + let mut table_log_derivatives = vec![C::Scalar::ZERO; params.n() as usize]; + parallelize( + &mut table_log_derivatives, + |table_log_derivatives, start| { + for (table_log_derivative, ti) in table_log_derivatives + .iter_mut() + .zip(self.compressed_table_expression[start..].iter()) + { + *table_log_derivative = *beta + ti; + } + }, + ); + + par_invert(table_log_derivatives.as_mut_slice()); + + // (Σ 1/(φ_i(X)) - m(X) / τ(X)) + let mut log_derivatives_diff = vec![C::Scalar::ZERO; params.n() as usize]; + parallelize(&mut log_derivatives_diff, |log_derivatives_diff, start| { + for (((log_derivative_diff, fi), ti), mi) in log_derivatives_diff + .iter_mut() + .zip(inputs_log_derivatives[start..].iter()) + .zip(table_log_derivatives[start..].iter()) + .zip(self.m_values[start..].iter()) + { + // (Σ 1/(φ_i(X)) - m(X) / τ(X)) + *log_derivative_diff = *fi - *mi * *ti; + } + }); + + // Compute the evaluations of the lookup grand sum polynomial + // over our domain, starting with phi[0] = 0 + let blinding_factors = pk.vk.cs.blinding_factors(); + let phi = { + let active_size = params.n() as usize - blinding_factors; + let chunk = { + let num_threads = crate::multicore::current_num_threads(); + let mut chunk = (active_size as usize) / num_threads; + if chunk < num_threads { + chunk = 1; + } + chunk + }; + let num_chunks = (active_size as usize + chunk - 1) / chunk; + let mut segment_sum = vec![C::Scalar::ZERO; num_chunks]; + let mut grand_sum = iter::once(C::Scalar::ZERO) + .chain(log_derivatives_diff) + .take(active_size) + .collect::>(); + // TODO: remove the implicit assumption that parallelize_naive() split the grand_sum + // into segments that each has `chunk` elements except the last. + // !! Do not use `parallelize()` here because it breaks the above assumption. !! 
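+            // Two-pass parallel prefix sum: each segment first computes its local
+            // running sum in place, then `segment_sum` sequentially accumulates the
+            // totals of all earlier segments, and finally every segment (found via
+            // `start / chunk`) adds its offset, yielding the global running sum with
+            // phi[0] = 0.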
+ parallelize_naive(&mut grand_sum, |segment_grand_sum, _| { + for i in 1..segment_grand_sum.len() { + segment_grand_sum[i] += segment_grand_sum[i - 1]; + } + }); + for i in 1..segment_sum.len() { + segment_sum[i] = segment_sum[i - 1] + grand_sum[i * chunk - 1]; + } + parallelize_naive(&mut grand_sum, |grand_sum, start| { + let prefix_sum = segment_sum[start / chunk]; + for v in grand_sum.iter_mut() { + *v += prefix_sum; + } + }); + grand_sum + .into_iter() + .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) + .collect::>() + }; + assert_eq!(phi.len(), params.n() as usize); + let phi = pk.vk.domain.lagrange_from_vec(phi); + + #[cfg(feature = "sanity-checks")] + // This test works only with intermediate representations in this method. + // It can be used for debugging purposes. + { + // While in Lagrange basis, check that product is correctly constructed + let u = (params.n() as usize) - (blinding_factors + 1); + + /* + φ_i(X) = f_i(X) + α + τ(X) = t(X) + α + LHS = τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X)) + RHS = τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X)))) + */ + + // q(X) = LHS - RHS mod zH(X) + for i in 0..u { + // Π(φ_i(X)) + let fi_prod = || { + let mut prod = C::Scalar::one(); + for compressed_input_expression in self.compressed_inputs_expressions.iter() { + prod *= *beta + compressed_input_expression[i]; + } + + prod + }; + + let fi_log_derivative = || { + let mut sum = C::Scalar::ZERO; + for compressed_input_expression in self.compressed_inputs_expressions.iter() { + sum += (*beta + compressed_input_expression[i]).invert().unwrap(); + } + + sum + }; + + // LHS = τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X)) + let lhs = { + (*beta + self.compressed_table_expression[i]) + * fi_prod() + * (phi[i + 1] - phi[i]) + }; + + // RHS = τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X)))) + let rhs = { + (*beta + self.compressed_table_expression[i]) + * fi_prod() + * (fi_log_derivative() + - self.m_values[i] + * (*beta + self.compressed_table_expression[i]) + .invert() + .unwrap()) + }; + + assert_eq!(lhs - rhs, C::Scalar::ZERO); + } + + assert_eq!(phi[u], C::Scalar::ZERO); + } + + let grand_sum_blind = Blind(C::Scalar::random(rng)); + let phi_commitment = params.commit_lagrange(&phi, grand_sum_blind).to_affine(); + + // Hash grand sum commitment + transcript.write_point(phi_commitment)?; + + Ok(Committed { + m_poly: pk.vk.domain.lagrange_to_coeff(self.m_values), + phi_poly: pk.vk.domain.lagrange_to_coeff(phi), + }) + } +} + +impl Committed { + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + self, + pk: &ProvingKey, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let x_next = domain.rotate_omega(*x, Rotation::next()); + + let phi_eval = eval_polynomial(&self.phi_poly, *x); + let phi_next_eval = eval_polynomial(&self.phi_poly, x_next); + let m_eval = eval_polynomial(&self.m_poly, *x); + + // Hash each advice evaluation + for eval in iter::empty() + .chain(Some(phi_eval)) + .chain(Some(phi_next_eval)) + .chain(Some(m_eval)) + { + transcript.write_scalar(eval)?; + } + + Ok(Evaluated { constructed: self }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn open<'a>( + &'a self, + pk: &'a ProvingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.phi_poly, + blind: Blind(C::Scalar::ZERO), + })) + .chain(Some(ProverQuery { + point: x_next, + poly: &self.constructed.phi_poly, + blind: 
Blind(C::Scalar::ZERO), + })) + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.m_poly, + blind: Blind(C::Scalar::ZERO), + })) + } +} diff --git a/halo2_proofs/src/plonk/mv_lookup/verifier.rs b/halo2_proofs/src/plonk/mv_lookup/verifier.rs new file mode 100644 index 0000000000..7816712f97 --- /dev/null +++ b/halo2_proofs/src/plonk/mv_lookup/verifier.rs @@ -0,0 +1,195 @@ +use std::iter; + +use super::super::{ + circuit::Expression, ChallengeBeta, ChallengeTheta, ChallengeX, +}; +use super::Argument; +use crate::{ + arithmetic::{CurveAffine, Field}, + plonk::{Error, VerifyingKey}, + poly::{commitment::MSM, Rotation, VerifierQuery}, + transcript::{EncodedChallenge, TranscriptRead}, +}; +use ff::BatchInvert; + +pub struct PreparedCommitments { + m_commitment: C, +} + +pub struct Committed { + prepared: PreparedCommitments, + phi_commitment: C, +} + +pub struct Evaluated { + committed: Committed, + phi_eval: C::Scalar, + phi_next_eval: C::Scalar, + m_eval: C::Scalar, +} + +impl Argument { + pub(in crate::plonk) fn read_prepared_commitments< + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, + >( + &self, + transcript: &mut T, + ) -> Result, Error> { + let m_commitment = transcript.read_point()?; + + Ok(PreparedCommitments { m_commitment }) + } +} + +impl PreparedCommitments { + pub(in crate::plonk) fn read_grand_sum_commitment< + E: EncodedChallenge, + T: TranscriptRead, + >( + self, + transcript: &mut T, + ) -> Result, Error> { + let phi_commitment = transcript.read_point()?; + + Ok(Committed { + prepared: self, + phi_commitment, + }) + } +} + +impl Committed { + pub(crate) fn evaluate, T: TranscriptRead>( + self, + transcript: &mut T, + ) -> Result, Error> { + let phi_eval = transcript.read_scalar()?; + let phi_next_eval = transcript.read_scalar()?; + let m_eval = transcript.read_scalar()?; + + Ok(Evaluated { + committed: self, + phi_eval, + phi_next_eval, + m_eval, + }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn expressions<'a>( + &'a self, + l_0: C::Scalar, + l_last: C::Scalar, + l_blind: C::Scalar, + argument: &'a Argument, + theta: ChallengeTheta, + beta: ChallengeBeta, + advice_evals: &[C::Scalar], + fixed_evals: &[C::Scalar], + instance_evals: &[C::Scalar], + challenges: &[C::Scalar], + ) -> impl Iterator + 'a { + let active_rows = C::Scalar::ONE - (l_last + l_blind); + + /* + φ_i(X) = f_i(X) + beta + τ(X) = t(X) + beta + LHS = τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X)) + RHS = τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X)))) + */ + + let grand_sum_expression = || { + let compress_expressions = |expressions: &[Expression]| { + expressions + .iter() + .map(|expression| { + expression.evaluate( + &|scalar| scalar, + &|_| panic!("virtual selectors are removed during optimization"), + &|query| fixed_evals[query.index.unwrap()], + &|query| advice_evals[query.index.unwrap()], + &|query| instance_evals[query.index.unwrap()], + &|challenge| challenges[challenge.index()], + &|a| -a, + &|a, b| a + &b, + &|a, b| a * &b, + &|a, scalar| a * &scalar, + ) + }) + .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) + }; + + // φ_i(X) = f_i(X) + beta + let mut f_evals: Vec<_> = argument + .inputs_expressions + .iter() + .map(|input_expressions| compress_expressions(input_expressions) + *beta) + .collect(); + + let t_eval = compress_expressions(&argument.table_expressions); + + let tau = t_eval + *beta; + // Π(φ_i(X)) + let prod_fi = f_evals + .iter() + .fold(C::Scalar::ONE, |acc, eval| acc * eval); + // ∑ 1/(φ_i(X)) + let sum_inv_fi = { + f_evals.batch_invert(); + 
f_evals + .iter() + .fold(C::Scalar::ZERO, |acc, eval| acc + eval) + }; + + // LHS = τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X)) + let lhs = tau * prod_fi * (self.phi_next_eval - self.phi_eval); + + // RHS = τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X)))) + let rhs = { tau * prod_fi * (sum_inv_fi - self.m_eval * tau.invert().unwrap()) }; + + (lhs - rhs) * active_rows + }; + + std::iter::empty() + .chain( + // phi[0] = 0 + Some(l_0 * self.phi_eval), + ) + .chain( + // phi[u] = 0 + Some(l_last * self.phi_eval), + ) + .chain( + // (1 - l_last - l_blind) * (lhs - rhs) = 0 + Some(grand_sum_expression()), + ) + } + + pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( + &'r self, + vk: &'r VerifyingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_next = vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + .chain(Some(VerifierQuery::new_commitment( + &self.committed.phi_commitment, + *x, + self.phi_eval, + ))) + .chain(Some(VerifierQuery::new_commitment( + &self.committed.phi_commitment, + x_next, + self.phi_next_eval, + ))) + .chain(Some(VerifierQuery::new_commitment( + &self.committed.prepared.m_commitment, + *x, + self.m_eval, + ))) + } +} diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index cd0d7306a9..e3eccccdd6 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -11,7 +11,7 @@ use super::{ Advice, Any, Assignment, Challenge, Circuit, Column, ConstraintSystem, Fixed, FloorPlanner, Instance, Selector, }, - lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, + mv_lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, ProvingKey, }; @@ -51,7 +51,7 @@ pub fn create_proof< transcript: &mut T, ) -> Result<(), Error> where - Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64> + Ord, { if circuits.len() != instances.len() { return Err(Error::InvalidInstances); @@ -424,7 +424,7 @@ where // Sample theta challenge for keeping lookup columns linearly independent let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); - let lookups: Vec>> = instance + let lookups: Vec>> = instance .iter() .zip(advice.iter()) .map(|(instance, advice)| -> Result, Error> { @@ -434,7 +434,7 @@ where .lookups .iter() .map(|lookup| { - lookup.commit_permuted( + lookup.prepare( pk, params, domain, @@ -477,13 +477,13 @@ where }) .collect::, _>>()?; - let lookups: Vec>> = lookups + let lookups: Vec>> = lookups .into_iter() .map(|lookups| -> Result, _> { // Construct and commit to products for each lookup lookups .into_iter() - .map(|lookup| lookup.commit_product(pk, params, beta, gamma, &mut rng, transcript)) + .map(|lookup| lookup.commit_grand_sum(pk, params, beta, &mut rng, transcript)) .collect::, _>>() }) .collect::, _>>()?; @@ -636,7 +636,7 @@ where .collect::, _>>()?; // Evaluate the lookups, if any, at omega^i x. 
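+    // For each mv_lookup argument this writes phi(x), phi(omega * x) and m(x) to the
+    // transcript (see `mv_lookup::prover::Committed::evaluate`), matching the
+    // openings queried later at x and omega * x.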
- let lookups: Vec>> = lookups + let lookups: Vec>> = lookups .into_iter() .map(|lookups| -> Result, _> { lookups diff --git a/halo2_proofs/src/plonk/verifier.rs b/halo2_proofs/src/plonk/verifier.rs index 76675bcdfa..5bf873d759 100644 --- a/halo2_proofs/src/plonk/verifier.rs +++ b/halo2_proofs/src/plonk/verifier.rs @@ -128,7 +128,7 @@ where vk.cs .lookups .iter() - .map(|argument| argument.read_permuted_commitments(transcript)) + .map(|argument| argument.read_prepared_commitments(transcript)) .collect::, _>>() }) .collect::, _>>()?; @@ -149,10 +149,10 @@ where let lookups_committed = lookups_permuted .into_iter() .map(|lookups| { - // Hash each lookup product commitment + // Hash each lookup sum commitment lookups .into_iter() - .map(|lookup| lookup.read_product_commitment(transcript)) + .map(|lookup| lookup.read_grand_sum_commitment(transcript)) .collect::, _>>() }) .collect::, _>>()?; @@ -331,7 +331,6 @@ where argument, theta, beta, - gamma, advice_evals, fixed_evals, instance_evals, diff --git a/halo2_proofs/src/poly.rs b/halo2_proofs/src/poly.rs index 9cb6b149bc..37c9d77e89 100644 --- a/halo2_proofs/src/poly.rs +++ b/halo2_proofs/src/poly.rs @@ -303,7 +303,7 @@ impl<'a, F: Field, B: Basis> Sub for &'a Polynomial { /// Describes the relative rotation of a vector. Negative numbers represent /// reverse (leftmost) rotations and positive numbers represent forward (rightmost) /// rotations. Zero represents no rotation. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct Rotation(pub i32); impl Rotation { diff --git a/halo2_proofs/tests/plonk_api.rs b/halo2_proofs/tests/plonk_api.rs index 28ffb399ff..924cde0740 100644 --- a/halo2_proofs/tests/plonk_api.rs +++ b/halo2_proofs/tests/plonk_api.rs @@ -295,6 +295,13 @@ fn plonk_api() { let sp = meta.fixed_column(); let sl = meta.lookup_table_column(); + // Add to test mvlookup + let dummy = meta.complex_selector(); + let dummy_2 = meta.complex_selector(); + let dummy_3 = meta.complex_selector(); + + let dummy_table = meta.lookup_table_column(); + /* * A B ... 
sl * [ @@ -315,6 +322,21 @@ fn plonk_api() { let a_ = meta.query_any(a, Rotation::cur()); vec![(a_, sl)] }); + + // Add to test mvlookup + meta.lookup("lookup_same", |meta| { + let a_ = meta.query_any(a, Rotation::cur()); + vec![(a_, sl)] + }); + + meta.lookup("lookup_same", |meta| { + let b_ = meta.query_any(b, Rotation::cur()); + let dummy = meta.query_selector(dummy); + let dummy_2 = meta.query_selector(dummy_2); + let dummy_3 = meta.query_selector(dummy_3); + + vec![(dummy * dummy_2 * dummy_3 * b_, dummy_table)] + }); meta.create_gate("Combined add-mult", |meta| { let d = meta.query_advice(d, Rotation::next()); @@ -589,6 +611,8 @@ fn plonk_api() { >(verifier_params, pk.get_vk(), &proof[..]); } + // TODO: fix the ipa test + /* fn test_plonk_api_ipa() { use halo2_proofs::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA}; use halo2_proofs::poly::ipa::multiopen::{ProverIPA, VerifierIPA}; @@ -616,411 +640,11 @@ fn plonk_api() { Blake2bRead<_, _, Challenge255<_>>, AccumulatorStrategy<_>, >(verifier_params, pk.get_vk(), &proof[..]); - - // Check that the verification key has not changed unexpectedly - { - //panic!("{:#?}", pk.get_vk().pinned()); - assert_eq!( - format!("{:#?}", pk.get_vk().pinned()), - r#####"PinnedVerificationKey { - base_modulus: "0x40000000000000000000000000000000224698fc0994a8dd8c46eb2100000001", - scalar_modulus: "0x40000000000000000000000000000000224698fc094cf91b992d30ed00000001", - domain: PinnedEvaluationDomain { - k: 5, - extended_k: 7, - omega: 0x0cc3380dc616f2e1daf29ad1560833ed3baea3393eceb7bc8fa36376929b78cc, - }, - cs: PinnedConstraintSystem { - num_fixed_columns: 7, - num_advice_columns: 5, - num_instance_columns: 1, - num_selectors: 0, - gates: [ - Sum( - Sum( - Sum( - Sum( - Product( - Advice { - query_index: 0, - column_index: 1, - rotation: Rotation( - 0, - ), - }, - Fixed { - query_index: 2, - column_index: 2, - rotation: Rotation( - 0, - ), - }, - ), - Product( - Advice { - query_index: 1, - column_index: 2, - rotation: Rotation( - 0, - ), - }, - Fixed { - query_index: 3, - column_index: 3, - rotation: Rotation( - 0, - ), - }, - ), - ), - Product( - Product( - Advice { - query_index: 0, - column_index: 1, - rotation: Rotation( - 0, - ), - }, - Advice { - query_index: 1, - column_index: 2, - rotation: Rotation( - 0, - ), - }, - ), - Fixed { - query_index: 5, - column_index: 1, - rotation: Rotation( - 0, - ), - }, - ), - ), - Negated( - Product( - Advice { - query_index: 2, - column_index: 3, - rotation: Rotation( - 0, - ), - }, - Fixed { - query_index: 4, - column_index: 4, - rotation: Rotation( - 0, - ), - }, - ), - ), - ), - Product( - Fixed { - query_index: 1, - column_index: 0, - rotation: Rotation( - 0, - ), - }, - Product( - Advice { - query_index: 3, - column_index: 4, - rotation: Rotation( - 1, - ), - }, - Advice { - query_index: 4, - column_index: 0, - rotation: Rotation( - -1, - ), - }, - ), - ), - ), - Product( - Fixed { - query_index: 6, - column_index: 5, - rotation: Rotation( - 0, - ), - }, - Sum( - Advice { - query_index: 0, - column_index: 1, - rotation: Rotation( - 0, - ), - }, - Negated( - Instance { - query_index: 0, - column_index: 0, - rotation: Rotation( - 0, - ), - }, - ), - ), - ), - ], - advice_queries: [ - ( - Column { - index: 1, - column_type: Advice, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 2, - column_type: Advice, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 3, - column_type: Advice, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 4, - column_type: Advice, - }, - Rotation( - 1, - ), - ), 
- ( - Column { - index: 0, - column_type: Advice, - }, - Rotation( - -1, - ), - ), - ( - Column { - index: 0, - column_type: Advice, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 4, - column_type: Advice, - }, - Rotation( - 0, - ), - ), - ], - instance_queries: [ - ( - Column { - index: 0, - column_type: Instance, - }, - Rotation( - 0, - ), - ), - ], - fixed_queries: [ - ( - Column { - index: 6, - column_type: Fixed, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 0, - column_type: Fixed, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 2, - column_type: Fixed, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 3, - column_type: Fixed, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 4, - column_type: Fixed, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 1, - column_type: Fixed, - }, - Rotation( - 0, - ), - ), - ( - Column { - index: 5, - column_type: Fixed, - }, - Rotation( - 0, - ), - ), - ], - permutation: Argument { - columns: [ - Column { - index: 1, - column_type: Advice, - }, - Column { - index: 2, - column_type: Advice, - }, - Column { - index: 3, - column_type: Advice, - }, - Column { - index: 0, - column_type: Fixed, - }, - Column { - index: 0, - column_type: Advice, - }, - Column { - index: 4, - column_type: Advice, - }, - Column { - index: 0, - column_type: Instance, - }, - Column { - index: 1, - column_type: Fixed, - }, - Column { - index: 2, - column_type: Fixed, - }, - Column { - index: 3, - column_type: Fixed, - }, - Column { - index: 4, - column_type: Fixed, - }, - Column { - index: 5, - column_type: Fixed, - }, - ], - }, - lookups: [ - Argument { - input_expressions: [ - Advice { - query_index: 0, - column_index: 1, - rotation: Rotation( - 0, - ), - }, - ], - table_expressions: [ - Fixed { - query_index: 0, - column_index: 6, - rotation: Rotation( - 0, - ), - }, - ], - }, - ], - constants: [], - minimum_degree: None, - }, - fixed_commitments: [ - (0x2bbc94ef7b22aebef24f9a4b0cc1831882548b605171366017d45c3e6fd92075, 0x082b801a6e176239943bfb759fb02138f47a5c8cc4aa7fa0af559fde4e3abd97), - (0x2bf5082b105b2156ed0e9c5b8e42bf2a240b058f74a464d080e9585274dd1e84, 0x222ad83cee7777e7a160585e212140e5e770dd8d1df788d869b5ee483a5864fb), - (0x374a656456a0aae7429b23336f825752b575dd5a44290ff614946ee59d6a20c0, 0x054491e187e6e3460e7601fb54ae10836d34d420026f96316f0c5c62f86db9b8), - (0x374a656456a0aae7429b23336f825752b575dd5a44290ff614946ee59d6a20c0, 0x054491e187e6e3460e7601fb54ae10836d34d420026f96316f0c5c62f86db9b8), - (0x02e62cd68370b13711139a08cbcdd889e800a272b9ea10acc90880fff9d89199, 0x1a96c468cb0ce77065d3a58f1e55fea9b72d15e44c01bba1e110bd0cbc6e9bc6), - (0x224ef42758215157d3ee48fb8d769da5bddd35e5929a90a4a89736f5c4b5ae9b, 0x11bc3a1e08eb320cde764f1492ecef956d71e996e2165f7a9a30ad2febb511c1), - (0x2d5415bf917fcac32bfb705f8ca35cb12d9bad52aa33ccca747350f9235d3a18, 0x2b2921f815fad504052512743963ef20ed5b401d20627793b006413e73fe4dd4), - ], - permutation: VerifyingKey { - commitments: [ - (0x1347b4b385837977a96b87f199c6a9a81520015539d1e8fa79429bb4ca229a00, 0x2168e404cabef513654d6ff516cde73f0ba87e3dc84e4b940ed675b5f66f3884), - (0x0e6d69cd2455ec43be640f6397ed65c9e51b1d8c0fd2216339314ff37ade122a, 0x222ed6dc8cfc9ea26dcc10b9d4add791ada60f2b5a63ee1e4635f88aa0c96654), - (0x13c447846f48c41a5e0675ccf88ebc0cdef2c96c51446d037acb866d24255785, 0x1f0b5414fc5e8219dbfab996eed6129d831488b2386a8b1a63663938903bd63a), - (0x1aae6470aa662b8fda003894ddef5fedd03af318b3231683039d2fac9cab05b9, 0x08832d91ae69e99cd07d096c7a4a284a69e6a16227cbb07932a0cdc56914f3a6), - 
(0x0850521b0f8ac7dd0550fe3e25c840837076e9635067ed623b81d5cbac5944d9, 0x0c25d65d1038d0a92c72e5fccd96c1caf07801c3c8233290bb292e0c38c256fa), - (0x12febcf696badd970750eabf75dd3ced4c2f54f93519bcee23849025177d2014, 0x0a05ab3cd42c9fbcc1bbfcf9269951640cc9920761c87cf8e211ba73c8d9f90f), - (0x053904bdde8cfead3b517bb4f6ded3e699f8b94ca6156a9dd2f92a2a05a7ec5a, 0x16753ff97c0d82ff586bb7a07bf7f27a92df90b3617fa5e75d4f55c3b0ef8711), - (0x3804548f6816452747a5b542fa5656353fc989db40d69e9e27d6f973b5deebb0, 0x389a44d5037866dd83993af75831a5f90a18ad5244255aa5bd2c922cc5853055), - (0x003a9f9ca71c7c0b832c802220915f6fc8d840162bdde6b0ea05d25fb95559e3, 0x091247ca19d6b73887cd7f68908cbf0db0b47459b7c82276bbdb8a1c937e2438), - (0x3eaa38689d9e391c8a8fafab9568f20c45816321d38f309d4cc37f4b1601af72, 0x247f8270a462ea88450221a56aa6b55d2bc352b80b03501e99ea983251ceea13), - (0x394437571f9de32dccdc546fd4737772d8d92593c85438aa3473243997d5acc8, 0x14924ec6e3174f1fab7f0ce7070c22f04bbd0a0ecebdfc5c94be857f25493e95), - (0x3d907e0591343bd285c2c846f3e871a6ac70d80ec29e9500b8cb57f544e60202, 0x1034e48df35830244cabea076be8a16d67d7896e27c6ac22b285d017105da9c3), - ], - }, -}"##### - ); - } } test_plonk_api_ipa(); + */ + test_plonk_api_gwc(); test_plonk_api_shplonk(); } From 6444337dedaef84db1ff79a35bb6ca949f1ed66d Mon Sep 17 00:00:00 2001 From: Aleksei Vambol <77882392+AlekseiVambol@users.noreply.github.com> Date: Sun, 3 Mar 2024 15:42:25 +0200 Subject: [PATCH 2/6] Delete halo2_proofs/src/plonk/lookup directory --- halo2_proofs/src/plonk/lookup/prover.rs | 475 ---------------------- halo2_proofs/src/plonk/lookup/verifier.rs | 211 ---------- 2 files changed, 686 deletions(-) delete mode 100644 halo2_proofs/src/plonk/lookup/prover.rs delete mode 100644 halo2_proofs/src/plonk/lookup/verifier.rs diff --git a/halo2_proofs/src/plonk/lookup/prover.rs b/halo2_proofs/src/plonk/lookup/prover.rs deleted file mode 100644 index 028b298853..0000000000 --- a/halo2_proofs/src/plonk/lookup/prover.rs +++ /dev/null @@ -1,475 +0,0 @@ -use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, - ProvingKey, -}; -use super::Argument; -use crate::plonk::evaluation::evaluate; -use crate::{ - arithmetic::{eval_polynomial, parallelize, CurveAffine}, - poly::{ - commitment::{Blind, Params}, - Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, Rotation, - }, - transcript::{EncodedChallenge, TranscriptWrite}, -}; -use ff::WithSmallOrderMulGroup; -use group::{ - ff::{BatchInvert, Field}, - Curve, -}; -use rand_core::RngCore; -use std::{ - collections::BTreeMap, - iter, - ops::{Mul, MulAssign}, -}; - -#[derive(Debug)] -pub(in crate::plonk) struct Permuted { - compressed_input_expression: Polynomial, - permuted_input_expression: Polynomial, - permuted_input_poly: Polynomial, - permuted_input_blind: Blind, - compressed_table_expression: Polynomial, - permuted_table_expression: Polynomial, - permuted_table_poly: Polynomial, - permuted_table_blind: Blind, -} - -#[derive(Debug)] -pub(in crate::plonk) struct Committed { - pub(in crate::plonk) permuted_input_poly: Polynomial, - permuted_input_blind: Blind, - pub(in crate::plonk) permuted_table_poly: Polynomial, - permuted_table_blind: Blind, - pub(in crate::plonk) product_poly: Polynomial, - product_blind: Blind, -} - -pub(in crate::plonk) struct Evaluated { - constructed: Committed, -} - -impl> Argument { - /// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions - /// [S_0, S_1, ..., S_{m-1}], this method - /// - constructs A_compressed = 
\theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} - /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, - /// - permutes A_compressed and S_compressed using permute_expression_pair() helper, - /// obtaining A' and S', and - /// - constructs Permuted struct using permuted_input_value = A', and - /// permuted_table_expression = S'. - /// The Permuted struct is used to update the Lookup, and is then returned. - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_permuted< - 'a, - 'params: 'a, - C, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - mut rng: R, - transcript: &mut T, - ) -> Result, Error> - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - // Closure to get values of expressions and compress them - let compress_expressions = |expressions: &[Expression]| { - let compressed_expression = expressions - .iter() - .map(|expression| { - pk.vk.domain.lagrange_from_vec(evaluate( - expression, - params.n() as usize, - 1, - fixed_values, - advice_values, - instance_values, - challenges, - )) - }) - .fold(domain.empty_lagrange(), |acc, expression| { - acc * *theta + &expression - }); - compressed_expression - }; - - // Get values of input expressions involved in the lookup and compress them - let compressed_input_expression = compress_expressions(&self.input_expressions); - - // Get values of table expressions involved in the lookup and compress them - let compressed_table_expression = compress_expressions(&self.table_expressions); - - // Permute compressed (InputExpression, TableExpression) pair - let (permuted_input_expression, permuted_table_expression) = permute_expression_pair( - pk, - params, - domain, - &mut rng, - &compressed_input_expression, - &compressed_table_expression, - )?; - - // Closure to construct commitment to vector of values - let mut commit_values = |values: &Polynomial| { - let poly = pk.vk.domain.lagrange_to_coeff(values.clone()); - let blind = Blind(C::Scalar::random(&mut rng)); - let commitment = params.commit_lagrange(values, blind).to_affine(); - (poly, blind, commitment) - }; - - // Commit to permuted input expression - let (permuted_input_poly, permuted_input_blind, permuted_input_commitment) = - commit_values(&permuted_input_expression); - - // Commit to permuted table expression - let (permuted_table_poly, permuted_table_blind, permuted_table_commitment) = - commit_values(&permuted_table_expression); - - // Hash permuted input commitment - transcript.write_point(permuted_input_commitment)?; - - // Hash permuted table commitment - transcript.write_point(permuted_table_commitment)?; - - Ok(Permuted { - compressed_input_expression, - permuted_input_expression, - permuted_input_poly, - permuted_input_blind, - compressed_table_expression, - permuted_table_expression, - permuted_table_poly, - permuted_table_blind, - }) - } -} - -impl Permuted { - /// Given a Lookup with input expressions, table expressions, and the permuted - /// input expression and permuted table expression, this method constructs the - /// grand product polynomial over the lookup. The grand product polynomial - /// is used to populate the Product struct. The Product struct is - /// added to the Lookup and finally returned by the method. 
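Schematically, the grand product described above is built by the running recurrence below (a restatement of the construction in the body of commit_product, with A and S the compressed input/table expressions and A', S' their permutations):

    Z_0 = 1
    Z_{i+1} = Z_i · (A_i + β)(S_i + γ) / ((A'_i + β)(S'_i + γ))

together with the boundary checks l_0(X) · (1 - Z(X)) = 0 and l_last(X) · (Z(X)² - Z(X)) = 0 enforced by the corresponding verifier expressions.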
- pub(in crate::plonk) fn commit_product< - 'params, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - self, - pk: &ProvingKey, - params: &P, - beta: ChallengeBeta, - gamma: ChallengeGamma, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - let blinding_factors = pk.vk.cs.blinding_factors(); - // Goal is to compute the products of fractions - // - // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) - // - // where a_j(X) is the jth input expression in this lookup, - // where a'(X) is the compression of the permuted input expressions, - // s_j(X) is the jth table expression in this lookup, - // s'(X) is the compression of the permuted table expressions, - // and i is the ith row of the expression. - let mut lookup_product = vec![C::Scalar::ZERO; params.n() as usize]; - // Denominator uses the permuted input expression and permuted table expression - parallelize(&mut lookup_product, |lookup_product, start| { - for ((lookup_product, permuted_input_value), permuted_table_value) in lookup_product - .iter_mut() - .zip(self.permuted_input_expression[start..].iter()) - .zip(self.permuted_table_expression[start..].iter()) - { - *lookup_product = (*beta + permuted_input_value) * &(*gamma + permuted_table_value); - } - }); - - // Batch invert to obtain the denominators for the lookup product - // polynomials - lookup_product.iter_mut().batch_invert(); - - // Finish the computation of the entire fraction by computing the numerators - // (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - parallelize(&mut lookup_product, |product, start| { - for (i, product) in product.iter_mut().enumerate() { - let i = i + start; - - *product *= &(self.compressed_input_expression[i] + &*beta); - *product *= &(self.compressed_table_expression[i] + &*gamma); - } - }); - - // The product vector is a vector of products of fractions of the form - // - // Numerator: (\theta^{m-1} a_0(\omega^i) + \theta^{m-2} a_1(\omega^i) + ... + \theta a_{m-2}(\omega^i) + a_{m-1}(\omega^i) + \beta) - // * (\theta^{m-1} s_0(\omega^i) + \theta^{m-2} s_1(\omega^i) + ... + \theta s_{m-2}(\omega^i) + s_{m-1}(\omega^i) + \gamma) - // Denominator: (a'(\omega^i) + \beta) (s'(\omega^i) + \gamma) - // - // where there are m input expressions and m table expressions, - // a_j(\omega^i) is the jth input expression in this lookup, - // a'j(\omega^i) is the permuted input expression, - // s_j(\omega^i) is the jth table expression in this lookup, - // s'(\omega^i) is the permuted table expression, - // and i is the ith row of the expression. - - // Compute the evaluations of the lookup product polynomial - // over our domain, starting with z[0] = 1 - let z = iter::once(C::Scalar::ONE) - .chain(lookup_product) - .scan(C::Scalar::ONE, |state, cur| { - *state *= &cur; - Some(*state) - }) - // Take all rows including the "last" row which should - // be a boolean (and ideally 1, else soundness is broken) - .take(params.n() as usize - blinding_factors) - // Chain random blinding factors. 
- .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) - .collect::>(); - assert_eq!(z.len(), params.n() as usize); - let z = pk.vk.domain.lagrange_from_vec(z); - - #[cfg(feature = "sanity-checks")] - // This test works only with intermediate representations in this method. - // It can be used for debugging purposes. - { - // While in Lagrange basis, check that product is correctly constructed - let u = (params.n() as usize) - (blinding_factors + 1); - - // l_0(X) * (1 - z(X)) = 0 - assert_eq!(z[0], C::Scalar::ONE); - - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) - for i in 0..u { - let mut left = z[i + 1]; - let permuted_input_value = &self.permuted_input_expression[i]; - - let permuted_table_value = &self.permuted_table_expression[i]; - - left *= &(*beta + permuted_input_value); - left *= &(*gamma + permuted_table_value); - - let mut right = z[i]; - let mut input_term = self.compressed_input_expression[i]; - let mut table_term = self.compressed_table_expression[i]; - - input_term += &(*beta); - table_term += &(*gamma); - right *= &(input_term * &table_term); - - assert_eq!(left, right); - } - - // l_last(X) * (z(X)^2 - z(X)) = 0 - // Assertion will fail only when soundness is broken, in which - // case this z[u] value will be zero. (bad!) - assert_eq!(z[u], C::Scalar::ONE); - } - - let product_blind = Blind(C::Scalar::random(rng)); - let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); - let z = pk.vk.domain.lagrange_to_coeff(z); - - // Hash product commitment - transcript.write_point(product_commitment)?; - - Ok(Committed:: { - permuted_input_poly: self.permuted_input_poly, - permuted_input_blind: self.permuted_input_blind, - permuted_table_poly: self.permuted_table_poly, - permuted_table_blind: self.permuted_table_blind, - product_poly: z, - product_blind, - }) - } -} - -impl Committed { - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - self, - pk: &ProvingKey, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let x_inv = domain.rotate_omega(*x, Rotation::prev()); - let x_next = domain.rotate_omega(*x, Rotation::next()); - - let product_eval = eval_polynomial(&self.product_poly, *x); - let product_next_eval = eval_polynomial(&self.product_poly, x_next); - let permuted_input_eval = eval_polynomial(&self.permuted_input_poly, *x); - let permuted_input_inv_eval = eval_polynomial(&self.permuted_input_poly, x_inv); - let permuted_table_eval = eval_polynomial(&self.permuted_table_poly, *x); - - // Hash each advice evaluation - for eval in iter::empty() - .chain(Some(product_eval)) - .chain(Some(product_next_eval)) - .chain(Some(permuted_input_eval)) - .chain(Some(permuted_input_inv_eval)) - .chain(Some(permuted_table_eval)) - { - transcript.write_scalar(eval)?; - } - - Ok(Evaluated { constructed: self }) - } -} - -impl Evaluated { - pub(in crate::plonk) fn open<'a>( - &'a self, - pk: &'a ProvingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_inv = pk.vk.domain.rotate_omega(*x, Rotation::prev()); - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open lookup product commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - // Open lookup input commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.permuted_input_poly, - 
blind: self.constructed.permuted_input_blind, - })) - // Open lookup table commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.permuted_table_poly, - blind: self.constructed.permuted_table_blind, - })) - // Open lookup input commitments at x_inv - .chain(Some(ProverQuery { - point: x_inv, - poly: &self.constructed.permuted_input_poly, - blind: self.constructed.permuted_input_blind, - })) - // Open lookup product commitments at x_next - .chain(Some(ProverQuery { - point: x_next, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - } -} - -type ExpressionPair = (Polynomial, Polynomial); - -/// Given a vector of input values A and a vector of table values S, -/// this method permutes A and S to produce A' and S', such that: -/// - like values in A' are vertically adjacent to each other; and -/// - the first row in a sequence of like values in A' is the row -/// that has the corresponding value in S'. -/// This method returns (A', S') if no errors are encountered. -fn permute_expression_pair<'params, C: CurveAffine, P: Params<'params, C>, R: RngCore>( - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - mut rng: R, - input_expression: &Polynomial, - table_expression: &Polynomial, -) -> Result, Error> { - let blinding_factors = pk.vk.cs.blinding_factors(); - let usable_rows = params.n() as usize - (blinding_factors + 1); - - let mut permuted_input_expression: Vec = input_expression.to_vec(); - permuted_input_expression.truncate(usable_rows); - - // Sort input lookup expression values - permuted_input_expression.sort(); - - // A BTreeMap of each unique element in the table expression and its count - let mut leftover_table_map: BTreeMap = table_expression - .iter() - .take(usable_rows) - .fold(BTreeMap::new(), |mut acc, coeff| { - *acc.entry(*coeff).or_insert(0) += 1; - acc - }); - let mut permuted_table_coeffs = vec![C::Scalar::ZERO; usable_rows]; - - let mut repeated_input_rows = permuted_input_expression - .iter() - .zip(permuted_table_coeffs.iter_mut()) - .enumerate() - .filter_map(|(row, (input_value, table_value))| { - // If this is the first occurrence of `input_value` in the input expression - if row == 0 || *input_value != permuted_input_expression[row - 1] { - *table_value = *input_value; - // Remove one instance of input_value from leftover_table_map - if let Some(count) = leftover_table_map.get_mut(input_value) { - assert!(*count > 0); - *count -= 1; - None - } else { - // Return error if input_value not found - Some(Err(Error::ConstraintSystemFailure)) - } - // If input value is repeated - } else { - Some(Ok(row)) - } - }) - .collect::, _>>()?; - - // Populate permuted table at unfilled rows with leftover table elements - for (coeff, count) in leftover_table_map.iter() { - for _ in 0..*count { - permuted_table_coeffs[repeated_input_rows.pop().unwrap()] = *coeff; - } - } - assert!(repeated_input_rows.is_empty()); - - permuted_input_expression - .extend((0..(blinding_factors + 1)).map(|_| C::Scalar::random(&mut rng))); - permuted_table_coeffs.extend((0..(blinding_factors + 1)).map(|_| C::Scalar::random(&mut rng))); - assert_eq!(permuted_input_expression.len(), params.n() as usize); - assert_eq!(permuted_table_coeffs.len(), params.n() as usize); - - #[cfg(feature = "sanity-checks")] - { - let mut last = None; - for (a, b) in permuted_input_expression - .iter() - .zip(permuted_table_coeffs.iter()) - .take(usable_rows) - { - if *a != *b { - assert_eq!(*a, last.unwrap()); - } - last = Some(*a); - } - } - - Ok(( 
- domain.lagrange_from_vec(permuted_input_expression), - domain.lagrange_from_vec(permuted_table_coeffs), - )) -} diff --git a/halo2_proofs/src/plonk/lookup/verifier.rs b/halo2_proofs/src/plonk/lookup/verifier.rs deleted file mode 100644 index bbc86c8e9d..0000000000 --- a/halo2_proofs/src/plonk/lookup/verifier.rs +++ /dev/null @@ -1,211 +0,0 @@ -use std::iter; - -use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, -}; -use super::Argument; -use crate::{ - arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, - poly::{commitment::MSM, Rotation, VerifierQuery}, - transcript::{EncodedChallenge, TranscriptRead}, -}; -use ff::Field; - -pub struct PermutationCommitments { - permuted_input_commitment: C, - permuted_table_commitment: C, -} - -pub struct Committed { - permuted: PermutationCommitments, - product_commitment: C, -} - -pub struct Evaluated { - committed: Committed, - product_eval: C::Scalar, - product_next_eval: C::Scalar, - permuted_input_eval: C::Scalar, - permuted_input_inv_eval: C::Scalar, - permuted_table_eval: C::Scalar, -} - -impl Argument { - pub(in crate::plonk) fn read_permuted_commitments< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - transcript: &mut T, - ) -> Result, Error> { - let permuted_input_commitment = transcript.read_point()?; - let permuted_table_commitment = transcript.read_point()?; - - Ok(PermutationCommitments { - permuted_input_commitment, - permuted_table_commitment, - }) - } -} - -impl PermutationCommitments { - pub(in crate::plonk) fn read_product_commitment< - E: EncodedChallenge, - T: TranscriptRead, - >( - self, - transcript: &mut T, - ) -> Result, Error> { - let product_commitment = transcript.read_point()?; - - Ok(Committed { - permuted: self, - product_commitment, - }) - } -} - -impl Committed { - pub(crate) fn evaluate, T: TranscriptRead>( - self, - transcript: &mut T, - ) -> Result, Error> { - let product_eval = transcript.read_scalar()?; - let product_next_eval = transcript.read_scalar()?; - let permuted_input_eval = transcript.read_scalar()?; - let permuted_input_inv_eval = transcript.read_scalar()?; - let permuted_table_eval = transcript.read_scalar()?; - - Ok(Evaluated { - committed: self, - product_eval, - product_next_eval, - permuted_input_eval, - permuted_input_inv_eval, - permuted_table_eval, - }) - } -} - -impl Evaluated { - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn expressions<'a>( - &'a self, - l_0: C::Scalar, - l_last: C::Scalar, - l_blind: C::Scalar, - argument: &'a Argument, - theta: ChallengeTheta, - beta: ChallengeBeta, - gamma: ChallengeGamma, - advice_evals: &[C::Scalar], - fixed_evals: &[C::Scalar], - instance_evals: &[C::Scalar], - challenges: &[C::Scalar], - ) -> impl Iterator + 'a { - let active_rows = C::Scalar::ONE - (l_last + l_blind); - - let product_expression = || { - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) - let left = self.product_next_eval - * &(self.permuted_input_eval + &*beta) - * &(self.permuted_table_eval + &*gamma); - - let compress_expressions = |expressions: &[Expression]| { - expressions - .iter() - .map(|expression| { - expression.evaluate( - &|scalar| scalar, - &|_| panic!("virtual selectors are removed during optimization"), - &|query| fixed_evals[query.index.unwrap()], - &|query| advice_evals[query.index.unwrap()], - &|query| instance_evals[query.index.unwrap()], - &|challenge| challenges[challenge.index()], - &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, - ) - }) - .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) - }; - let right = self.product_eval - * &(compress_expressions(&argument.input_expressions) + &*beta) - * &(compress_expressions(&argument.table_expressions) + &*gamma); - - (left - &right) * &active_rows - }; - - std::iter::empty() - .chain( - // l_0(X) * (1 - z(X)) = 0 - Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), - ) - .chain( - // l_last(X) * (z(X)^2 - z(X)) = 0 - Some(l_last * &(self.product_eval.square() - &self.product_eval)), - ) - .chain( - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) - // ) = 0 - Some(product_expression()), - ) - .chain(Some( - // l_0(X) * (a'(X) - s'(X)) = 0 - l_0 * &(self.permuted_input_eval - &self.permuted_table_eval), - )) - .chain(Some( - // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 - (self.permuted_input_eval - &self.permuted_table_eval) - * &(self.permuted_input_eval - &self.permuted_input_inv_eval) - * &active_rows, - )) - } - - pub(in crate::plonk) fn queries<'r, M: MSM + 'r>( - &'r self, - vk: &'r VerifyingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_inv = vk.domain.rotate_omega(*x, Rotation::prev()); - let x_next = vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open lookup product commitment at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - *x, - self.product_eval, - ))) - // Open lookup input commitments at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_input_commitment, - *x, - self.permuted_input_eval, - ))) - // Open lookup table commitments at x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_table_commitment, - *x, - self.permuted_table_eval, - ))) - // Open lookup input commitments at \omega^{-1} x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.permuted.permuted_input_commitment, - x_inv, - self.permuted_input_inv_eval, - ))) - // Open lookup product commitment at \omega x - .chain(Some(VerifierQuery::new_commitment( - &self.committed.product_commitment, - x_next, - self.product_next_eval, - ))) - } -} From a45d3d4377a927b5619791e46528eaa35cf284a3 Mon Sep 17 00:00:00 2001 From: Aleksei Vambol <77882392+AlekseiVambol@users.noreply.github.com> Date: Sun, 3 Mar 2024 15:42:40 +0200 Subject: [PATCH 3/6] Delete halo2_proofs/src/plonk/lookup.rs --- halo2_proofs/src/plonk/lookup.rs | 99 -------------------------------- 1 file changed, 99 deletions(-) delete mode 100644 halo2_proofs/src/plonk/lookup.rs diff --git a/halo2_proofs/src/plonk/lookup.rs b/halo2_proofs/src/plonk/lookup.rs deleted file mode 100644 index a7c4f68af2..0000000000 --- a/halo2_proofs/src/plonk/lookup.rs +++ /dev/null @@ -1,99 +0,0 @@ -use 
super::circuit::Expression; -use ff::Field; -use std::fmt::{self, Debug}; - -pub(crate) mod prover; -pub(crate) mod verifier; - -#[derive(Clone)] -pub struct Argument { - pub(crate) name: String, - pub(crate) input_expressions: Vec>, - pub(crate) table_expressions: Vec>, -} - -impl Debug for Argument { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Argument") - .field("input_expressions", &self.input_expressions) - .field("table_expressions", &self.table_expressions) - .finish() - } -} - -impl Argument { - /// Constructs a new lookup argument. - /// - /// `table_map` is a sequence of `(input, table)` tuples. - pub fn new>(name: S, table_map: Vec<(Expression, Expression)>) -> Self { - let (input_expressions, table_expressions) = table_map.into_iter().unzip(); - Argument { - name: name.as_ref().to_string(), - input_expressions, - table_expressions, - } - } - - pub(crate) fn required_degree(&self) -> usize { - assert_eq!(self.input_expressions.len(), self.table_expressions.len()); - - // The first value in the permutation poly should be one. - // degree 2: - // l_0(X) * (1 - z(X)) = 0 - // - // The "last" value in the permutation poly should be a boolean, for - // completeness and soundness. - // degree 3: - // l_last(X) * (z(X)^2 - z(X)) = 0 - // - // Enable the permutation argument for only the rows involved. - // degree (2 + input_degree + table_degree) or 4, whichever is larger: - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) - // ) = 0 - // - // The first two values of a' and s' should be the same. - // degree 2: - // l_0(X) * (a'(X) - s'(X)) = 0 - // - // Either the two values are the same, or the previous - // value of a' is the same as the current value. - // degree 3: - // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 - let mut input_degree = 1; - for expr in self.input_expressions.iter() { - input_degree = std::cmp::max(input_degree, expr.degree()); - } - let mut table_degree = 1; - for expr in self.table_expressions.iter() { - table_degree = std::cmp::max(table_degree, expr.degree()); - } - - // In practice because input_degree and table_degree are initialized to - // one, the latter half of this max() invocation is at least 4 always, - // rendering this call pointless except to be explicit in case we change - // the initialization of input_degree/table_degree in the future. - std::cmp::max( - // (1 - (l_last + l_blind)) z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) - 4, - // (1 - (l_last + l_blind)) z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... 
+ s_{m-1}(X) + \gamma) - 2 + input_degree + table_degree, - ) - } - - /// Returns input of this argument - pub fn input_expressions(&self) -> &Vec> { - &self.input_expressions - } - - /// Returns table of this argument - pub fn table_expressions(&self) -> &Vec> { - &self.table_expressions - } - - /// Returns name of this argument - pub fn name(&self) -> &str { - &self.name - } -} From 5b7abca800bda7699d8108dceb41bc50681b4236 Mon Sep 17 00:00:00 2001 From: AlekseiVambol Date: Sun, 3 Mar 2024 23:33:42 +0200 Subject: [PATCH 4/6] Formatting the code --- halo2_proofs/benches/lookups.rs | 14 ++-- halo2_proofs/src/dev/failure.rs | 16 ++++- halo2_proofs/src/plonk/circuit.rs | 4 +- halo2_proofs/src/plonk/evaluation.rs | 75 ++++++++++---------- halo2_proofs/src/plonk/mv_lookup/prover.rs | 12 ++-- halo2_proofs/src/plonk/mv_lookup/verifier.rs | 12 +--- halo2_proofs/tests/plonk_api.rs | 8 +-- 7 files changed, 72 insertions(+), 69 deletions(-) diff --git a/halo2_proofs/benches/lookups.rs b/halo2_proofs/benches/lookups.rs index bf488fb974..5f8beed041 100644 --- a/halo2_proofs/benches/lookups.rs +++ b/halo2_proofs/benches/lookups.rs @@ -12,12 +12,11 @@ use halo2curves::pairing::Engine; use rand_core::OsRng; use halo2_proofs::{ - poly:: - kzg::{ - commitment::{KZGCommitmentScheme, ParamsKZG}, - multiopen::ProverGWC, - strategy::SingleStrategy, - }, + poly::kzg::{ + commitment::{KZGCommitmentScheme, ParamsKZG}, + multiopen::ProverGWC, + strategy::SingleStrategy, + }, transcript::{TranscriptReadBuffer, TranscriptWriterBuffer}, }; @@ -59,7 +58,8 @@ fn criterion_benchmark(c: &mut Criterion) { meta.create_gate("degree 6 gate", |meta| { let dummy_selector = meta.query_selector(dummy_selector); - let constraints = vec![dummy_selector.clone(); 4].iter() + let constraints = vec![dummy_selector.clone(); 4] + .iter() .fold(dummy_selector.clone(), |acc, val| acc * val.clone()); Constraints::with_selector(dummy_selector, Some(constraints)) }); diff --git a/halo2_proofs/src/dev/failure.rs b/halo2_proofs/src/dev/failure.rs index d0414b63cd..db84bd68b3 100644 --- a/halo2_proofs/src/dev/failure.rs +++ b/halo2_proofs/src/dev/failure.rs @@ -609,9 +609,19 @@ fn render_lookup( let cell_values = input.evaluate( &|_| BTreeMap::default(), &|_| panic!("virtual selectors are removed during optimization"), - &cell_value(&util::load(n, row, &cs.fixed_queries, prover.fixed.as_slice())), + &cell_value(&util::load( + n, + row, + &cs.fixed_queries, + prover.fixed.as_slice(), + )), &cell_value(&util::load(n, row, &cs.advice_queries, &prover.advice)), - &cell_value(&util::load_instance(n, row, &cs.instance_queries, &prover.instance)), + &cell_value(&util::load_instance( + n, + row, + &cs.instance_queries, + &prover.instance, + )), &|_| BTreeMap::default(), &|a| a, &|mut a, mut b| { @@ -638,7 +648,7 @@ fn render_lookup( .or_default() .entry(cell.column) .or_insert(format!("x{}", i)); - } + } if i != 0 { eprintln!(); } diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs index 165693589d..c2852a3c4c 100644 --- a/halo2_proofs/src/plonk/circuit.rs +++ b/halo2_proofs/src/plonk/circuit.rs @@ -1,4 +1,3 @@ -use std::collections::BTreeMap; use super::{mv_lookup, permutation, shuffle, Assigned, Error}; use crate::circuit::layouter::SyncDeps; use crate::dev::metadata; @@ -10,6 +9,7 @@ use core::cmp::max; use core::ops::{Add, Mul}; use ff::Field; use sealed::SealedPhase; +use std::collections::BTreeMap; use std::collections::HashMap; use std::fmt::Debug; use std::iter::{Product, Sum}; @@ -1825,7 +1825,7 @@ impl 
ConstraintSystem { for input in inputs.iter().skip(1) { let cur_input_degree = input.iter().map(|expr| expr.degree()).max().unwrap(); let mut indicator = false; - for arg in args.iter_mut() { + for arg in args.iter_mut() { // try to fit input in one of the args let cur_argument_degree = arg.required_degree(); let new_potential_degree = cur_argument_degree + cur_input_degree; diff --git a/halo2_proofs/src/plonk/evaluation.rs b/halo2_proofs/src/plonk/evaluation.rs index b92327ea37..a7662d192e 100644 --- a/halo2_proofs/src/plonk/evaluation.rs +++ b/halo2_proofs/src/plonk/evaluation.rs @@ -1,3 +1,4 @@ +use super::{shuffle, ConstraintSystem, Expression}; use crate::multicore; use crate::plonk::{mv_lookup, permutation, Any, ProvingKey}; use crate::poly::Basis; @@ -6,10 +7,12 @@ use crate::{ poly::{Coeff, ExtendedLagrangeCoeff, Polynomial, Rotation}, }; use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; -use super::{shuffle, ConstraintSystem, Expression}; #[cfg(not(feature = "logup_skip_inv"))] -use rayon::{slice::ParallelSlice, prelude::{ParallelIterator, IntoParallelIterator}}; +use rayon::{ + prelude::{IntoParallelIterator, ParallelIterator}, + slice::ParallelSlice, +}; #[cfg(not(feature = "logup_skip_inv"))] use crate::arithmetic::par_invert; @@ -269,7 +272,7 @@ impl Evaluator { compressed_table_coset, ValueSource::Beta(), )); - /* + /* a) f_i + beta b) t + beta */ @@ -482,7 +485,6 @@ impl Evaluator { }); } - // For lookups, compute inputs_inv_sum = ∑ 1 / (f_i(X) + beta) // The outer vector has capacity self.lookups.len() #[cfg(not(feature = "logup_skip_inv"))] @@ -497,50 +499,47 @@ impl Evaluator { .map(|idx| { let mut inputs_eval_data: Vec<_> = inputs_lookup_evaluator .iter() - .map(|input_lookup_evaluator| { - input_lookup_evaluator.instance() - }) + .map(|input_lookup_evaluator| input_lookup_evaluator.instance()) .collect(); inputs_lookup_evaluator - .iter() - .zip(inputs_eval_data.iter_mut()) - .map(|(input_lookup_evaluator, input_eval_data)| { - input_lookup_evaluator.evaluate( - input_eval_data, - fixed, - advice, - instance, - challenges, - &beta, - &gamma, - &theta, - &y, - &C::ScalarExt::ZERO, - idx, - rot_scale, - isize, - ) - }) - .collect() - }) - .collect(); + .iter() + .zip(inputs_eval_data.iter_mut()) + .map(|(input_lookup_evaluator, input_eval_data)| { + input_lookup_evaluator.evaluate( + input_eval_data, + fixed, + advice, + instance, + challenges, + &beta, + &gamma, + &theta, + &y, + &C::ScalarExt::ZERO, + idx, + rot_scale, + isize, + ) + }) + .collect() + }) + .collect(); let mut inputs_values_for_extended_domain: Vec = inputs_values_for_extended_domain - .into_iter() - .flatten() - .collect(); + .into_iter() + .flatten() + .collect(); par_invert(&mut inputs_values_for_extended_domain); let inputs_len = inputs_lookup_evaluator.len(); inputs_values_for_extended_domain - .par_chunks_exact(inputs_len) - .map(|values| values.iter().sum()) - .collect::>() - + .par_chunks_exact(inputs_len) + .map(|values| values.iter().sum()) + .collect::>() }) .collect(); @@ -560,7 +559,7 @@ impl Evaluator { LHS = τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X)) RHS = τ(X) * Π(φ_i(X)) * (∑ 1/(φ_i(X)) - m(X) / τ(X)))) (1) = (τ(X) * Π(φ_i(X)) * ∑ 1/(φ_i(X))) - Π(φ_i(X)) * m(X) - = Π(φ_i(X)) * (τ(X) * ∑ 1/(φ_i(X)) - m(X)) + = Π(φ_i(X)) * (τ(X) * ∑ 1/(φ_i(X)) - m(X)) = ∑_i τ(X) * Π_{j != i} φ_j(X) - m(X) * Π(φ_i(X)) (2) */ parallelize(&mut values, |values, start| { @@ -639,7 +638,7 @@ impl Evaluator { .fold(C::Scalar::ZERO, |acc, x| acc + x); inputs * table_value - inputs_prod * m_coset[idx] }; - + 
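The two cfg branches here evaluate the same constraint in different ways; multiplying the comment's form (1) through by τ(X) · Π(φ_i(X)) gives form (2):

    τ(X) · Π_i φ_i(X) · (∑_i 1/φ_i(X) - m(X)/τ(X)) = ∑_i τ(X) · Π_{j≠i} φ_j(X) - m(X) · Π_i φ_i(X)

With the logup_skip_inv feature the evaluator uses the right-hand side, trading the batch inversion of the φ_i values for extra multiplications per row; without it, the φ_i values are inverted up front (par_invert above) and the left-hand side is used directly.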
#[cfg(not(feature = "logup_skip_inv"))] let rhs = { // ∑ 1 / (f_i(X) + beta) at ω^idx diff --git a/halo2_proofs/src/plonk/mv_lookup/prover.rs b/halo2_proofs/src/plonk/mv_lookup/prover.rs index 96e1241d7b..66a3cbdd85 100644 --- a/halo2_proofs/src/plonk/mv_lookup/prover.rs +++ b/halo2_proofs/src/plonk/mv_lookup/prover.rs @@ -1,15 +1,13 @@ use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeTheta, ChallengeX, Error, - ProvingKey, + circuit::Expression, ChallengeBeta, ChallengeTheta, ChallengeX, Error, ProvingKey, }; use super::Argument; use crate::plonk::evaluation::evaluate; use crate::{ - arithmetic::{eval_polynomial, parallelize, parallelize_naive, CurveAffine, Field, par_invert}, + arithmetic::{eval_polynomial, par_invert, parallelize, parallelize_naive, CurveAffine, Field}, poly::{ commitment::{Blind, Params}, - Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, - Rotation, + Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, Rotation, }, transcript::{EncodedChallenge, TranscriptWrite}, }; @@ -21,7 +19,9 @@ use std::{ ops::{Mul, MulAssign}, }; -use rayon::prelude::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator, ParallelSliceMut}; +use rayon::prelude::{ + IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator, ParallelSliceMut, +}; #[derive(Debug)] pub(in crate::plonk) struct Prepared { diff --git a/halo2_proofs/src/plonk/mv_lookup/verifier.rs b/halo2_proofs/src/plonk/mv_lookup/verifier.rs index 7816712f97..8bbf90625f 100644 --- a/halo2_proofs/src/plonk/mv_lookup/verifier.rs +++ b/halo2_proofs/src/plonk/mv_lookup/verifier.rs @@ -1,8 +1,6 @@ use std::iter; -use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeTheta, ChallengeX, -}; +use super::super::{circuit::Expression, ChallengeBeta, ChallengeTheta, ChallengeX}; use super::Argument; use crate::{ arithmetic::{CurveAffine, Field}, @@ -133,15 +131,11 @@ impl Evaluated { let tau = t_eval + *beta; // Π(φ_i(X)) - let prod_fi = f_evals - .iter() - .fold(C::Scalar::ONE, |acc, eval| acc * eval); + let prod_fi = f_evals.iter().fold(C::Scalar::ONE, |acc, eval| acc * eval); // ∑ 1/(φ_i(X)) let sum_inv_fi = { f_evals.batch_invert(); - f_evals - .iter() - .fold(C::Scalar::ZERO, |acc, eval| acc + eval) + f_evals.iter().fold(C::Scalar::ZERO, |acc, eval| acc + eval) }; // LHS = τ(X) * Π(φ_i(X)) * (ϕ(gX) - ϕ(X)) diff --git a/halo2_proofs/tests/plonk_api.rs b/halo2_proofs/tests/plonk_api.rs index 924cde0740..b4bfd314a0 100644 --- a/halo2_proofs/tests/plonk_api.rs +++ b/halo2_proofs/tests/plonk_api.rs @@ -322,19 +322,19 @@ fn plonk_api() { let a_ = meta.query_any(a, Rotation::cur()); vec![(a_, sl)] }); - + // Add to test mvlookup meta.lookup("lookup_same", |meta| { let a_ = meta.query_any(a, Rotation::cur()); vec![(a_, sl)] }); - + meta.lookup("lookup_same", |meta| { let b_ = meta.query_any(b, Rotation::cur()); let dummy = meta.query_selector(dummy); let dummy_2 = meta.query_selector(dummy_2); let dummy_3 = meta.query_selector(dummy_3); - + vec![(dummy * dummy_2 * dummy_3 * b_, dummy_table)] }); @@ -644,7 +644,7 @@ fn plonk_api() { test_plonk_api_ipa(); */ - + test_plonk_api_gwc(); test_plonk_api_shplonk(); } From 840da815ae3f9d79d0f28d6cf700f1cdfadd70f8 Mon Sep 17 00:00:00 2001 From: Aleksei Vambol <77882392+AlekseiVambol@users.noreply.github.com> Date: Sun, 3 Mar 2024 23:51:42 +0200 Subject: [PATCH 5/6] Update prover.rs --- halo2_proofs/src/plonk/mv_lookup/prover.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/halo2_proofs/src/plonk/mv_lookup/prover.rs b/halo2_proofs/src/plonk/mv_lookup/prover.rs index 66a3cbdd85..9248097bf3 100644 --- a/halo2_proofs/src/plonk/mv_lookup/prover.rs +++ b/halo2_proofs/src/plonk/mv_lookup/prover.rs @@ -329,7 +329,7 @@ impl Prepared { for i in 0..u { // Π(φ_i(X)) let fi_prod = || { - let mut prod = C::Scalar::one(); + let mut prod = C::Scalar::ONE; for compressed_input_expression in self.compressed_inputs_expressions.iter() { prod *= *beta + compressed_input_expression[i]; } From 540427f23f826f07e8c86ad7ae7a848af9080f1e Mon Sep 17 00:00:00 2001 From: Aleksei Vambol <77882392+AlekseiVambol@users.noreply.github.com> Date: Mon, 4 Mar 2024 00:02:11 +0200 Subject: [PATCH 6/6] Update prover.rs --- halo2_proofs/src/plonk/mv_lookup/prover.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/halo2_proofs/src/plonk/mv_lookup/prover.rs b/halo2_proofs/src/plonk/mv_lookup/prover.rs index 9248097bf3..d233a59a9b 100644 --- a/halo2_proofs/src/plonk/mv_lookup/prover.rs +++ b/halo2_proofs/src/plonk/mv_lookup/prover.rs @@ -143,14 +143,14 @@ impl + Ord> Argument { .collect::>(); assert_eq!(invalid_ms.len(), blinding_factors); for mi in invalid_ms { - assert_eq!(*mi, C::Scalar::zero()); + assert_eq!(*mi, C::Scalar::ZERO); } // check sums let alpha = C::Scalar::random(&mut rng); let cs_input_sum = |compressed_input_expression: &Polynomial| { - let mut lhs_sum = C::Scalar::zero(); + let mut lhs_sum = C::Scalar::ZERO; for &fi in compressed_input_expression .iter() .take(params.n() as usize - blinding_factors - 1) @@ -161,13 +161,13 @@ impl + Ord> Argument { lhs_sum }; - let mut lhs_sum = C::Scalar::zero(); + let mut lhs_sum = C::Scalar::ZERO; for compressed_input_expression in compressed_inputs_expressions.iter() { lhs_sum += cs_input_sum(compressed_input_expression); } - let mut rhs_sum = C::Scalar::zero(); + let mut rhs_sum = C::Scalar::ZERO; for (&ti, &mi) in compressed_table_expression.iter().zip(m_values.iter()) { rhs_sum += mi * (ti + alpha).invert().unwrap(); }
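For circuit authors the front-end API is unchanged by this series. As a minimal sketch (reusing the column names a, b and the table column sl from the plonk_api test above), two lookups into the same table can still be declared independently; the argument-grouping logic added to ConstraintSystem in circuit.rs may then fold them into a single mv_lookup::Argument with one multiplicity column when their degrees allow:

    meta.lookup("lookup_a", |meta| {
        // input expression a(X), looked up in table column sl
        let a_ = meta.query_any(a, Rotation::cur());
        vec![(a_, sl)]
    });

    meta.lookup("lookup_b", |meta| {
        // second input into the same table; may share one m(X) with lookup_a
        let b_ = meta.query_any(b, Rotation::cur());
        vec![(b_, sl)]
    });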