Gate Code for Testing Purposes Under testing Modules (#688)
* wip

* done

* fixes

* more fixes

* more fixes

* address comments

* revert some changes

* revert more changes

* address comments
sai-deng authored Oct 6, 2024
1 parent e5fe60b commit c476e61
Showing 6 changed files with 489 additions and 446 deletions.
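
The change is mechanical: helpers needed only by tests move out of the production impl blocks into nested pub mod testing modules, with their bodies unchanged. A minimal sketch of the pattern this commit applies (the names Prover and verify_for_tests are illustrative, not from this crate):

// Production API stays on the main impl block.
pub struct Prover;

impl Prover {
    pub fn prove(&self) -> anyhow::Result<()> {
        Ok(())
    }
}

// Test-only helpers are grouped under a nested module. Rust allows an
// inherent impl of a local type in any module of the defining crate,
// so the methods behave exactly as before.
pub mod testing {
    use super::*;

    impl Prover {
        pub fn verify_for_tests(&self) -> anyhow::Result<()> {
            Ok(())
        }
    }
}

Note that inherent methods are not namespaced by the module their impl lives in, so existing call sites keep working without importing testing; the module is an organizational boundary rather than a hard visibility gate. Unlike a #[cfg(test)] module, it also keeps the helpers available to out-of-crate integration tests and benchmarks.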
20 changes: 0 additions & 20 deletions evm_arithmetization/src/all_stark.rs
@@ -4,10 +4,8 @@ use std::iter;
 use plonky2::field::extension::Extendable;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
-use starky::config::StarkConfig;
 use starky::cross_table_lookup::{CrossTableLookup, TableIdx, TableWithColumns};
 use starky::evaluation_frame::StarkFrame;
-use starky::stark::Stark;

 use crate::arithmetic::arithmetic_stark;
 use crate::arithmetic::arithmetic_stark::ArithmeticStark;
@@ -69,24 +67,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Default for AllStark<F, D> {
     }
 }

-impl<F: RichField + Extendable<D>, const D: usize> AllStark<F, D> {
-    pub(crate) fn num_lookups_helper_columns(&self, config: &StarkConfig) -> [usize; NUM_TABLES] {
-        [
-            self.arithmetic_stark.num_lookup_helper_columns(config),
-            self.byte_packing_stark.num_lookup_helper_columns(config),
-            self.cpu_stark.num_lookup_helper_columns(config),
-            self.keccak_stark.num_lookup_helper_columns(config),
-            self.keccak_sponge_stark.num_lookup_helper_columns(config),
-            self.logic_stark.num_lookup_helper_columns(config),
-            self.memory_stark.num_lookup_helper_columns(config),
-            self.mem_before_stark.num_lookup_helper_columns(config),
-            self.mem_after_stark.num_lookup_helper_columns(config),
-            #[cfg(feature = "cdk_erigon")]
-            self.poseidon_stark.num_lookup_helper_columns(config),
-        ]
-    }
-}
-
 pub type EvmStarkFrame<T, U, const N: usize> = StarkFrame<T, U, N, 0>;

 /// Associates STARK tables with a unique index.
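
A side note on the helper removed above: the #[cfg(feature = "cdk_erigon")] attribute on the last array element changes the literal's arity per feature, which only type-checks because NUM_TABLES is gated on the same feature. A self-contained sketch of that pattern (the feature name and constants here are hypothetical):

// The length constant must be gated the same way as the element,
// or the array literal will not match its declared type.
#[cfg(not(feature = "extra_table"))]
pub const NUM_TABLES: usize = 2;
#[cfg(feature = "extra_table")]
pub const NUM_TABLES: usize = 3;

pub fn trace_lengths() -> [usize; NUM_TABLES] {
    [
        1 << 10,
        1 << 12,
        // Present only when the feature is enabled.
        #[cfg(feature = "extra_table")]
        1 << 14,
    ]
}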
151 changes: 81 additions & 70 deletions evm_arithmetization/src/fixed_recursive_verifier.rs
@@ -1898,48 +1898,6 @@ where
         })
     }

-    /// Returns a proof for each segment that is part of a full transaction
-    /// proof.
-    pub fn prove_all_segments(
-        &self,
-        all_stark: &AllStark<F, D>,
-        config: &StarkConfig,
-        generation_inputs: GenerationInputs<F>,
-        max_cpu_len_log: usize,
-        timing: &mut TimingTree,
-        abort_signal: Option<Arc<AtomicBool>>,
-    ) -> anyhow::Result<Vec<ProverOutputData<F, C, D>>> {
-        features_check(&generation_inputs.clone().trim());
-
-        let segment_iterator =
-            SegmentDataIterator::<F>::new(&generation_inputs, Some(max_cpu_len_log));
-
-        let mut proofs = vec![];
-
-        for segment_run in segment_iterator {
-            let (_, mut next_data) = segment_run?;
-            let proof = self.prove_segment(
-                all_stark,
-                config,
-                generation_inputs.trim(),
-                &mut next_data,
-                timing,
-                abort_signal.clone(),
-            )?;
-            proofs.push(proof);
-        }
-
-        // Since aggregations require at least two segment proofs, add a dummy proof if
-        // there is only one proof.
-        if proofs.len() == 1 {
-            let mut first_proof = proofs[0].clone();
-            first_proof.is_dummy = true;
-            proofs.push(first_proof);
-        }
-
-        Ok(proofs)
-    }
-
     /// From an initial set of STARK proofs passed with their associated
     /// recursive table circuits, generate a recursive transaction proof.
     /// It is aimed at being used when preprocessed table circuits have not been
@@ -2036,10 +1994,6 @@ where
         })
     }

-    pub fn verify_root(&self, agg_proof: ProofWithPublicInputs<F, C, D>) -> anyhow::Result<()> {
-        self.root.circuit.verify(agg_proof)
-    }
-
     /// Create a segment aggregation proof, combining two contiguous proofs into
     /// a single one. The combined proofs are segment proofs: they are
     /// proofs of some parts of one execution.
@@ -2153,18 +2107,6 @@
         Ok(agg_output)
     }

-    pub fn verify_segment_aggregation(
-        &self,
-        agg_proof: &ProofWithPublicInputs<F, C, D>,
-    ) -> anyhow::Result<()> {
-        self.segment_aggregation.circuit.verify(agg_proof.clone())?;
-        check_cyclic_proof_verifier_data(
-            agg_proof,
-            &self.segment_aggregation.circuit.verifier_only,
-            &self.segment_aggregation.circuit.common,
-        )
-    }
-
     /// Creates a final batch proof, once all segments of a given
     /// transaction batch have been combined into a single aggregation proof.
     ///
@@ -2244,18 +2186,6 @@
         })
     }

-    pub fn verify_batch_aggregation(
-        &self,
-        txn_proof: &ProofWithPublicInputs<F, C, D>,
-    ) -> anyhow::Result<()> {
-        self.batch_aggregation.circuit.verify(txn_proof.clone())?;
-        check_cyclic_proof_verifier_data(
-            txn_proof,
-            &self.batch_aggregation.circuit.verifier_only,
-            &self.batch_aggregation.circuit.common,
-        )
-    }
-
     /// If the proof is not an aggregation, we set the cyclic vk to a dummy
     /// value, so that it corresponds to the aggregation cyclic vk. If the proof
     /// is dummy, we set `is_dummy` to `true`. Note that only the rhs can be
@@ -2998,6 +2928,87 @@ where
         + (1 << circuit.common.config.fri_config.cap_height) * NUM_HASH_OUT_ELTS
 }

+pub mod testing {
+    use super::*;
+
+    impl<F, C, const D: usize> AllRecursiveCircuits<F, C, D>
+    where
+        F: RichField + Extendable<D>,
+        C: GenericConfig<D, F = F> + 'static,
+        C::Hasher: AlgebraicHasher<F>,
+    {
+        /// Returns a proof for each segment that is part of a full transaction
+        /// proof.
+        pub fn prove_all_segments(
+            &self,
+            all_stark: &AllStark<F, D>,
+            config: &StarkConfig,
+            generation_inputs: GenerationInputs<F>,
+            max_cpu_len_log: usize,
+            timing: &mut TimingTree,
+            abort_signal: Option<Arc<AtomicBool>>,
+        ) -> anyhow::Result<Vec<ProverOutputData<F, C, D>>> {
+            features_check(&generation_inputs.clone().trim());
+
+            let segment_iterator =
+                SegmentDataIterator::<F>::new(&generation_inputs, Some(max_cpu_len_log));
+
+            let mut proofs = vec![];
+
+            for segment_run in segment_iterator {
+                let (_, mut next_data) = segment_run?;
+                let proof = self.prove_segment(
+                    all_stark,
+                    config,
+                    generation_inputs.trim(),
+                    &mut next_data,
+                    timing,
+                    abort_signal.clone(),
+                )?;
+                proofs.push(proof);
+            }
+
+            // Since aggregations require at least two segment proofs, add a dummy proof if
+            // there is only one proof.
+            if proofs.len() == 1 {
+                let mut first_proof = proofs[0].clone();
+                first_proof.is_dummy = true;
+                proofs.push(first_proof);
+            }
+
+            Ok(proofs)
+        }
+
+        pub fn verify_root(&self, agg_proof: ProofWithPublicInputs<F, C, D>) -> anyhow::Result<()> {
+            self.root.circuit.verify(agg_proof)
+        }
+
+        pub fn verify_segment_aggregation(
+            &self,
+            agg_proof: &ProofWithPublicInputs<F, C, D>,
+        ) -> anyhow::Result<()> {
+            self.segment_aggregation.circuit.verify(agg_proof.clone())?;
+            check_cyclic_proof_verifier_data(
+                agg_proof,
+                &self.segment_aggregation.circuit.verifier_only,
+                &self.segment_aggregation.circuit.common,
+            )
+        }
+
+        pub fn verify_batch_aggregation(
+            &self,
+            txn_proof: &ProofWithPublicInputs<F, C, D>,
+        ) -> anyhow::Result<()> {
+            self.batch_aggregation.circuit.verify(txn_proof.clone())?;
+            check_cyclic_proof_verifier_data(
+                txn_proof,
+                &self.batch_aggregation.circuit.verifier_only,
+                &self.batch_aggregation.circuit.common,
+            )
+        }
+    }
+}
+
 #[cfg(test)]
 #[cfg(not(feature = "cdk_erigon"))]
 mod tests {
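
Schematically, a test drives the gated helpers above like this (a sketch assuming this crate's types are in scope; constructing the circuits and inputs follows the crate's usual setup, and max_cpu_len_log = 20 is an arbitrary placeholder):

use plonky2::field::extension::Extendable;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2::util::timing::TimingTree;
use starky::config::StarkConfig;

// Takes the already-built circuits and inputs as parameters so the sketch
// stays self-contained; building them is the crate's usual test setup.
fn prove_and_check_segments<F, C, const D: usize>(
    all_circuits: &AllRecursiveCircuits<F, C, D>,
    all_stark: &AllStark<F, D>,
    config: &StarkConfig,
    inputs: GenerationInputs<F>,
) -> anyhow::Result<()>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F> + 'static,
    C::Hasher: AlgebraicHasher<F>,
{
    let mut timing = TimingTree::default();
    let proofs = all_circuits.prove_all_segments(
        all_stark,
        config,
        inputs,
        20, // max_cpu_len_log: placeholder bound on a segment's CPU length
        &mut timing,
        None, // abort_signal: not used here
    )?;
    // Aggregation needs at least two segment proofs, so a lone segment is
    // padded with a dummy copy (is_dummy == true) before being returned.
    assert!(proofs.len() >= 2);
    Ok(())
}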
77 changes: 49 additions & 28 deletions evm_arithmetization/src/get_challenges.rs
@@ -3,8 +3,6 @@ use plonky2::field::extension::Extendable;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
 use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
-use starky::config::StarkConfig;
-use starky::lookup::get_grand_product_challenge_set;

 use crate::proof::*;
 use crate::util::{h256_limbs, u256_limbs, u256_to_u32, u256_to_u64};
@@ -247,36 +245,59 @@ pub(crate) fn observe_public_values_target<
     observe_burn_addr_target::<F, C, D>(challenger, public_values.burn_addr.clone());
 }

-impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D> {
-    /// Computes all Fiat-Shamir challenges used in the STARK proof.
-    pub(crate) fn get_challenges(
-        &self,
-        config: &StarkConfig,
-    ) -> Result<AllProofChallenges<F, D>, ProgramError> {
-        let mut challenger = Challenger::<F, C::Hasher>::new();
+pub mod testing {
+    use plonky2::field::extension::Extendable;
+    use plonky2::hash::hash_types::RichField;
+    use plonky2::iop::challenger::Challenger;
+    use plonky2::plonk::config::GenericConfig;
+    use starky::config::StarkConfig;
+    use starky::lookup::{get_grand_product_challenge_set, GrandProductChallengeSet};
+    use starky::proof::StarkProofChallenges;

-        let stark_proofs = &self.multi_proof.stark_proofs;
+    use crate::get_challenges::observe_public_values;
+    use crate::proof::*;
+    use crate::witness::errors::ProgramError;
+    use crate::NUM_TABLES;

-        for proof in stark_proofs {
-            challenger.observe_cap(&proof.proof.trace_cap);
-        }
+    /// Randomness for all STARKs.
+    pub(crate) struct AllProofChallenges<F: RichField + Extendable<D>, const D: usize> {
+        /// Randomness used in each STARK proof.
+        pub stark_challenges: [StarkProofChallenges<F, D>; NUM_TABLES],
+        /// Randomness used for cross-table lookups. It is shared by all STARKs.
+        pub ctl_challenges: GrandProductChallengeSet<F>,
+    }
+
+    impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D> {
+        /// Computes all Fiat-Shamir challenges used in the STARK proof.
+        pub(crate) fn get_challenges(
+            &self,
+            config: &StarkConfig,
+        ) -> Result<AllProofChallenges<F, D>, ProgramError> {
+            let mut challenger = Challenger::<F, C::Hasher>::new();

-        observe_public_values::<F, C, D>(&mut challenger, &self.public_values)?;
+            let stark_proofs = &self.multi_proof.stark_proofs;

-        let ctl_challenges =
-            get_grand_product_challenge_set(&mut challenger, config.num_challenges);
+            for proof in stark_proofs {
+                challenger.observe_cap(&proof.proof.trace_cap);
+            }

-        Ok(AllProofChallenges {
-            stark_challenges: core::array::from_fn(|i| {
-                challenger.compact();
-                stark_proofs[i].proof.get_challenges(
-                    &mut challenger,
-                    Some(&ctl_challenges),
-                    true,
-                    config,
-                )
-            }),
-            ctl_challenges,
-        })
+            observe_public_values::<F, C, D>(&mut challenger, &self.public_values)?;
+
+            let ctl_challenges =
+                get_grand_product_challenge_set(&mut challenger, config.num_challenges);
+
+            Ok(AllProofChallenges {
+                stark_challenges: core::array::from_fn(|i| {
+                    challenger.compact();
+                    stark_proofs[i].proof.get_challenges(
+                        &mut challenger,
+                        Some(&ctl_challenges),
+                        true,
+                        config,
+                    )
+                }),
+                ctl_challenges,
+            })
+        }
     }
 }
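
The invariant behind get_challenges: Fiat-Shamir challenges are a deterministic function of everything the transcript has observed so far, so this verifier-side recomputation must mirror the prover's observation order exactly — the trace cap of every table, then the public values, then the shared cross-table-lookup challenges. A toy transcript illustrating the invariant (illustrative only; the real code uses plonky2's Challenger, a sponge over the config's hash):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Toy Fiat-Shamir transcript: each challenge is derived from the running
// state, and the state absorbs every observation in order.
struct Transcript {
    state: u64,
}

impl Transcript {
    fn new() -> Self {
        Transcript { state: 0 }
    }

    fn observe(&mut self, data: &[u8]) {
        let mut h = DefaultHasher::new();
        self.state.hash(&mut h);
        data.hash(&mut h);
        self.state = h.finish();
    }

    fn challenge(&mut self) -> u64 {
        let c = self.state;
        self.observe(b"next"); // domain-separate successive draws
        c
    }
}

fn main() {
    let mut prover = Transcript::new();
    let mut verifier = Transcript::new();
    // Both sides must observe the same data in the same order...
    for t in [&mut prover, &mut verifier] {
        t.observe(b"trace_cap_0");
        t.observe(b"public_values");
    }
    // ...or the recomputed challenges diverge and verification fails.
    assert_eq!(prover.challenge(), verifier.challenge());
}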
11 changes: 1 addition & 10 deletions evm_arithmetization/src/proof.rs
@@ -10,8 +10,7 @@ use plonky2::plonk::config::{GenericConfig, GenericHashOut, Hasher};
 use plonky2::util::serialization::{Buffer, IoResult, Read, Write};
 use serde::{Deserialize, Serialize};
 use starky::config::StarkConfig;
-use starky::lookup::GrandProductChallengeSet;
-use starky::proof::{MultiProof, StarkProofChallenges};
+use starky::proof::MultiProof;

 use crate::all_stark::NUM_TABLES;
 use crate::util::{get_h160, get_h256, get_u256, h256_limbs, h2u};
@@ -40,14 +39,6 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> A
     }
 }

-/// Randomness for all STARKs.
-pub(crate) struct AllProofChallenges<F: RichField + Extendable<D>, const D: usize> {
-    /// Randomness used in each STARK proof.
-    pub stark_challenges: [StarkProofChallenges<F, D>; NUM_TABLES],
-    /// Randomness used for cross-table lookups. It is shared by all STARKs.
-    pub ctl_challenges: GrandProductChallengeSet<F>,
-}
-
 /// Memory values which are public.
 #[derive(Debug, Clone, Default, PartialEq, Eq, Deserialize, Serialize)]
 #[serde(bound = "")]
2 changes: 1 addition & 1 deletion evm_arithmetization/src/prover.rs
@@ -22,10 +22,10 @@ use starky::stark::Stark;

 use crate::all_stark::{AllStark, Table, NUM_TABLES};
 use crate::cpu::kernel::aggregator::KERNEL;
-use crate::generation::segments::GenerationSegmentData;
 use crate::generation::{generate_traces, GenerationInputs, TrimmedGenerationInputs};
 use crate::get_challenges::observe_public_values;
 use crate::proof::{AllProof, MemCap, PublicValues, DEFAULT_CAP_LEN};
+use crate::GenerationSegmentData;

 /// Generate traces, then create all STARK proofs.
 pub fn prove<F, C, const D: usize>(
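
The shortened import works only if the type is re-exported at the crate root; the usual mechanism is a pub use in lib.rs, sketched below (the exact re-export line in this crate is an assumption):

// Hypothetical lib.rs fragment: a root-level re-export lets internal code
// write `crate::GenerationSegmentData` and downstream crates write
// `evm_arithmetization::GenerationSegmentData`, regardless of where the
// type is defined internally.
pub mod generation;

pub use generation::segments::GenerationSegmentData;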
