Skip to content

Commit

Permalink
Merge pull request o1-labs#3009 from o1-labs/martin/saffron-update-with-diff-type
Browse files Browse the repository at this point in the history

[saffron] Add update methods with tests
  • Loading branch information
dannywillems authored Feb 11, 2025
2 parents 39868c4 + dca3c7f commit 1c64bc0
Show file tree
Hide file tree
Showing 4 changed files with 117 additions and 20 deletions.
114 changes: 100 additions & 14 deletions saffron/src/blob.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
use crate::{
commitment::Commitment,
diff::Diff,
utils::{decode_into, encode_for_domain},
};
use ark_ff::PrimeField;
use ark_poly::{univariate::DensePolynomial, EvaluationDomain, Evaluations};
use ark_poly::{
univariate::DensePolynomial, EvaluationDomain, Evaluations, Radix2EvaluationDomain,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use kimchi::curve::KimchiCurve;
use mina_poseidon::FqSponge;
Expand All @@ -24,7 +27,7 @@ pub struct FieldBlob<G: CommitmentCurve> {
pub domain_size: usize,
pub commitment: Commitment<G>,
#[serde_as(as = "Vec<o1_utils::serialization::SerdeAs>")]
pub data: Vec<DensePolynomial<G::ScalarField>>,
pub chunks: Vec<DensePolynomial<G::ScalarField>>,
}

#[instrument(skip_all, level = "debug")]
Expand All @@ -51,29 +54,29 @@ impl<G: KimchiCurve> FieldBlob<G> {
let field_elements = encode_for_domain(&domain, bytes);
let domain_size = domain.size();

let data: Vec<DensePolynomial<G::ScalarField>> = debug_span!("fft").in_scope(|| {
let chunks: Vec<DensePolynomial<G::ScalarField>> = debug_span!("fft").in_scope(|| {
field_elements
.par_iter()
.map(|chunk| Evaluations::from_vec_and_domain(chunk.to_vec(), domain).interpolate())
.collect()
});
let commitment = {
let chunks = commit_to_blob_data(srs, &data);
let chunks = commit_to_blob_data(srs, &chunks);
let mut sponge = EFqSponge::new(G::other_curve_sponge_params());
Commitment::from_chunks(chunks, &mut sponge)
};

debug!(
"Encoded {:.2} MB into {} polynomials",
bytes.len() as f32 / 1_000_000.0,
data.len()
chunks.len()
);

FieldBlob {
n_bytes: bytes.len(),
domain_size,
commitment,
data,
chunks,
}
}

Expand All @@ -92,7 +95,7 @@ impl<G: KimchiCurve> FieldBlob<G> {
let mut bytes = Vec::with_capacity(blob.n_bytes);
let mut buffer = vec![0u8; m];

for p in blob.data {
for p in blob.chunks {
let evals = p.evaluate_over_domain(domain).evals;
for x in evals {
decode_into(&mut buffer, x);
Expand All @@ -103,14 +106,44 @@ impl<G: KimchiCurve> FieldBlob<G> {
bytes.truncate(blob.n_bytes);
bytes
}

/// Applies a sparse `diff` to this blob in place, updating the chunk
/// polynomials, the chunk commitments (and their folded commitment), and
/// the recorded byte length.
pub fn update<EFqSponge: FqSponge<G::BaseField, G, G::ScalarField>>(
&mut self,
srs: &SRS<G>,
domain: &Radix2EvaluationDomain<G::ScalarField>,
diff: Diff<G::ScalarField>,
) {
// Densify the sparse diff into per-chunk evaluation vectors over `domain`.
let diff_evaluations = diff.as_evaluations(domain);
let commitment = {
// Commit to each diff chunk separately; `Commitment::update` adds
// these chunk-wise to the existing commitments, so the sum commits
// to `old_chunk + diff_chunk` (commitment is linear in the data).
let commitment_diffs = diff_evaluations
.par_iter()
.map(|evals| srs.commit_evaluations_non_hiding(*domain, evals))
.collect::<Vec<_>>();
let mut sponge = EFqSponge::new(G::other_curve_sponge_params());
// NOTE: this must read the *pre-update* `self.commitment`; it is
// assigned back to `self` only after the chunks are recomputed below.
self.commitment.update(commitment_diffs, &mut sponge)
};
// New chunk polynomials: interpolate each diff chunk and add it to the
// corresponding existing polynomial.
let chunks: Vec<DensePolynomial<G::ScalarField>> = diff_evaluations
.into_par_iter()
.zip(self.chunks.par_iter())
.map(|(evals, p)| {
let d_p: DensePolynomial<G::ScalarField> = evals.interpolate();
p + &d_p
})
.collect();
self.commitment = commitment;
self.chunks = chunks;
// The diff carries the byte length of the new data (may differ from the
// old length; decoding truncates to this value).
self.n_bytes = diff.new_byte_len;
}
}

#[cfg(test)]
mod tests {
use crate::{commitment::commit_to_field_elems, env};

use super::*;
use crate::utils::test_utils::*;
use crate::{diff::tests::*, utils::test_utils::*};
use ark_ec::AffineRepr;
use ark_ff::Zero;
use ark_poly::Radix2EvaluationDomain;
use mina_curves::pasta::{Fp, Vesta, VestaParameters};
use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge};
Expand Down Expand Up @@ -149,11 +182,64 @@ mod tests {
proptest! {
#![proptest_config(ProptestConfig::with_cases(10))]
#[test]
fn test_user_and_storage_provider_commitments_equal(UserData(xs) in UserData::arbitrary())
{ let elems = encode_for_domain(&*DOMAIN, &xs);
let user_commitments = commit_to_field_elems::<_, VestaFqSponge>(&*SRS, *DOMAIN, elems);
let blob = FieldBlob::<Vesta>::encode::<_, VestaFqSponge>(&*SRS, *DOMAIN, &xs);
prop_assert_eq!(user_commitments, blob.commitment);
}
fn test_user_and_storage_provider_commitments_equal(UserData(xs) in UserData::arbitrary())
{ let elems = encode_for_domain(&*DOMAIN, &xs);
let user_commitments = commit_to_field_elems::<_, VestaFqSponge>(&*SRS, *DOMAIN, elems);
let blob = FieldBlob::<Vesta>::encode::<_, VestaFqSponge>(&*SRS, *DOMAIN, &xs);
prop_assert_eq!(user_commitments, blob.commitment);
}
}

/// Encodes `xs` as a blob, then pads it (with zero polynomials and zero
/// commitments) so that it has exactly `chunk_size` chunks. Used to compare
/// a freshly-encoded blob against an updated one of a fixed chunk count.
fn encode_to_chunk_size(xs: &[u8], chunk_size: usize) -> FieldBlob<Vesta> {
    let mut blob = FieldBlob::<Vesta>::encode::<_, VestaFqSponge>(&*SRS, *DOMAIN, xs);
    assert!(blob.chunks.len() <= chunk_size);

    // Pad the polynomial chunks with zero polynomials.
    while blob.chunks.len() < chunk_size {
        blob.chunks.push(DensePolynomial::zero());
    }

    // Pad the chunk commitments with commitments to zero and re-fold.
    let mut padded_commitments = blob.commitment.chunks.clone();
    padded_commitments.resize(chunk_size, PolyComm::new(vec![Vesta::zero()]));
    let mut sponge = VestaFqSponge::new(Vesta::other_curve_sponge_params());
    blob.commitment = Commitment::from_chunks(padded_commitments, &mut sponge);

    blob
}

proptest! {
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]

// Updating a blob of `xs` with the diff towards `ys` must agree with the
// user's independently-updated commitment, and must equal the blob obtained
// by encoding `ys` directly (padded to the same chunk count).
fn test_allow_legal_updates((UserData(xs), UserData(ys)) in
(UserData::arbitrary().prop_flat_map(random_diff))
) {
// start with some random user data
let mut xs_blob = FieldBlob::<Vesta>::encode::<_, VestaFqSponge>(&*SRS, *DOMAIN, &xs);
let diff = Diff::<Fp>::create(&*DOMAIN, &xs, &ys).unwrap();
// storage-provider side: apply the diff in place
xs_blob.update::<VestaFqSponge>(&*SRS, &*DOMAIN, diff.clone());

// check that the user and SP agree on the new data
let user_commitment = {
// user side: commit to the *old* data...
let elems = encode_for_domain(&*DOMAIN, &xs);
let commitment = commit_to_field_elems::<Vesta, VestaFqSponge>(&*SRS, *DOMAIN, elems);

// ...commit to the diff chunks independently...
let commitment_diffs = diff.as_evaluations(&*DOMAIN)
.par_iter()
.map(|evals| SRS.commit_evaluations_non_hiding(*DOMAIN, evals))
.collect::<Vec<_>>();

let mut sponge = VestaFqSponge::new(Vesta::other_curve_sponge_params());
// ...and fold the diff commitments into the old commitment.
commitment.update(commitment_diffs, &mut sponge)

};

// encode `ys` directly, padded so chunk counts line up with the update
let ys_blob = encode_to_chunk_size(&ys, xs_blob.chunks.len());
prop_assert_eq!(user_commitment.clone(), ys_blob.commitment.clone());

// the updated blob should be the same as if we just start with the new data
prop_assert_eq!(xs_blob, ys_blob)
}

}
}
9 changes: 9 additions & 0 deletions saffron/src/commitment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ use poly_commitment::{
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use std::ops::Add;
use tracing::instrument;

#[serde_as]
Expand All @@ -36,6 +37,14 @@ impl<G: KimchiCurve> Commitment<G> {
folded,
}
}

/// Builds a new `Commitment` by adding `diff` chunk-wise to the current
/// chunk commitments and re-folding the result with `sponge`. Does not
/// mutate `self`.
pub fn update<EFqSponge>(&self, diff: Vec<PolyComm<G>>, sponge: &mut EFqSponge) -> Self
where
    EFqSponge: FqSponge<G::BaseField, G, G::ScalarField>,
{
    let mut updated = Vec::with_capacity(self.chunks.len());
    for (chunk, delta) in self.chunks.iter().zip(diff) {
        updated.push(chunk.add(&delta));
    }
    Self::from_chunks(updated, sponge)
}
}

#[instrument(skip_all, level = "debug")]
Expand Down
12 changes: 7 additions & 5 deletions saffron/src/diff.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ use tracing::instrument;
// sparse representation, keeping only the non-zero differences
#[derive(Clone, Debug, PartialEq)]
pub struct Diff<F: PrimeField> {
// Per-chunk list of (evaluation index, delta) pairs; the delta is *added*
// to the old evaluation at that index to obtain the new one.
pub chunks: Vec<Vec<(usize, F)>>,
// Byte length of the new data (the diff may change the length); consumers
// use this to update the blob's recorded `n_bytes`.
pub new_byte_len: usize,
}

#[derive(Debug, Error, Clone, PartialEq)]
Expand Down Expand Up @@ -40,7 +41,8 @@ impl<F: PrimeField> Diff<F> {
new_elems.resize(old_elems.len(), padding);
}
Ok(Diff {
evaluation_diffs: new_elems
new_byte_len: new.len(),
chunks: new_elems
.par_iter()
.zip(old_elems)
.map(|(n, o)| {
Expand All @@ -60,7 +62,7 @@ impl<F: PrimeField> Diff<F> {
&self,
domain: &Radix2EvaluationDomain<F>,
) -> Vec<Evaluations<F, Radix2EvaluationDomain<F>>> {
self.evaluation_diffs
self.chunks
.par_iter()
.map(|diff| {
let mut evals = vec![F::zero(); domain.size()];
Expand Down Expand Up @@ -117,7 +119,7 @@ pub mod tests {
fn add(mut evals: Vec<Vec<Fp>>, diff: &Diff<Fp>) -> Vec<Vec<Fp>> {
evals
.par_iter_mut()
.zip(diff.evaluation_diffs.par_iter())
.zip(diff.chunks.par_iter())
.for_each(|(eval_chunk, diff_chunk)| {
diff_chunk.iter().for_each(|(j, val)| {
eval_chunk[*j] += val;
Expand All @@ -130,7 +132,7 @@ pub mod tests {
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]

fn test_allow_legal_updates((UserData(xs), UserData(ys)) in
fn test_allow_legal_diff((UserData(xs), UserData(ys)) in
(UserData::arbitrary().prop_flat_map(random_diff))
) {
let diff = Diff::<Fp>::create(&*DOMAIN, &xs, &ys);
Expand Down
2 changes: 1 addition & 1 deletion saffron/src/proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ where
{
let p = {
let init = (DensePolynomial::zero(), G::ScalarField::one());
blob.data
blob.chunks
.into_iter()
.fold(init, |(acc_poly, curr_power), curr_poly| {
(
Expand Down

0 comments on commit 1c64bc0

Please sign in to comment.