Skip to content

Commit

Permalink
test: add extend_segments_col_random
Browse files Browse the repository at this point in the history
  • Loading branch information
DarkingLee committed Jul 23, 2023
1 parent 1335189 commit 5e783bc
Show file tree
Hide file tree
Showing 4 changed files with 96 additions and 173 deletions.
9 changes: 5 additions & 4 deletions crates/melo-erasure-coding/src/extend_col.rs
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,8 @@ pub fn extend_segments_col(

let mut extended_cols = vec![];

for (i, _) in [0..(Segment::SIZE)].iter().enumerate() {
let col = sorted_rows
for i in 0..(Segment::SIZE) {
let col: Vec<BlsScalar> = sorted_rows
.iter()
.skip(i)
.step_by(Segment::SIZE as usize)
Expand All @@ -61,12 +61,13 @@ pub fn extend_segments_col(

let mut extended_segments = vec![];

extended_proofs.iter().enumerate().for_each(|(i, proof)| {
// Only the odd-indexed (parity) proofs are needed here
extended_proofs.iter().skip(1).step_by(2).enumerate().for_each(|(i, proof)| {
let position = melo_core_primitives::kzg::Position { x, y: (i + k) as u32 };
let data = extended_cols.iter().map(|col| col[i]).collect::<Vec<BlsScalar>>();
let segment = Segment { position, content: SegmentData { data, proof: proof.clone() } };
extended_segments.push(segment);
});

Ok(extended_segments)
}
}
4 changes: 2 additions & 2 deletions crates/melo-erasure-coding/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,9 @@ pub fn bytes_vec_to_blobs(bytes_vec: &Vec<Vec<u8>>) -> Result<Vec<Blob>, String>
Ok(blobs)
}

pub fn bytes_vec_to_segments(bytes_vec: &Vec<Vec<u8>>) -> Result<Vec<Vec<Segment>>, String> {
pub fn blobs_to_segments(blobs: &Vec<Blob>) -> Result<Vec<Vec<Segment>>, String> {
let kzg = KZG::new(embedded_kzg_settings());
let matrix = bytes_vec_to_blobs(bytes_vec)?
let matrix = blobs
.iter()
.enumerate()
.map(|(y, blob)| {
Expand Down
249 changes: 84 additions & 165 deletions crates/melo-erasure-coding/src/tests.rs
Original file line number Diff line number Diff line change
@@ -1,107 +1,82 @@
use crate::erasure_coding::*;
use crate::extend_col::extend_segments_col;
use crate::recovery::*;
use crate::segment::*;
// use blst_rust::types::g1::FsG1;
use alloc::vec;
use kzg::G1;
use rust_kzg_blst::types::g1::FsG1;
use core::slice::Chunks;
use kzg::FFTFr;
use kzg::Fr;
use melo_core_primitives::kzg::BlsScalar;
use melo_core_primitives::kzg::Polynomial;
use melo_core_primitives::kzg::ReprConvert;
use melo_core_primitives::kzg::{embedded_kzg_settings, KZG};
use melo_core_primitives::kzg::{embedded_kzg_settings, KZG, KZGCommitment};
use rust_kzg_blst::types::fr::FsFr;
use rust_kzg_blst::types::poly::FsPoly;
use rust_kzg_blst::utils::reverse_bit_order;
use std::iter;
use std::num::NonZeroUsize;
// use subspace_core_primitives::crypto::kzg::Commitment;
// use subspace_core_primitives::crypto::Scalar;

/// Reverses the bits of `value` within the bit-width occupied by `length`.
///
/// The reversal window is derived from `length`: all leading zero bits of
/// `length` are shifted away after a full-width bit reversal, so only the
/// significant bits of `length` take part in the result.
fn reverse_bits_limited(length: usize, value: usize) -> usize {
    // Full-width reversal, then discard the bit positions `length` never uses.
    value.reverse_bits() >> length.leading_zeros()
}

// TODO: This could have been done in-place, once implemented can be exposed as a utility
/// Reorders a concatenated `[a0, a1, …, b0, b1, …]` vector into interleaved
/// `[a0, b0, a1, b1, …]` form.
///
/// The input is split at the midpoint and the two halves are zipped together.
/// For an odd-length input the second half is one element longer; that final
/// unpaired element is appended at the end instead of being silently dropped
/// (the previous implementation lost it via `zip`).
///
/// Inputs of length 0 or 1 are returned unchanged.
fn concatenated_to_interleaved<T>(input: Vec<T>) -> Vec<T>
where
    T: Clone,
{
    if input.len() <= 1 {
        return input;
    }

    let (first_half, second_half) = input.split_at(input.len() / 2);

    first_half
        .iter()
        .zip(second_half)
        .flat_map(|(a, b)| [a, b])
        // For odd lengths `second_half` has one extra element that `zip`
        // never visits; keep it so no data is lost.
        .chain(second_half.iter().skip(first_half.len()))
        .cloned()
        .collect()
}

// TODO: This could have been done in-place, once implemented can be exposed as a utility
/// Reorders an interleaved `[a0, b0, a1, b1, …]` vector into concatenated
/// `[a0, a1, …, b0, b1, …]` form: all even-indexed elements first, followed
/// by all odd-indexed elements.
fn interleaved_to_concatenated<T>(input: Vec<T>) -> Vec<T>
where
    T: Clone,
{
    let mut output = Vec::with_capacity(input.len());
    // Even positions form the first half of the result…
    output.extend(input.iter().step_by(2).cloned());
    // …and odd positions form the second half.
    output.extend(input.iter().skip(1).step_by(2).cloned());
    output
}
// pub fn recovery_row_from_segments(
// segments: &Vec<Segment>,
// kzg: &KZG,
// ) -> Result<Vec<Segment>, String> {
#[test]
fn recovery_row_from_segments_test() {
let scale = NonZeroUsize::new(4).unwrap();
let kzg = KZG::new(embedded_kzg_settings());
let num_shards = 2usize.pow(scale.get() as u32);
let source_shards = (0..num_shards / 2)
fn random_poly(s: usize) -> Polynomial {
let coeffs = (0..s)
.map(|_| rand::random::<[u8; 31]>())
.map(BlsScalar::from)
.collect::<Vec<_>>();

let parity_shards = extend(kzg.get_fs(), &source_shards).unwrap();

let partial_shards = concatenated_to_interleaved(
iter::repeat(None)
.take(num_shards / 4)
.chain(source_shards.iter().skip(num_shards / 4).copied().map(Some))
.chain(parity_shards.iter().take(num_shards / 4).copied().map(Some))
.chain(iter::repeat(None).take(num_shards / 4))
.collect::<Vec<_>>(),
);

let recovered = interleaved_to_concatenated(recover(kzg.get_fs(), &partial_shards).unwrap());

assert_eq!(recovered, source_shards.iter().chain(&parity_shards).copied().collect::<Vec<_>>());
let poly = FsPoly { coeffs: BlsScalar::vec_to_repr(coeffs) };
Polynomial::from(poly)
}

// #[test]
// fn recovery_row_from_segments_test() {
// let scale = NonZeroUsize::new(4).unwrap();
// let kzg = KZG::new(embedded_kzg_settings());
// let num_shards = 2usize.pow(scale.get() as u32);
// let source_shards = (0..num_shards / 2)
// .map(|_| rand::random::<[u8; 31]>())
// .map(BlsScalar::from)
// .collect::<Vec<_>>();

// let parity_shards = extend(kzg.get_fs(), &source_shards).unwrap();

// let partial_shards = concatenated_to_interleaved(
// iter::repeat(None)
// .take(num_shards / 4)
// .chain(source_shards.iter().skip(num_shards / 4).copied().map(Some))
// .chain(parity_shards.iter().take(num_shards / 4).copied().map(Some))
// .chain(iter::repeat(None).take(num_shards / 4))
// .collect::<Vec<_>>(),
// );

// let recovered = interleaved_to_concatenated(recover(kzg.get_fs(), &partial_shards).unwrap());

// assert_eq!(recovered, source_shards.iter().chain(&parity_shards).copied().collect::<Vec<_>>());
// }

#[test]
fn commit_multi_test() {
fn commit_multi_random() {
let chunk_len: usize = 16;
let chunk_count: usize = 4;
let num_shards = chunk_len * chunk_count;

let kzg = KZG::new(embedded_kzg_settings());

let s = (0..num_shards)
.map(|_| rand::random::<[u8; 31]>())
.map(BlsScalar::from)
.collect::<Vec<_>>();
let mut poly: Polynomial = Polynomial::new(num_shards).unwrap();
for i in 0..num_shards {
poly.0.coeffs[i] = FsFr::from(s[i]);
}
let poly = random_poly(num_shards);

// Commit to the polynomial
let commitment = kzg.commit(&poly).unwrap();
// Compute the multi proofs
let proofs = kzg.all_proofs(&poly).unwrap();

let mut extended_coeffs = vec![FsFr::zero(); 2 * num_shards];
for (i, extended_coeff) in extended_coeffs.iter_mut().enumerate().take(num_shards) {
*extended_coeff = *s[i];
}
let mut extended_coeffs = poly.0.coeffs.clone();

extended_coeffs.resize(poly.0.coeffs.len() * 2, FsFr::zero());

let mut extended_coeffs_fft = kzg.get_fs().fft_fr(&extended_coeffs, false).unwrap();

Expand All @@ -125,7 +100,7 @@ fn commit_multi_test() {
}

#[test]
fn extend_and_commit_multi_test() {
fn extend_and_commit_multi_random() {
let chunk_len: usize = 16;
let chunk_count: usize = 4;
let num_shards = chunk_len * chunk_count;
Expand Down Expand Up @@ -178,103 +153,47 @@ fn extend_and_commit_multi_test() {
}
}

#[test]
fn extend_fs_g1_random() {
    // Extending a list of commitments must double its length while keeping
    // each original commitment at the even positions of the extended list
    // (so original commitment `j` reappears at extended index `2 * j`).
    let kzg = KZG::new(embedded_kzg_settings());

    let commits: Vec<KZGCommitment> =
        (0..4).map(|_| KZGCommitment(FsG1::rand())).collect();

    let extended_commits = extend_fs_g1(kzg.get_fs(), &commits).unwrap();

    assert!(extended_commits.len() == 8);
    assert!(extended_commits[2].0 == commits[1].0);
}

// #[test]
// fn basic_data() {
// let scale = NonZeroUsize::new(8).unwrap();
// let num_shards = 2usize.pow(scale.get() as u32);
// let ec = ErasureCoding::new(scale).unwrap();

// let source_shards = (0..num_shards / 2)
// .map(|_| rand::random::<[u8; Scalar::SAFE_BYTES]>())
// .map(Scalar::from)
// .collect::<Vec<_>>();

// let parity_shards = ec.extend(&source_shards).unwrap();

// assert_ne!(source_shards, parity_shards);

// let partial_shards = concatenated_to_interleaved(
// iter::repeat(None)
// .take(num_shards / 4)
// .chain(source_shards.iter().skip(num_shards / 4).copied().map(Some))
// .chain(parity_shards.iter().take(num_shards / 4).copied().map(Some))
// .chain(iter::repeat(None).take(num_shards / 4))
// .collect::<Vec<_>>(),
// );

// let recovered = interleaved_to_concatenated(ec.recover(&partial_shards).unwrap());

// assert_eq!(
// recovered,
// source_shards
// .iter()
// .chain(&parity_shards)
// .copied()
// .collect::<Vec<_>>()
// );
// }

// #[test]
// fn basic_commitments() {
// let scale = NonZeroUsize::new(7).unwrap();
// let num_shards = 2usize.pow(scale.get() as u32);
// let ec = ErasureCoding::new(scale).unwrap();

// let source_commitments = (0..num_shards / 2)
// .map(|_| Commitment::from(FsG1::rand()))
// .collect::<Vec<_>>();

// let parity_commitments = ec.extend_commitments(&source_commitments).unwrap();

// assert_eq!(source_commitments.len() * 2, parity_commitments.len());

// // Even indices must be source
// assert_eq!(
// source_commitments,
// parity_commitments
// .iter()
// .step_by(2)
// .copied()
// .collect::<Vec<_>>()
// );
// }

// #[test]
// fn bad_shards_number() {
// let scale = NonZeroUsize::new(8).unwrap();
// let num_shards = 2usize.pow(scale.get() as u32);
// let ec = ErasureCoding::new(scale).unwrap();

// let source_shards = vec![Default::default(); num_shards - 1];

// assert!(ec.extend(&source_shards).is_err());

// let partial_shards = vec![Default::default(); num_shards - 1];
// assert!(ec.recover(&partial_shards).is_err());
// }

// #[test]
// fn not_enough_partial() {
// let scale = NonZeroUsize::new(8).unwrap();
// let num_shards = 2usize.pow(scale.get() as u32);
// let ec = ErasureCoding::new(scale).unwrap();

// let mut partial_shards = vec![None; num_shards];

// // Less than half is not sufficient
// partial_shards
// .iter_mut()
// .take(num_shards / 2 - 1)
// .for_each(|maybe_scalar| {
// maybe_scalar.replace(Scalar::default());
// });
// assert!(ec.recover(&partial_shards).is_err());

// // Any half is sufficient
// partial_shards
// .last_mut()
// .unwrap()
// .replace(Scalar::default());
// assert!(ec.recover(&partial_shards).is_ok());
// }
#[test]
fn extend_segments_col_random() {
    // End-to-end check of column extension: build a matrix of segment rows
    // from random polynomials, extend one column, and verify every extended
    // segment against the corresponding extended (parity) commitment.
    let chunk_len: usize = 16;
    let chunk_count: usize = 4;
    let num_shards = chunk_len * chunk_count;
    let k: usize = 4;

    // `k` rows of random polynomials.
    let polys: Vec<_> = (0..k).map(|_| random_poly(num_shards)).collect();

    // Commit to every polynomial, then extend the commitments to twice the size.
    let kzg = KZG::new(embedded_kzg_settings());
    let commitments: Vec<_> = polys.iter().map(|poly| kzg.commit(poly).unwrap()).collect();
    let extended_commitments = extend_fs_g1(kzg.get_fs(), &commitments).unwrap();

    // Turn each polynomial into its segment representation (one matrix row per poly).
    let matrix: Vec<_> = polys
        .iter()
        .enumerate()
        .map(|(y, poly)| poly_to_segment_vec(poly, &kzg, y).unwrap())
        .collect();
    // Sanity check: a source segment verifies against its source commitment.
    assert!(matrix[0][0].verify(&kzg, &commitments[0], chunk_count).unwrap());

    // Pick one column of the matrix and extend it.
    let pick_col_index: usize = 1;
    let col: Vec<_> = matrix.iter().map(|row| row[pick_col_index].clone()).collect();
    let extended_col = extend_segments_col(kzg.get_fs(), &col).unwrap();

    // Each extended segment in the column must verify against the odd-indexed
    // extended commitment of its row (odd indices hold the parity data).
    for i in 0..chunk_count {
        let segment = &extended_col[i];
        assert!(segment.verify(&kzg, &extended_commitments[i * 2 + 1], chunk_count).unwrap());
    }
}
7 changes: 5 additions & 2 deletions primitives/src/segment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ extern crate alloc;

use alloc::{string::String, vec::Vec};
use derive_more::{AsMut, AsRef, From};
use rust_kzg_blst::utils::reverse_bit_order;

use crate::config::{FIELD_ELEMENTS_PER_BLOB, SEGMENT_LENGTH};
use crate::kzg::{
Expand Down Expand Up @@ -68,13 +69,15 @@ impl Segment {
let segment_data = SegmentData::new(data, proof);
Self { position, content: segment_data }
}

pub fn verify(&self, kzg: &KZG, commitment: &KZGCommitment, count: usize) -> Result<bool, String> {
let mut ys = BlsScalar::vec_to_repr(self.content.data.clone());
reverse_bit_order(&mut ys);
kzg.check_proof_multi(
&commitment,
self.position.x as usize,
count,
BlsScalar::slice_to_repr(&self.content.data),
&ys,
&self.content.proof,
Self::SIZE
)
Expand Down

0 comments on commit 5e783bc

Please sign in to comment.