From 4c390323d3b4731091d87669d4eb464e10eb8f67 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Tue, 28 May 2024 13:59:31 +0530 Subject: [PATCH 01/35] rebase/add: rebased kzgpeerdas to wip-peerdas, no conflicts with unstable --- beacon_chain/spec/eip7594_helpers.nim | 873 +++++--------------------- 1 file changed, 154 insertions(+), 719 deletions(-) diff --git a/beacon_chain/spec/eip7594_helpers.nim b/beacon_chain/spec/eip7594_helpers.nim index 118f1b2fe0..da87d67f1b 100644 --- a/beacon_chain/spec/eip7594_helpers.nim +++ b/beacon_chain/spec/eip7594_helpers.nim @@ -10,648 +10,20 @@ # Uncategorized helper functions from the spec import + tables, algorithm, std/macros, results, stew/assign2, + nim-ssz-serialization/ssz_serialization/proofs, chronicles, std/sequtils, ./[beacon_time, crypto], eth/p2p/discoveryv5/[node], ./helpers, - ./datatypes/[eip7594] + ./datatypes/[eip7594, deneb] -#### `cell_to_coset_evals` - - -# proc cell_to_coset_evals(cell: Cell): CosetEvals = -# discard """ -# Convert an untrusted ``Cell`` into a trusted ``CosetEvals``. -# """ -# # evals = [] -# # for i in range(FIELD_ELEMENTS_PER_CELL): -# # start = i * BYTES_PER_FIELD_ELEMENT -# # end = (i + 1) * BYTES_PER_FIELD_ELEMENT -# # value = bytes_to_bls_field(cell[start:end]) -# # evals.append(value) -# # return CosetEvals(evals) - - -# #### `coset_evals_to_cell` - - -# proc coset_evals_to_cell(coset_evals: CosetEvals): Cell = -# discard """ -# Convert a trusted ``CosetEval`` into an untrusted ``Cell``. -# """ -# # cell = [] -# # for i in range(FIELD_ELEMENTS_PER_CELL): -# # cell += bls_field_to_bytes(coset_evals[i]) -# # return Cell(cell) - - -# ### Linear combinations - -# #### `g2_lincomb` - - -# proc g2_lincomb(points: openArray[G2Point], scalars: seq[BLSFieldElement]): Bytes96 = -# discard """ -# BLS multiscalar multiplication in G2. This can be naively implemented using double-and-add. 
-# """ -# # assert len(points) == len(scalars) - -# # if len(points) == 0: -# # return bls.G2_to_bytes96(bls.Z2()) - -# # points_g2 = [] -# # for point in points: -# # points_g2.append(bls.bytes96_to_G2(point)) - -# # result = bls.multi_exp(points_g2, scalars) -# # return Bytes96(bls.G2_to_bytes96(result)) - - -# ### FFTs - -# #### `_fft_field` - - -# proc xfft_field(vals: openArray[BLSFieldElement], -# roots_of_unity: openArray[BLSFieldElement]): seq[BLSFieldElement] = -# # if len(vals) == 1: -# # return vals -# # L = _fft_field(vals[::2], roots_of_unity[::2]) -# # R = _fft_field(vals[1::2], roots_of_unity[::2]) -# # o = [BLSFieldElement(0) for _ in vals] -# # for i, (x, y) in enumerate(zip(L, R)): -# # y_times_root = (int(y) * int(roots_of_unity[i])) % BLS_MODULUS -# # o[i] = BLSFieldElement((int(x) + y_times_root) % BLS_MODULUS) -# # o[i + len(L)] = BLSFieldElement((int(x) - y_times_root + BLS_MODULUS) % BLS_MODULUS) -# # return o -# discard - -# #### `fft_field` - - -# proc fft_field(vals: openArray[BLSFieldElement], -# roots_of_unity: openArray[BLSFieldElement], -# inv: bool=false): openArray[BLSFieldElement] = -# # if inv: -# # # Inverse FFT -# # invlen = pow(len(vals), BLS_MODULUS - 2, BLS_MODULUS) -# # return [BLSFieldElement((int(x) * invlen) % BLS_MODULUS) -# # for x in xfft_field(vals, list(roots_of_unity[0:1]) + list(roots_of_unity[:0:-1]))] -# # else: -# # # Regular FFT -# # return xfft_field(vals, roots_of_unity) -# discard - - -# ### Polynomials in coefficient form - -# #### `polynomial_eval_to_coeff` - - -# proc polynomial_eval_to_coeff(polynomial: Polynomial): PolynomialCoeff = - -# discard """ -# Interpolates a polynomial (given in evaluation form) to a polynomial in coefficient form. -# """ -# # let roots_of_unity = compute_roots_of_unity(FIELD_ELEMENTS_PER_BLOB) -# # let polynomial_coeff = fft_field(bit_reversal_permutation(list(polynomial)), roots_of_unity, inv=True) - -# # return polynomial_coeff - -# discard -# #### `add_polynomialcoeff` - - -# proc add_polynomialcoeff(a: PolynomialCoeff, b: PolynomialCoeff): PolynomialCoeff = -# discard """ -# Sum the coefficient form polynomials ``a`` and ``b``. 
-# """ -# # a, b = (a, b) if len(a) >= len(b) else (b, a) -# # length_a = len(a) -# # length_b = len(b) -# # return [(a[i] + (b[i] if i < length_b else 0)) % BLS_MODULUS for i in range(length_a)] - -# discard -# #### `neg_polynomialcoeff` - - -# proc neg_polynomialcoeff(a: PolynomialCoeff): PolynomialCoeff = -# discard """ -# Negative of coefficient form polynomial ``a`` -# """ -# # return [(BLS_MODULUS - x) % BLS_MODULUS for x in a] - -# discard -# #### `multiply_polynomialcoeff` - - -# proc multiply_polynomialcoeff(a: PolynomialCoeff, b: PolynomialCoeff): PolynomialCoeff = -# discard """ -# Multiplies the coefficient form polynomials ``a`` and ``b`` -# """ -# # assert len(a) + len(b) <= FIELD_ELEMENTS_PER_EXT_BLOB - -# # r = [0] -# # for power, coef in enumerate(a): -# # summand = [0] * power + [int(coef) * int(x) % BLS_MODULUS for x in b] -# # r = add_polynomialcoeff(r, summand) -# # return r - -# discard -# #### `divide_polynomialcoeff` - - -# proc divide_polynomialcoeff(a: PolynomialCoeff, b: PolynomialCoeff): PolynomialCoeff = -# discard """ -# Long polynomial division for two coefficient form polynomials ``a`` and ``b`` -# """ -# # a = a.copy() # Make a copy since `a` is passed by reference -# # o: List[BLSFieldElement] = [] -# # apos = len(a) - 1 -# # bpos = len(b) - 1 -# # diff = apos - bpos -# # while diff >= 0: -# # quot = div(a[apos], b[bpos]) -# # o.insert(0, quot) -# # for i in range(bpos, -1, -1): -# # a[diff + i] = (int(a[diff + i]) - int(b[i] + BLS_MODULUS) * int(quot)) % BLS_MODULUS -# # apos -= 1 -# # diff -= 1 -# # return [x % BLS_MODULUS for x in o] - -# discard -# #### `shift_polynomialcoeff` - - -# proc shift_polynomialcoeff(polynomial_coeff: PolynomialCoeff, factor: BLSFieldElement): PolynomialCoeff = -# discard """ -# Shift the evaluation of a polynomial in coefficient form by factor. -# This results in a new polynomial g(x) = f(factor * x) -# """ -# # factor_power = 1 -# # inv_factor = pow(int(factor), BLS_MODULUS - 2, BLS_MODULUS) -# # o = [] -# # for p in polynomial_coeff: -# # o.append(int(p) * factor_power % BLS_MODULUS) -# # factor_power = factor_power * inv_factor % BLS_MODULUS -# # return o - -# discard -# #### `interpolate_polynomialcoeff` - - -# proc interpolate_polynomialcoeff(xs: openArray[BLSFieldElement], ys: openArray[BLSFieldElement]): PolynomialCoeff = -# discard """ -# Lagrange interpolation: Finds the lowest degree polynomial that takes the value ``ys[i]`` at ``x[i]`` -# for all i. -# Outputs a coefficient form polynomial. Leading coefficients may be zero. 
-# """ -# # assert len(xs) == len(ys) -# # r = [0] - -# # for i in range(len(xs)): -# # summand = [ys[i]] -# # for j in range(len(ys)): -# # if j != i: -# # weight_adjustment = bls_modular_inverse(int(xs[i]) - int(xs[j])) -# # summand = multiply_polynomialcoeff( -# # summand, [((BLS_MODULUS - int(weight_adjustment)) * int(xs[j])) % BLS_MODULUS, weight_adjustment] -# # ) -# # r = add_polynomialcoeff(r, summand) - -# # return r - -# discard -# #### `vanishing_polynomialcoeff` - - -# proc vanishing_polynomialcoeff(xs: openArray[BLSFieldElement]): PolynomialCoeff = -# discard """ -# Compute the vanishing polynomial on ``xs`` (in coefficient form) -# """ -# # p = [1] -# # for x in xs: -# # p = multiply_polynomialcoeff(p, [-int(x) + BLS_MODULUS, 1]) -# # return p - -# discard -# #### `evaluate_polynomialcoeff` - - -# proc evaluate_polynomialcoeff(polynomial_coeff: PolynomialCoeff, z: BLSFieldElement): BLSFieldElement = -# discard """ -# Evaluate a coefficient form polynomial at ``z`` using Horner's schema -# """ -# # y = 0 -# # for coef in polynomial_coeff[::-1]: -# # y = (int(y) * int(z) + int(coef)) % BLS_MODULUS -# # return BLSFieldElement(y % BLS_MODULUS) - -# discard -# ### KZG multiproofs - -# # Extended KZG functions for multiproofs - -# #### `compute_kzg_proof_multi_impl` - - -# proc compute_kzg_proof_multi_impl( -# polynomial_coeff: PolynomialCoeff, -# zs: Coset): (KzgProof, CosetEvals) = -# discard """ -# Compute a KZG multi-evaluation proof for a set of `k` points. - -# This is done by committing to the following quotient polynomial: -# Q(X) = f(X) - I(X) / Z(X) -# Where: -# - I(X) is the degree `k-1` polynomial that agrees with f(x) at all `k` points -# - Z(X) is the degree `k` polynomial that evaluates to zero on all `k` points - -# We further note that since the degree of I(X) is less than the degree of Z(X), -# the computation can be simplified in monomial form to Q(X) = f(X) / Z(X) -# """ - -# # # For all points, compute the evaluation of those points -# # ys = [evaluate_polynomialcoeff(polynomial_coeff, z) for z in zs] - -# # # Compute Z(X) -# # denominator_poly = vanishing_polynomialcoeff(zs) - -# # # Compute the quotient polynomial directly in monomial form -# # quotient_polynomial = divide_polynomialcoeff(polynomial_coeff, denominator_poly) - -# # return KZGProof(g1_lincomb(KZG_SETUP_G1_MONOMIAL[:len(quotient_polynomial)], quotient_polynomial)), ys - -# discard -# #### `verify_kzg_proof_multi_impl` - - -# proc verify_kzg_proof_multi_impl(commitment: KzgCommitment, -# zs: Coset, -# ys: CosetEvals, -# proof: KZGProof): bool = -# discard """ -# Verify a KZG multi-evaluation proof for a set of `k` points. 
- -# This is done by checking if the following equation holds: -# Q(x) Z(x) = f(X) - I(X) -# Where: -# f(X) is the polynomial that we want to verify opens at `k` points to `k` values -# Q(X) is the quotient polynomial computed by the prover -# I(X) is the degree k-1 polynomial that evaluates to `ys` at all `zs`` points -# Z(X) is the polynomial that evaluates to zero on all `k` points - -# The verifier receives the commitments to Q(X) and f(X), so they check the equation -# holds by using the following pairing equation: -# e([Q(X)]_1, [Z(X)]_2) == e([f(X)]_1 - [I(X)]_1, [1]_2) -# """ - -# # assert len(zs) == len(ys) - -# # # Compute [Z(X)]_2 -# # zero_poly = g2_lincomb(KZG_SETUP_G2_MONOMIAL[:len(zs) + 1], vanishing_polynomialcoeff(zs)) -# # # Compute [I(X)]_1 -# # interpolated_poly = g1_lincomb(KZG_SETUP_G1_MONOMIAL[:len(zs)], interpolate_polynomialcoeff(zs, ys)) - -# # return (bls.pairing_check([ -# # [bls.bytes48_to_G1(proof), bls.bytes96_to_G2(zero_poly)], -# # [ -# # bls.add(bls.bytes48_to_G1(commitment), bls.neg(bls.bytes48_to_G1(interpolated_poly))), -# # bls.neg(bls.bytes96_to_G2(KZG_SETUP_G2_MONOMIAL[0])), -# # ], -# # ])) - - -# ### Cell cosets - -# #### `coset_for_cell` - - -# proc coset_for_cell(cell_id: CellID): Coset = -# discard """ -# Get the coset for a given ``cell_id`` -# """ -# # assert cell_id < CELLS_PER_EXT_BLOB -# # roots_of_unity_brp = bit_reversal_permutation( -# # compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB) -# # ) -# # return Coset(roots_of_unity_brp[FIELD_ELEMENTS_PER_CELL * cell_id:FIELD_ELEMENTS_PER_CELL * (cell_id + 1)]) - - -# ## Cells - -# ### Cell computation - -# #### `compute_cells_and_kzg_proofs` - - -# proc compute_cells_and_kzg_proofs(blob: Blob): Tuple[ -# Vector[Cell, CELLS_PER_EXT_BLOB], -# Vector[KZGProof, CELLS_PER_EXT_BLOB]] = -# discard """ -# Compute all the cell proofs for an extended blob. This is an inefficient O(n^2) algorithm, -# for performant implementation the FK20 algorithm that runs in O(n log n) should be -# used instead. - -# Public method. -# """ -# # assert len(blob) == BYTES_PER_BLOB - -# # polynomial = blob_to_polynomial(blob) -# # polynomial_coeff = polynomial_eval_to_coeff(polynomial) - -# # cells = [] -# # proofs = [] - -# # for i in range(CELLS_PER_EXT_BLOB): -# # coset = coset_for_cell(CellID(i)) -# # proof, ys = compute_kzg_proof_multi_impl(polynomial_coeff, coset) -# # cells.append(coset_evals_to_cell(ys)) -# # proofs.append(proof) - -# # return cells, proofs - - -# #### `compute_cells` - - -# proc compute_cells(blob: Blob): array[CELLS_PER_EXT_BLOB, Cell]: -# discard """ -# Compute the cell data for an extended blob (without computing the proofs). - -# Public method. 
-# """ -# # assert len(blob) == BYTES_PER_BLOB - -# # polynomial = blob_to_polynomial(blob) -# # polynomial_coeff = polynomial_eval_to_coeff(polynomial) - -# # extended_data = fft_field(polynomial_coeff + [0] * FIELD_ELEMENTS_PER_BLOB, -# # compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB)) -# # extended_data_rbo = bit_reversal_permutation(extended_data) -# # cells = [] -# # for cell_id in range(CELLS_PER_EXT_BLOB): -# # start = cell_id * FIELD_ELEMENTS_PER_CELL -# # end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL -# # cells.append(coset_evals_to_cell(CosetEvals(extended_data_rbo[start:end]))) -# # return cells - - -# ### Cell verification - -# #### `verify_cell_kzg_proof` - - -# proc verify_cell_kzg_proof(commitment_bytes: Bytes48, -# cell_id: CellID, -# cell: Cell, -# proof_bytes: Bytes48): bool = -# discard """ -# Check a cell proof - -# Public method. -# """ -# # assert len(commitment_bytes) == BYTES_PER_COMMITMENT -# # assert cell_id < CELLS_PER_EXT_BLOB -# # assert len(cell) == BYTES_PER_CELL -# # assert len(proof_bytes) == BYTES_PER_PROOF - -# # coset = coset_for_cell(cell_id) - -# # return verify_kzg_proof_multi_impl( -# # bytes_to_kzg_commitment(commitment_bytes), -# # coset, -# # cell_to_coset_evals(cell), -# # bytes_to_kzg_proof(proof_bytes)) - - -# #### `verify_cell_kzg_proof_batch` - - -# proc verify_cell_kzg_proof_batch(row_commitments_bytes: openArray[Bytes48], -# row_indices: openArray[RowIndex], -# column_indices: openArray[ColumnIndex], -# cells: openArray[Cell], -# proofs_bytes: openArray[Bytes48]): bool = -# discard """ -# Verify a set of cells, given their corresponding proofs and their coordinates (row_id, column_id) in the blob -# matrix. The list of all commitments is also provided in row_commitments_bytes. - -# This function implements the naive algorithm of checking every cell -# individually; an efficient algorithm can be found here: -# https://ethresear.ch/t/a-universal-verification-equation-for-data-availability-sampling/13240 - -# This implementation does not require randomness, but for the algorithm that -# requires it, `RANDOM_CHALLENGE_KZG_CELL_BATCH_DOMAIN` should be used to compute -# the challenge value. - -# Public method. 
-# """ -# # assert len(cells) == len(proofs_bytes) == len(row_indices) == len(column_indices) -# # for commitment_bytes in row_commitments_bytes: -# # assert len(commitment_bytes) == BYTES_PER_COMMITMENT -# # for row_index in row_indices: -# # assert row_index < len(row_commitments_bytes) -# # for column_index in column_indices: -# # assert column_index < CELLS_PER_EXT_BLOB -# # for cell in cells: -# # assert len(cell) == BYTES_PER_CELL -# # for proof_bytes in proofs_bytes: -# # assert len(proof_bytes) == BYTES_PER_PROOF - -# # # Get commitments via row IDs -# # commitments_bytes = [row_commitments_bytes[row_index] for row_index in row_indices] - -# # # Get objects from bytes -# # commitments = [bytes_to_kzg_commitment(commitment_bytes) for commitment_bytes in commitments_bytes] -# # cosets_evals = [cell_to_coset_evals(cell) for cell in cells] -# # proofs = [bytes_to_kzg_proof(proof_bytes) for proof_bytes in proofs_bytes] - -# # return all( -# # verify_kzg_proof_multi_impl(commitment, coset_for_cell(column_index), coset_evals, proof) -# # for commitment, column_index, coset_evals, proof in zip(commitments, column_indices, cosets_evals, proofs) -# # ) - - -# ## Reconstruction - -# ### `construct_vanishing_polynomial` - - -# proc construct_vanishing_polynomial(missing_cell_ids: openArray[CellID]) -> tuple[ -# seq[BLSFieldElement], -# seq[BLSFieldElement]]: -# discard """ -# Given the cells that are missing from the data, compute the polynomial that vanishes at every point that -# corresponds to a missing field element. -# """ -# # # Get the small domain -# # roots_of_unity_reduced = compute_roots_of_unity(CELLS_PER_EXT_BLOB) - -# # # Compute polynomial that vanishes at all the missing cells (over the small domain) -# # short_zero_poly = vanishing_polynomialcoeff([ -# # roots_of_unity_reduced[reverse_bits(missing_cell_id, CELLS_PER_EXT_BLOB)] -# # for missing_cell_id in missing_cell_ids -# # ]) - -# # # Extend vanishing polynomial to full domain using the closed form of the vanishing polynomial over a coset -# # zero_poly_coeff = [BLSFieldElement(0)] * FIELD_ELEMENTS_PER_EXT_BLOB -# # for i, coeff in enumerate(short_zero_poly): -# # zero_poly_coeff[i * FIELD_ELEMENTS_PER_CELL] = coeff - -# # # Compute evaluations of the extended vanishing polynomial -# # zero_poly_eval = fft_field(zero_poly_coeff, -# # compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB)) -# # zero_poly_eval_brp = bit_reversal_permutation(zero_poly_eval) - -# # # Sanity check -# # for cell_id in range(CELLS_PER_EXT_BLOB): -# # start = cell_id * FIELD_ELEMENTS_PER_CELL -# # end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL -# # if cell_id in missing_cell_ids: -# # assert all(a == 0 for a in zero_poly_eval_brp[start:end]) -# # else: # cell_id in cell_ids -# # assert all(a != 0 for a in zero_poly_eval_brp[start:end]) - -# # return zero_poly_coeff, zero_poly_eval - - -# ### `recover_shifted_data` - - -# proc recover_shifted_data(cell_ids: openArray[CellID], -# cells: openArray[Cell], -# zero_poly_eval: openArray[BLSFieldElement], -# zero_poly_coeff: openArray[BLSFieldElement], -# roots_of_unity_extended: openArray[BLSFieldElement]): tuple[ -# seq[BLSFieldElement], -# seq[BLSFieldElement], -# BLSFieldElement] = -# discard """ -# Given Z(x), return polynomial Q_1(x)=(E*Z)(k*x) and Q_2(x)=Z(k*x) and k^{-1}. 
-# """ -# # shift_factor = BLSFieldElement(PRIMITIVE_ROOT_OF_UNITY) -# # shift_inv = div(BLSFieldElement(1), shift_factor) - -# # extended_evaluation_rbo = [0] * FIELD_ELEMENTS_PER_EXT_BLOB -# # for cell_id, cell in zip(cell_ids, cells): -# # start = cell_id * FIELD_ELEMENTS_PER_CELL -# # end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL -# # extended_evaluation_rbo[start:end] = cell -# # extended_evaluation = bit_reversal_permutation(extended_evaluation_rbo) - -# # # Compute (E*Z)(x) -# # extended_evaluation_times_zero = [BLSFieldElement(int(a) * int(b) % BLS_MODULUS) -# # for a, b in zip(zero_poly_eval, extended_evaluation)] - -# # extended_evaluations_fft = fft_field(extended_evaluation_times_zero, roots_of_unity_extended, inv=True) - -# # # Compute (E*Z)(k*x) -# # shifted_extended_evaluation = shift_polynomialcoeff(extended_evaluations_fft, shift_factor) -# # # Compute Z(k*x) -# # shifted_zero_poly = shift_polynomialcoeff(zero_poly_coeff, shift_factor) - -# # eval_shifted_extended_evaluation = fft_field(shifted_extended_evaluation, roots_of_unity_extended) -# # eval_shifted_zero_poly = fft_field(shifted_zero_poly, roots_of_unity_extended) - -# # return eval_shifted_extended_evaluation, eval_shifted_zero_poly, shift_inv - - -# ### `recover_original_data` - - -# proc recover_original_data(eval_shifted_extended_evaluation: openArray[BLSFieldElement], -# eval_shifted_zero_poly: openArray[BLSFieldElement], -# shift_inv: BLSFieldElement, -# roots_of_unity_extended: openArray[BLSFieldElement]): seq[BLSFieldElement] = -# discard """ -# Given Q_1, Q_2 and k^{-1}, compute P(x). -# """ -# # # Compute Q_3 = Q_1(x)/Q_2(x) = P(k*x) -# # eval_shifted_reconstructed_poly = [ -# # div(a, b) -# # for a, b in zip(eval_shifted_extended_evaluation, eval_shifted_zero_poly) -# # ] - -# # shifted_reconstructed_poly = fft_field(eval_shifted_reconstructed_poly, roots_of_unity_extended, inv=True) - -# # # Unshift P(k*x) by k^{-1} to get P(x) -# # reconstructed_poly = shift_polynomialcoeff(shifted_reconstructed_poly, shift_inv) - -# # reconstructed_data = bit_reversal_permutation(fft_field(reconstructed_poly, roots_of_unity_extended)) - -# # return reconstructed_data - - -# ### `recover_all_cells` - - -# proc recover_all_cells(cell_ids: openArray[CellID], cells: openArray[Cell]): openArray[Cell] = -# discard """ -# Recover all of the cells in the extended blob from FIELD_ELEMENTS_PER_EXT_BLOB evaluations, -# half of which can be missing. -# This algorithm uses FFTs to recover cells faster than using Lagrange implementation, as can be seen here: -# https://ethresear.ch/t/reed-solomon-erasure-code-recovery-in-n-log-2-n-time-with-ffts/3039 - -# A faster version thanks to Qi Zhou can be found here: -# https://github.com/ethereum/research/blob/51b530a53bd4147d123ab3e390a9d08605c2cdb8/polynomial_reconstruction/polynomial_reconstruction_danksharding.py - -# Public method. 
-# """ -# # assert len(cell_ids) == len(cells) -# # # Check we have enough cells to be able to perform the reconstruction -# # assert CELLS_PER_EXT_BLOB / 2 <= len(cell_ids) <= CELLS_PER_EXT_BLOB -# # # Check for duplicates -# # assert len(cell_ids) == len(set(cell_ids)) -# # # Check that each cell is the correct length -# # for cell in cells: -# # assert len(cell) == BYTES_PER_CELL - -# # # Get the extended domain -# # roots_of_unity_extended = compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB) - -# # # Convert cells to coset evals -# # cosets_evals = [cell_to_coset_evals(cell) for cell in cells] - -# # missing_cell_ids = [CellID(cell_id) for cell_id in range(CELLS_PER_EXT_BLOB) if cell_id not in cell_ids] -# # zero_poly_coeff, zero_poly_eval = construct_vanishing_polynomial(missing_cell_ids) - -# # eval_shifted_extended_evaluation, eval_shifted_zero_poly, shift_inv = recover_shifted_data( -# # cell_ids, -# # cosets_evals, -# # zero_poly_eval, -# # zero_poly_coeff, -# # roots_of_unity_extended, -# # ) - -# # reconstructed_data = recover_original_data( -# # eval_shifted_extended_evaluation, -# # eval_shifted_zero_poly, -# # shift_inv, -# # roots_of_unity_extended, -# # ) - -# # for cell_id, coset_evals in zip(cell_ids, cosets_evals): -# # start = cell_id * FIELD_ELEMENTS_PER_CELL -# # end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL -# # assert reconstructed_data[start:end] == coset_evals - -# # reconstructed_data_as_cells = [ -# # coset_evals_to_cell(reconstructed_data[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL]) -# # for i in range(CELLS_PER_EXT_BLOB)] - -# # return reconstructed_data_as_cells - - - -# #### `get_custody_columns` - proc sortedColumnIndices*(columnsPerSubnet: ColumnIndex, subnetIds: HashSet[uint64]): seq[ColumnIndex] = var res: seq[ColumnIndex] = @[] for i in 0 ..< columnsPerSubnet: @@ -663,101 +35,164 @@ proc sortedColumnIndices*(columnsPerSubnet: ColumnIndex, subnetIds: HashSet[uint proc get_custody_columns*(node_id: NodeId, custody_subnet_count: uint64): Result[seq[ColumnIndex], cstring] = - # assert custody_subnet_count <= DATA_COLUMN_SIDECAR_SUBNET_COUNT - if not (custody_subnet_count <= DATA_COLUMN_SIDECAR_SUBNET_COUNT): - return err("Eip7594: Custody subnet count exceeds the DATA_COLUMN_SIDECAR_SUBNET_COUNT") - - var subnet_ids: HashSet[uint64] - var current_id = node_id - - while subnet_ids.len < int(custody_subnet_count): - - # var subnet_id_bytes: seq[byte] - let subnet_id_bytes = eth2digest(current_id.toBytesLE().toOpenArray(0,8)) - var subnet_id = bytes_to_uint64(subnet_id_bytes.data) mod DATA_COLUMN_SIDECAR_SUBNET_COUNT - - if subnet_id notin subnet_ids: - subnet_ids.incl(subnet_id) - - if current_id == UInt256.high.NodeId: - # Overflow prevention - current_id = NodeId(StUint[256].zero) - current_id += NodeId(StUint[256].one) - - # assert len(subnet_ids) == len(set(subnet_ids)) - if not (subnet_ids.len == subnet_ids.len): - return err("Eip7594: Subnet ids are not unique") - - # columns_per_subnet = NUMBER_OF_COLUMNS // DATA_COLUMN_SIDECAR_SUBNET_COUNT - let columns_per_subnet = NUMBER_OF_COLUMNS div DATA_COLUMN_SIDECAR_SUBNET_COUNT + # assert custody_subnet_count <= DATA_COLUMN_SIDECAR_SUBNET_COUNT + if not (custody_subnet_count <= DATA_COLUMN_SIDECAR_SUBNET_COUNT): + return err("Eip7594: Custody subnet count exceeds the DATA_COLUMN_SIDECAR_SUBNET_COUNT") + + var subnet_ids: HashSet[uint64] + var current_id = node_id + + while subnet_ids.len < int(custody_subnet_count): + + # var subnet_id_bytes: seq[byte] + let subnet_id_bytes = 
eth2digest(current_id.toBytesLE().toOpenArray(0,8)) + var subnet_id = bytes_to_uint64(subnet_id_bytes.data) mod DATA_COLUMN_SIDECAR_SUBNET_COUNT - ok(sortedColumnIndices(ColumnIndex(columns_per_subnet), subnet_ids)) - - -# #### `compute_extended_matrix` + if subnet_id notin subnet_ids: + subnet_ids.incl(subnet_id) + if current_id == UInt256.high.NodeId: + # Overflow prevention + current_id = NodeId(StUint[256].zero) + current_id += NodeId(StUint[256].one) -# proc compute_extended_matrix(blobs: openArray[Blob]): ExtendedMatrix = -# discard """ -# Return the full ``ExtendedMatrix``. + # assert len(subnet_ids) == len(set(subnet_ids)) + if not (subnet_ids.len == subnet_ids.len): + return err("Eip7594: Subnet ids are not unique") -# This helper demonstrates the relationship between blobs and ``ExtendedMatrix``. -# The data structure for storing cells is implementation-dependent. -# """ -# # extended_matrix = [] -# # for blob in blobs: -# # extended_matrix.extend(compute_cells(blob)) -# # return ExtendedMatrix(extended_matrix) + # columns_per_subnet = NUMBER_OF_COLUMNS // DATA_COLUMN_SIDECAR_SUBNET_COUNT + let columns_per_subnet = NUMBER_OF_COLUMNS div DATA_COLUMN_SIDECAR_SUBNET_COUNT + + ok(sortedColumnIndices(ColumnIndex(columns_per_subnet), subnet_ids)) -# #### `recover_matrix` - - -# proc recover_matrix(cells_dict: Table[(BlobIndex, CellID), Cell], blob_count: uint64): ExtendedMatrix = -# discard """ -# Return the recovered ``ExtendedMatrix``. - -# This helper demonstrates how to apply ``recover_all_cells``. -# The data structure for storing cells is implementation-dependent. -# """ -# # extended_matrix: List[Cell] = [] -# # for blob_index in range(blob_count): -# # cell_ids = [cell_id for b_index, cell_id in cells_dict.keys() if b_index == blob_index] -# # cells = [cells_dict[(BlobIndex(blob_index), cell_id)] for cell_id in cell_ids] - -# # all_cells_for_row = recover_all_cells(cell_ids, cells) -# # extended_matrix.extend(all_cells_for_row) -# # return ExtendedMatrix(extended_matrix) +# #### `compute_extended_matrix` +proc compute_extended_matrix* (blobs: seq[KzgBlob]): Result[ExtendedMatrix, cstring] = + # This helper demonstrates the relationship between blobs and `ExtendedMatrix` + var extended_matrix: ExtendedMatrix + for blob in blobs: + let computed_cell = computeCellsAndKzgProofs(blob) + discard extended_matrix.add(computed_cell) + ok(extended_matrix) + +proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), Cell], blobCount: uint64): Result[ExtendedMatrix, cstring] = + # This helper demonstrates how to apply recover_all_cells + # The data structure for storing cells is implementation-dependent + + var extended_matrix: ExtendedMatrix + + for blobIndex in 0'u64.. 
Date: Wed, 29 May 2024 14:02:56 +0530 Subject: [PATCH 02/35] feat: added kzg specs to gossip validation rules, fixed peerdas from C API --- .../gossip_processing/gossip_validation.nim | 26 +++- beacon_chain/spec/datatypes/eip7594.nim | 8 +- beacon_chain/spec/eip7594_helpers.nim | 140 +++++++++++------- 3 files changed, 117 insertions(+), 57 deletions(-) diff --git a/beacon_chain/gossip_processing/gossip_validation.nim b/beacon_chain/gossip_processing/gossip_validation.nim index 7855f166e4..20a1ccffa5 100644 --- a/beacon_chain/gossip_processing/gossip_validation.nim +++ b/beacon_chain/gossip_processing/gossip_validation.nim @@ -13,7 +13,7 @@ import results, # Internals ../spec/[ - beaconstate, state_transition_block, forks, helpers, network, signatures], + beaconstate, state_transition_block, forks, helpers, network, signatures, eip7594_helpers], ../consensus_object_pools/[ attestation_pool, blockchain_dag, blob_quarantine, block_quarantine, spec_cache, light_client_pool, sync_committee_msg_pool, @@ -207,6 +207,18 @@ func check_blob_sidecar_inclusion_proof( ok() +func check_data_column_sidecar_inclusion_proof( + data_column_sidecar: DataColumnSidecar): Result[void, ValidationError] = + let res = data_column_sidecar.verify_data_column_sidecar_inclusion_proof() + if res.isErr: + return errReject(res.error) + +proc check_data_column_sidecar_kzg_proofs( + data_column_sidecar: DataColumnSidecar): Result[void, ValidationError] = + let res = data_column_sidecar.verify_data_column_sidecar_kzg_proofs() + if res.isErr: + return errReject(res.error) + # Gossip Validation # ---------------------------------------------------------------- @@ -502,11 +514,19 @@ proc validateDataColumnSidecar*( if not (block_header.slot > dag.finalizedHead.slot): return errIgnore("DataColumnSidecar: slot already finalized") - # TODO: [REJECT] The sidecar's `kzg_commitments` inclusion proof is valid as verified by + # [REJECT] The sidecar's `kzg_commitments` inclusion proof is valid as verified by # `verify_data_column_sidecar_inclusion_proof(sidecar)`. 
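For reference, the inclusion-proof rule above reduces to the standard phase0 `is_valid_merkle_branch` check applied to `hash_tree_root(sidecar.kzg_commitments)` at the commitments' generalized index in the block body (the `verify_data_column_sidecar_inclusion_proof` helper added later in this series calls exactly that). A minimal Python sketch of the underlying check, assuming SHA-256 hashing and 32-byte nodes (illustration only, not the Nim code in this patch):

from hashlib import sha256

def hash_pair(a: bytes, b: bytes) -> bytes:
    # SSZ Merkle nodes are the 32-byte SHA-256 hash of the concatenated children.
    return sha256(a + b).digest()

def is_valid_merkle_branch(leaf: bytes, branch: list[bytes],
                           depth: int, index: int, root: bytes) -> bool:
    # Fold the branch from the leaf up to the root; at each level the bit of
    # `index` selects whether the sibling sits on the left or the right.
    value = leaf
    for i in range(depth):
        if (index >> i) & 1:
            value = hash_pair(branch[i], value)
        else:
            value = hash_pair(value, branch[i])
    return value == root
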
+ block: + let v = check_data_column_sidecar_inclusion_proof(data_column_sidecar) + if v.isErr: + return dag.checkedReject(v.error) - # TODO: [REJECT] The sidecar's column data is valid as + # [REJECT] The sidecar's column data is valid as # verified by `verify_data_column_kzg_proofs(sidecar)` + block: + let r = check_data_column_sidecar_kzg_proofs(data_column_sidecar) + if r.isErr: + return dag.checkedReject(r.error) # [IGNORE] The sidecar is the first sidecar for the tuple # (block_header.slot, block_header.proposer_index, blob_sidecar.index) diff --git a/beacon_chain/spec/datatypes/eip7594.nim b/beacon_chain/spec/datatypes/eip7594.nim index bc292558e1..143f6df77f 100644 --- a/beacon_chain/spec/datatypes/eip7594.nim +++ b/beacon_chain/spec/datatypes/eip7594.nim @@ -20,6 +20,8 @@ type Coset* = array[FIELD_ELEMENTS_PER_CELL, BLSFieldElement] CosetEvals* = array[FIELD_ELEMENTS_PER_CELL, BLSFieldElement] Cell* = KzgCell + Cells* = KzgCells + CellsAndProofs* = KzgCellsAndKzgProofs CellID* = uint64 RowIndex* = uint64 ColumnIndex* = uint64 @@ -34,8 +36,8 @@ const TARGET_NUMBER_OF_PEERS* = 70 type - DataColumn* = List[Cell, Limit(MAX_BLOB_COMMITMENTS_PER_BLOCK)] - ExtendedMatrix* = List[Cell, Limit(MAX_CELLS_IN_EXTENDED_MATRIX)] + DataColumn* = List[KzgCell, Limit(MAX_BLOB_COMMITMENTS_PER_BLOCK)] + ExtendedMatrix* = List[KzgCell, Limit(MAX_CELLS_IN_EXTENDED_MATRIX)] DataColumnSidecar* = object index*: ColumnIndex # Index of column in extended matrix @@ -44,7 +46,7 @@ type kzg_proofs*: List[KzgProof, Limit(MAX_BLOB_COMMITMENTS_PER_BLOCK)] signed_block_header*: SignedBeaconBlockHeader kzg_commitments_inclusion_proof*: - array[KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH, KzgBytes32] + array[KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH, Eth2Digest] func shortLog*(v: DataColumnSidecar): auto = ( diff --git a/beacon_chain/spec/eip7594_helpers.nim b/beacon_chain/spec/eip7594_helpers.nim index da87d67f1b..631c04e90a 100644 --- a/beacon_chain/spec/eip7594_helpers.nim +++ b/beacon_chain/spec/eip7594_helpers.nim @@ -8,21 +8,20 @@ {.push raises: [].} # Uncategorized helper functions from the spec - import tables, algorithm, std/macros, - results, - stew/assign2, - nim-ssz-serialization/ssz_serialization/proofs, + stew/results, + ssz_serialization/proofs, chronicles, - std/sequtils, ./[beacon_time, crypto], eth/p2p/discoveryv5/[node], ./helpers, ./datatypes/[eip7594, deneb] + +var ctx: KzgCtx proc sortedColumnIndices*(columnsPerSubnet: ColumnIndex, subnetIds: HashSet[uint64]): seq[ColumnIndex] = var res: seq[ColumnIndex] = @[] @@ -33,6 +32,7 @@ proc sortedColumnIndices*(columnsPerSubnet: ColumnIndex, subnetIds: HashSet[uint res.sort() res +# https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/specs/_features/eip7594/das-core.md#get_custody_columns proc get_custody_columns*(node_id: NodeId, custody_subnet_count: uint64): Result[seq[ColumnIndex], cstring] = # assert custody_subnet_count <= DATA_COLUMN_SIDECAR_SUBNET_COUNT @@ -65,18 +65,23 @@ proc get_custody_columns*(node_id: NodeId, custody_subnet_count: uint64): Result ok(sortedColumnIndices(ColumnIndex(columns_per_subnet), subnet_ids)) - -# #### `compute_extended_matrix` - +# https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/specs/_features/eip7594/das-core.md#compute_extended_matrix proc compute_extended_matrix* (blobs: seq[KzgBlob]): Result[ExtendedMatrix, cstring] = # This helper demonstrates the relationship between blobs and `ExtendedMatrix` var extended_matrix: ExtendedMatrix for blob in 
blobs: - let computed_cell = computeCellsAndKzgProofs(blob) - discard extended_matrix.add(computed_cell) + let res = computeCells(ctx, blob) + + if res.isErr: + return err("Error computing kzg cells and kzg proofs") + + discard extended_matrix.add(res.get()) + ok(extended_matrix) -proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), Cell], blobCount: uint64): Result[ExtendedMatrix, cstring] = +# https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/specs/_features/eip7594/das-core.md#recover_matrix +proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), KzgCell], blobCount: uint64): Result[ExtendedMatrix, cstring] = + # This helper demonstrates how to apply recover_all_cells # The data structure for storing cells is implementation-dependent @@ -92,7 +97,7 @@ proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), Cell], blobCount: ui if blIdx == blobIndex: cellIds.add(cellId) - var cells: seq[Cell] = @[] + var cells: seq[KzgCell] = @[] for cellId in cellIds: var interim_key = (BlobIndex(blobIndex), cellId) @@ -102,79 +107,91 @@ proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), Cell], blobCount: ui cells.add(cell) except: debug "DataColumn: Key not found in Cell Dictionary", interim_key - var allCellsForRow: Cells - allCellsForRow = recoverAllCells(cellIds, cells) - discard extended_matrix.add(allCellsForRow) - ok(extended_matrix) + let allCellsForRow = recoverAllCells(ctx, cellIds, cells) + discard extended_matrix.add(allCellsForRow.get()) -proc get_data_column_sidecars*(signed_block: deneb.SignedBeaconBlock, blobs: seq[KzgBlob]): Result[seq[DataColumnSidecar]] = + ok(extended_matrix) -# #### `get_data_column_sidecars` +# https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/specs/_features/eip7594/das-core.md#get_data_column_sidecars +proc get_data_column_sidecars*(signed_block: deneb.SignedBeaconBlock, blobs: seq[KzgBlob]): Result[seq[DataColumnSidecar], cstring] = + var sidecar: DataColumnSidecar var signed_block_header: deneb.SignedBeaconBlockHeader var blck = signed_block.message - let - kzgCommitmentInclusionProof = build_proof(blck.body, 32'u64) - - if kzgCommitmentInclusionProof.isErr(): - fatal "EIP7549: Could not compute Merkle proof" - var cellsAndProofs: seq[CellsAndProofs] + var cellsAndProofs: seq[KzgCellsAndKzgProofs] = @[] for blob in blobs: let - computed_cell = computeCellsAndKzgProofs(blob) + computed_cell = computeCellsAndKzgProofs(ctx, blob) if computed_cell.isErr(): fatal "EIP7549: Could not compute cells" - cellsAndProofs.add(computed_cell) + cellsAndProofs.add(computed_cell.get()) let blobCount = blobs.len - var cells: seq[seq[Cell]] = @[] - var proofs: seq[seq[KzgProof]] = @[] + var + cells: seq[seq[KzgCell]] + proofs: seq[seq[KzgProof]] for i in 0.. Date: Wed, 29 May 2024 14:12:01 +0530 Subject: [PATCH 03/35] fix copyright year, and push raises --- beacon_chain/spec/datatypes/eip7594.nim | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/beacon_chain/spec/datatypes/eip7594.nim b/beacon_chain/spec/datatypes/eip7594.nim index 143f6df77f..b1f4d7fc53 100644 --- a/beacon_chain/spec/datatypes/eip7594.nim +++ b/beacon_chain/spec/datatypes/eip7594.nim @@ -1,3 +1,12 @@ +# beacon_chain +# Copyright (c) 2022-2024 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
+# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. + +{.push raises: [].} + import "."/[base, deneb], kzg4844 export base, kzg4844 From 80387f13065c299f157b3b0b0f9685a9baf7b47e Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Thu, 30 May 2024 03:51:32 +0530 Subject: [PATCH 04/35] fix: code styles --- beacon_chain/spec/eip7594_helpers.nim | 40 +++++++++++++-------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/beacon_chain/spec/eip7594_helpers.nim b/beacon_chain/spec/eip7594_helpers.nim index 631c04e90a..ffa9372bc7 100644 --- a/beacon_chain/spec/eip7594_helpers.nim +++ b/beacon_chain/spec/eip7594_helpers.nim @@ -9,9 +9,7 @@ # Uncategorized helper functions from the spec import - tables, - algorithm, - std/macros, + std/[algorithm, macros, tables], stew/results, ssz_serialization/proofs, chronicles, @@ -39,14 +37,14 @@ proc get_custody_columns*(node_id: NodeId, custody_subnet_count: uint64): Result if not (custody_subnet_count <= DATA_COLUMN_SIDECAR_SUBNET_COUNT): return err("Eip7594: Custody subnet count exceeds the DATA_COLUMN_SIDECAR_SUBNET_COUNT") - var subnet_ids: HashSet[uint64] - var current_id = node_id + var + subnet_ids: HashSet[uint64] + current_id = node_id while subnet_ids.len < int(custody_subnet_count): - - # var subnet_id_bytes: seq[byte] let subnet_id_bytes = eth2digest(current_id.toBytesLE().toOpenArray(0,8)) - var subnet_id = bytes_to_uint64(subnet_id_bytes.data) mod DATA_COLUMN_SIDECAR_SUBNET_COUNT + var subnet_id = bytes_to_uint64(subnet_id_bytes.data) mod + DATA_COLUMN_SIDECAR_SUBNET_COUNT if subnet_id notin subnet_ids: subnet_ids.incl(subnet_id) @@ -81,7 +79,6 @@ proc compute_extended_matrix* (blobs: seq[KzgBlob]): Result[ExtendedMatrix, cstr # https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/specs/_features/eip7594/das-core.md#recover_matrix proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), KzgCell], blobCount: uint64): Result[ExtendedMatrix, cstring] = - # This helper demonstrates how to apply recover_all_cells # The data structure for storing cells is implementation-dependent @@ -97,7 +94,7 @@ proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), KzgCell], blobCount: if blIdx == blobIndex: cellIds.add(cellId) - var cells: seq[KzgCell] = @[] + var cells: seq[KzgCell] for cellId in cellIds: var interim_key = (BlobIndex(blobIndex), cellId) @@ -109,13 +106,13 @@ proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), KzgCell], blobCount: debug "DataColumn: Key not found in Cell Dictionary", interim_key let allCellsForRow = recoverAllCells(ctx, cellIds, cells) - discard extended_matrix.add(allCellsForRow.get()) + let check = extended_matrix.add(allCellsForRow.get()) + doAssert check == true, "DataColumn: Could not add cells to the extended matrix" ok(extended_matrix) # https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/specs/_features/eip7594/das-core.md#get_data_column_sidecars proc get_data_column_sidecars*(signed_block: deneb.SignedBeaconBlock, blobs: seq[KzgBlob]): Result[seq[DataColumnSidecar], cstring] = - var sidecar: DataColumnSidecar var signed_block_header: deneb.SignedBeaconBlockHeader var blck = signed_block.message @@ -194,18 +191,20 @@ proc verify_data_column_sidecar_kzg_proofs*(sidecar: DataColumnSidecar): Result[ return err("EIP7594: Data column sidecar kzg_commitments 
length is not equal to the kzg_proofs length") # Iterate through the row indices - var rowIndices: seq[RowIndex] = @[] + var rowIndices: seq[RowIndex] for i in 0.. Date: Thu, 30 May 2024 03:53:24 +0530 Subject: [PATCH 05/35] fix:reduced blank lines --- beacon_chain/spec/eip7594_helpers.nim | 3 --- 1 file changed, 3 deletions(-) diff --git a/beacon_chain/spec/eip7594_helpers.nim b/beacon_chain/spec/eip7594_helpers.nim index ffa9372bc7..6763c7599b 100644 --- a/beacon_chain/spec/eip7594_helpers.nim +++ b/beacon_chain/spec/eip7594_helpers.nim @@ -166,7 +166,6 @@ proc get_data_column_sidecars*(signed_block: deneb.SignedBeaconBlock, blobs: seq proc validate_data_column_sidecar*( expected_commitments: seq[KzgCommitment], rowIndex: seq[RowIndex], columnIndex: seq[ColumnIndex], column: seq[KzgCell], proofs: seq[KzgProof]): Result[void, string] = - let res = verifyCellKzgProofBatch(expected_commitments, rowIndex, columnIndex, column, proofs).valueOr: return err("DataColumnSidecar: Proof verification error: " & error()) @@ -220,7 +219,6 @@ proc verify_data_column_sidecar_kzg_proofs*(sidecar: DataColumnSidecar): Result[ # https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/specs/_features/eip7594/p2p-interface.md#verify_data_column_sidecar_inclusion_proof proc verify_data_column_sidecar_inclusion_proof*(sidecar: DataColumnSidecar): Result[void, string] = - # Verify if the given KZG commitments are included in the beacon block let gindex = kzg_commitment_inclusion_proof_gindex(sidecar.index) if not is_valid_merkle_branch( @@ -233,4 +231,3 @@ proc verify_data_column_sidecar_inclusion_proof*(sidecar: DataColumnSidecar): Re return err("DataColumnSidecar: inclusion proof not valid") ok() - \ No newline at end of file From 538ce0a805d6a4793d5a7dc755611dd038daad22 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Fri, 31 May 2024 13:15:03 +0530 Subject: [PATCH 06/35] fix: added global ctx verification in computeCellsAndProofs and recoverAllCells --- beacon_chain/spec/eip7594_helpers.nim | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/beacon_chain/spec/eip7594_helpers.nim b/beacon_chain/spec/eip7594_helpers.nim index 6763c7599b..0eb52001fc 100644 --- a/beacon_chain/spec/eip7594_helpers.nim +++ b/beacon_chain/spec/eip7594_helpers.nim @@ -17,9 +17,6 @@ import eth/p2p/discoveryv5/[node], ./helpers, ./datatypes/[eip7594, deneb] - - -var ctx: KzgCtx proc sortedColumnIndices*(columnsPerSubnet: ColumnIndex, subnetIds: HashSet[uint64]): seq[ColumnIndex] = var res: seq[ColumnIndex] = @[] @@ -68,7 +65,7 @@ proc compute_extended_matrix* (blobs: seq[KzgBlob]): Result[ExtendedMatrix, cstr # This helper demonstrates the relationship between blobs and `ExtendedMatrix` var extended_matrix: ExtendedMatrix for blob in blobs: - let res = computeCells(ctx, blob) + let res = computeCells(blob) if res.isErr: return err("Error computing kzg cells and kzg proofs") @@ -76,7 +73,7 @@ proc compute_extended_matrix* (blobs: seq[KzgBlob]): Result[ExtendedMatrix, cstr discard extended_matrix.add(res.get()) ok(extended_matrix) - + # https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/specs/_features/eip7594/das-core.md#recover_matrix proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), KzgCell], blobCount: uint64): Result[ExtendedMatrix, cstring] = # This helper demonstrates how to apply recover_all_cells @@ -105,7 +102,7 @@ proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), KzgCell], blobCount: except: debug 
"DataColumn: Key not found in Cell Dictionary", interim_key - let allCellsForRow = recoverAllCells(ctx, cellIds, cells) + let allCellsForRow = recoverAllCells(cellIds, cells) let check = extended_matrix.add(allCellsForRow.get()) doAssert check == true, "DataColumn: Could not add cells to the extended matrix" @@ -121,7 +118,7 @@ proc get_data_column_sidecars*(signed_block: deneb.SignedBeaconBlock, blobs: seq for blob in blobs: let - computed_cell = computeCellsAndKzgProofs(ctx, blob) + computed_cell = computeCellsAndKzgProofs(blob) if computed_cell.isErr(): fatal "EIP7549: Could not compute cells" From e5f82cac6f4ad75d3416af330441dde3fbbe0f5a Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Fri, 31 May 2024 13:39:40 +0530 Subject: [PATCH 07/35] upstream fix in kzg4844-c --- vendor/nim-kzg4844 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-kzg4844 b/vendor/nim-kzg4844 index 01216c0b8e..664846ed9b 160000 --- a/vendor/nim-kzg4844 +++ b/vendor/nim-kzg4844 @@ -1 +1 @@ -Subproject commit 01216c0b8ebcd0d3466fbfb08a87e96dc1b7c305 +Subproject commit 664846ed9b2fe5371e336dd12bedf9f80b5d9eef From 07c3ec5e663954fa80ca11d6e2208eec898ee1fa Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Fri, 31 May 2024 17:53:29 +0530 Subject: [PATCH 08/35] switched c-kzg-4844 --- vendor/nim-kzg4844 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-kzg4844 b/vendor/nim-kzg4844 index 664846ed9b..b3612edd29 160000 --- a/vendor/nim-kzg4844 +++ b/vendor/nim-kzg4844 @@ -1 +1 @@ -Subproject commit 664846ed9b2fe5371e336dd12bedf9f80b5d9eef +Subproject commit b3612edd296137e6d383223a080d6dbe8370eebb From ca19e121360306c6fc18d2b91495b0747b9256ae Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Sun, 2 Jun 2024 10:39:45 +0530 Subject: [PATCH 09/35] experimental chenges --- .../gossip_processing/gossip_validation.nim | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/beacon_chain/gossip_processing/gossip_validation.nim b/beacon_chain/gossip_processing/gossip_validation.nim index 20a1ccffa5..cbf17f8352 100644 --- a/beacon_chain/gossip_processing/gossip_validation.nim +++ b/beacon_chain/gossip_processing/gossip_validation.nim @@ -521,22 +521,22 @@ proc validateDataColumnSidecar*( if v.isErr: return dag.checkedReject(v.error) - # [REJECT] The sidecar's column data is valid as - # verified by `verify_data_column_kzg_proofs(sidecar)` - block: - let r = check_data_column_sidecar_kzg_proofs(data_column_sidecar) - if r.isErr: - return dag.checkedReject(r.error) + # # [REJECT] The sidecar's column data is valid as + # # verified by `verify_data_column_kzg_proofs(sidecar)` + # block: + # let r = check_data_column_sidecar_kzg_proofs(data_column_sidecar) + # if r.isErr: + # return dag.checkedReject(r.error) # [IGNORE] The sidecar is the first sidecar for the tuple # (block_header.slot, block_header.proposer_index, blob_sidecar.index) # with valid header signature, sidecar inclusion proof, and kzg proof. 
let block_root = hash_tree_root(block_header) if dag.getBlockRef(block_root).isSome(): - return errIgnore("BlobSidecar: already have block") + return errIgnore("DataColumnSidecar: already have block") if blobQuarantine[].hasBlob( block_header.slot, block_header.proposer_index, data_column_sidecar.index): - return errIgnore("BlobSidecar: already have valid blob from same proposer") + return errIgnore("DataColumnSidecar: already have valid blob from same proposer") # [IGNORE] The sidecar's block's parent (defined by # `block_header.parent_root`) has been seen (via both gossip and @@ -545,13 +545,13 @@ proc validateDataColumnSidecar*( # # [REJECT] The sidecar's block's parent (defined by # `block_header.parent_root`) passes validation. - let parent = dag.getBlockRef(block_header.parent_root).valueOr: - if block_header.parent_root in quarantine[].unviable: - quarantine[].addUnviable(block_root) - return dag.checkedReject("DataColumnSidecar: parent not validated") - else: - quarantine[].addMissing(block_header.parent_root) - return errIgnore("DataColumnSidecar: parent not found") + # let parent = dag.getBlockRef(block_header.parent_root).valueOr: + # if block_header.parent_root in quarantine[].unviable: + # quarantine[].addUnviable(block_root) + # return dag.checkedReject("DataColumnSidecar: parent not validated") + # else: + # quarantine[].addMissing(block_header.parent_root) + # return errIgnore("DataColumnSidecar: parent not found") # [REJECT] The sidecar is proposed by the expected `proposer_index` # for the block's slot in the context of the current shuffling @@ -562,10 +562,10 @@ proc validateDataColumnSidecar*( # REJECT, instead IGNORE this message. let proposer = getProposer(dag, parent, block_header.slot).valueOr: warn "cannot compute proposer for blob" - return errIgnore("BlobSidecar: Cannot compute proposer") # internal issue + return errIgnore("DataColumnSidecar: Cannot compute proposer") # internal issue if uint64(proposer) != block_header.proposer_index: - return dag.checkedReject("BlobSidecar: Unexpected proposer") + return dag.checkedReject("DataColumnSidecar: Unexpected proposer") # [REJECT] The proposer signature of `blob_sidecar.signed_block_header`, # is valid with respect to the `block_header.proposer_index` pubkey. From 578a10693fe11ed8d1d1e8d34d8b9b3522c16734 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Sun, 2 Jun 2024 10:46:10 +0530 Subject: [PATCH 10/35] conditional disabling --- .../gossip_processing/gossip_validation.nim | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/beacon_chain/gossip_processing/gossip_validation.nim b/beacon_chain/gossip_processing/gossip_validation.nim index cbf17f8352..4710c69047 100644 --- a/beacon_chain/gossip_processing/gossip_validation.nim +++ b/beacon_chain/gossip_processing/gossip_validation.nim @@ -545,13 +545,13 @@ proc validateDataColumnSidecar*( # # [REJECT] The sidecar's block's parent (defined by # `block_header.parent_root`) passes validation. 
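The two parent-related rules quoted above differ in outcome: a parent that is simply unknown leads only to IGNORE (the sidecar can be retried once the parent arrives), while a parent already known to have failed validation is a hard REJECT. A small Python sketch of that decision, with hypothetical `known_blocks` and `unviable_roots` sets standing in for the DAG and the quarantine (illustration only; the Nim code below expresses this via `dag.getBlockRef` and `quarantine[].unviable`):

def classify_parent(parent_root, known_blocks, unviable_roots):
    # Parent already marked invalid: the sidecar can never become valid.
    if parent_root in unviable_roots:
        return "REJECT"
    # Parent not seen yet: queue the sidecar and request the parent.
    if parent_root not in known_blocks:
        return "IGNORE"
    # Parent known and valid: continue with the remaining checks.
    return "ACCEPT"
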
- # let parent = dag.getBlockRef(block_header.parent_root).valueOr: - # if block_header.parent_root in quarantine[].unviable: - # quarantine[].addUnviable(block_root) - # return dag.checkedReject("DataColumnSidecar: parent not validated") - # else: - # quarantine[].addMissing(block_header.parent_root) - # return errIgnore("DataColumnSidecar: parent not found") + let parent = dag.getBlockRef(block_header.parent_root).valueOr: + if block_header.parent_root in quarantine[].unviable: + quarantine[].addUnviable(block_root) + return dag.checkedReject("DataColumnSidecar: parent not validated") + else: + quarantine[].addMissing(block_header.parent_root) + # return errIgnore("DataColumnSidecar: parent not found") # [REJECT] The sidecar is proposed by the expected `proposer_index` # for the block's slot in the context of the current shuffling From caf5557d8b2993643895a6f6a3260fa5b3dcdb48 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Sun, 2 Jun 2024 10:47:28 +0530 Subject: [PATCH 11/35] oops --- beacon_chain/gossip_processing/gossip_validation.nim | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/beacon_chain/gossip_processing/gossip_validation.nim b/beacon_chain/gossip_processing/gossip_validation.nim index 4710c69047..206917509a 100644 --- a/beacon_chain/gossip_processing/gossip_validation.nim +++ b/beacon_chain/gossip_processing/gossip_validation.nim @@ -549,8 +549,8 @@ proc validateDataColumnSidecar*( if block_header.parent_root in quarantine[].unviable: quarantine[].addUnviable(block_root) return dag.checkedReject("DataColumnSidecar: parent not validated") - else: - quarantine[].addMissing(block_header.parent_root) + # else: + # quarantine[].addMissing(block_header.parent_root) # return errIgnore("DataColumnSidecar: parent not found") # [REJECT] The sidecar is proposed by the expected `proposer_index` From c35b5f1b4b5ef4aff54fa5a27ad8630e6fb61b82 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Sun, 2 Jun 2024 10:52:13 +0530 Subject: [PATCH 12/35] disable more --- .../gossip_processing/gossip_validation.nim | 82 +++++++++---------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/beacon_chain/gossip_processing/gossip_validation.nim b/beacon_chain/gossip_processing/gossip_validation.nim index 206917509a..eba2b1d06d 100644 --- a/beacon_chain/gossip_processing/gossip_validation.nim +++ b/beacon_chain/gossip_processing/gossip_validation.nim @@ -536,7 +536,7 @@ proc validateDataColumnSidecar*( return errIgnore("DataColumnSidecar: already have block") if blobQuarantine[].hasBlob( block_header.slot, block_header.proposer_index, data_column_sidecar.index): - return errIgnore("DataColumnSidecar: already have valid blob from same proposer") + return errIgnore("DataColumnSidecar: already have valid data column from same proposer") # [IGNORE] The sidecar's block's parent (defined by # `block_header.parent_root`) has been seen (via both gossip and @@ -545,13 +545,13 @@ proc validateDataColumnSidecar*( # # [REJECT] The sidecar's block's parent (defined by # `block_header.parent_root`) passes validation. 
- let parent = dag.getBlockRef(block_header.parent_root).valueOr: - if block_header.parent_root in quarantine[].unviable: - quarantine[].addUnviable(block_root) - return dag.checkedReject("DataColumnSidecar: parent not validated") - # else: - # quarantine[].addMissing(block_header.parent_root) - # return errIgnore("DataColumnSidecar: parent not found") + # let parent = dag.getBlockRef(block_header.parent_root).valueOr: + # if block_header.parent_root in quarantine[].unviable: + # quarantine[].addUnviable(block_root) + # return dag.checkedReject("DataColumnSidecar: parent not validated") + # else: + # quarantine[].addMissing(block_header.parent_root) + # return errIgnore("DataColumnSidecar: parent not found") # [REJECT] The sidecar is proposed by the expected `proposer_index` # for the block's slot in the context of the current shuffling @@ -560,47 +560,47 @@ proc validateDataColumnSidecar*( # shuffling, the sidecar MAY be queued for later processing while proposers # for the block's branch are calculated -- in such a case do not # REJECT, instead IGNORE this message. - let proposer = getProposer(dag, parent, block_header.slot).valueOr: - warn "cannot compute proposer for blob" - return errIgnore("DataColumnSidecar: Cannot compute proposer") # internal issue + # let proposer = getProposer(dag, parent, block_header.slot).valueOr: + # warn "cannot compute proposer for blob" + # return errIgnore("DataColumnSidecar: Cannot compute proposer") # internal issue - if uint64(proposer) != block_header.proposer_index: - return dag.checkedReject("DataColumnSidecar: Unexpected proposer") + # if uint64(proposer) != block_header.proposer_index: + # return dag.checkedReject("DataColumnSidecar: Unexpected proposer") # [REJECT] The proposer signature of `blob_sidecar.signed_block_header`, # is valid with respect to the `block_header.proposer_index` pubkey. - if not verify_block_signature( - dag.forkAtEpoch(block_header.slot.epoch), - getStateField(dag.headState, genesis_validators_root), - block_header.slot, - block_root, - dag.validatorKey(proposer).get(), - data_column_sidecar.signed_block_header.signature): - return dag.checkedReject("DataColumnSidecar: Invalid proposer signature") - - # [REJECT] The sidecar is from a higher slot than the sidecar's - # block's parent (defined by `block_header.parent_root`). - if not (block_header.slot > parent.bid.slot): - return dag.checkedReject("DataColumnSidecar: slot lower than parents'") + # if not verify_block_signature( + # dag.forkAtEpoch(block_header.slot.epoch), + # getStateField(dag.headState, genesis_validators_root), + # block_header.slot, + # block_root, + # dag.validatorKey(proposer).get(), + # data_column_sidecar.signed_block_header.signature): + # return dag.checkedReject("DataColumnSidecar: Invalid proposer signature") + + # # [REJECT] The sidecar is from a higher slot than the sidecar's + # # block's parent (defined by `block_header.parent_root`). + # if not (block_header.slot > parent.bid.slot): + # return dag.checkedReject("DataColumnSidecar: slot lower than parents'") # [REJECT] The current finalized_checkpoint is an ancestor of the sidecar's # block -- i.e. `get_checkpoint_block(store, block_header.parent_root, # store.finalized_checkpoint.epoch) == store.finalized_checkpoint.root`. 
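The `get_checkpoint_block` condition cited above is defined in the fork-choice spec in terms of `get_ancestor`. A Python sketch of those two helpers, assuming a `store.blocks` mapping from root to block and mainnet's 32-slot epochs (for orientation only; the Nim code below walks `BlockRef`s with `get_ancestor(parent, finalized_checkpoint.epoch.start_slot)` instead):

SLOTS_PER_EPOCH = 32  # mainnet preset; assumption for this sketch

def get_ancestor(store, root, slot):
    # Walk the parent chain until reaching a block at or before `slot`.
    block = store.blocks[root]
    if block.slot > slot:
        return get_ancestor(store, block.parent_root, slot)
    return root

def get_checkpoint_block(store, root, epoch):
    # The checkpoint block for `epoch` on the branch of `root` is the
    # ancestor at (or before) the first slot of that epoch.
    epoch_first_slot = epoch * SLOTS_PER_EPOCH  # compute_start_slot_at_epoch
    return get_ancestor(store, root, epoch_first_slot)
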
- let - finalized_checkpoint = getStateField(dag.headState, finalized_checkpoint) - ancestor = get_ancestor(parent, finalized_checkpoint.epoch.start_slot) - - if ancestor.isNil: - # This shouldn't happen: we should always be able to trace the parent back - # to the finalized checkpoint (else it wouldn't be in the DAG) - return errIgnore("DataColumnSidecar: Can't find ancestor") - - if not ( - finalized_checkpoint.root == ancestor.root or - finalized_checkpoint.root.isZero): - quarantine[].addUnviable(block_root) - return dag.checkedReject( - "DataColumnSidecar: Finalized checkpoint not an ancestor") + # let + # finalized_checkpoint = getStateField(dag.headState, finalized_checkpoint) + # ancestor = get_ancestor(parent, finalized_checkpoint.epoch.start_slot) + + # if ancestor.isNil: + # # This shouldn't happen: we should always be able to trace the parent back + # # to the finalized checkpoint (else it wouldn't be in the DAG) + # return errIgnore("DataColumnSidecar: Can't find ancestor") + + # if not ( + # finalized_checkpoint.root == ancestor.root or + # finalized_checkpoint.root.isZero): + # quarantine[].addUnviable(block_root) + # return dag.checkedReject( + # "DataColumnSidecar: Finalized checkpoint not an ancestor") ok() From b4810fabebc0557651cd95aafd90fed3cc7f90c9 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Sun, 2 Jun 2024 14:49:38 +0530 Subject: [PATCH 13/35] fix --- beacon_chain/gossip_processing/gossip_validation.nim | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/beacon_chain/gossip_processing/gossip_validation.nim b/beacon_chain/gossip_processing/gossip_validation.nim index eba2b1d06d..958380e046 100644 --- a/beacon_chain/gossip_processing/gossip_validation.nim +++ b/beacon_chain/gossip_processing/gossip_validation.nim @@ -213,12 +213,16 @@ func check_data_column_sidecar_inclusion_proof( if res.isErr: return errReject(res.error) + ok() + proc check_data_column_sidecar_kzg_proofs( data_column_sidecar: DataColumnSidecar): Result[void, ValidationError] = let res = data_column_sidecar.verify_data_column_sidecar_kzg_proofs() if res.isErr: return errReject(res.error) + ok() + # Gossip Validation # ---------------------------------------------------------------- @@ -523,10 +527,10 @@ proc validateDataColumnSidecar*( # # [REJECT] The sidecar's column data is valid as # # verified by `verify_data_column_kzg_proofs(sidecar)` - # block: - # let r = check_data_column_sidecar_kzg_proofs(data_column_sidecar) - # if r.isErr: - # return dag.checkedReject(r.error) + block: + let r = check_data_column_sidecar_kzg_proofs(data_column_sidecar) + if r.isErr: + return dag.checkedReject(r.error) # [IGNORE] The sidecar is the first sidecar for the tuple # (block_header.slot, block_header.proposer_index, blob_sidecar.index) From 9be2e4da9aee0b272680b049d1a9afb0f02544c7 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Mon, 3 Jun 2024 12:06:07 +0530 Subject: [PATCH 14/35] experimental disabling of upstream --- beacon_chain/spec/eip7594_helpers.nim | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/beacon_chain/spec/eip7594_helpers.nim b/beacon_chain/spec/eip7594_helpers.nim index 0eb52001fc..3847a31973 100644 --- a/beacon_chain/spec/eip7594_helpers.nim +++ b/beacon_chain/spec/eip7594_helpers.nim @@ -201,13 +201,15 @@ proc verify_data_column_sidecar_kzg_proofs*(sidecar: DataColumnSidecar): Result[ sidecarCol = sidecar.column.asSeq kzgProofs = sidecar.kzg_proofs.asSeq - # KZG batch verifies that the cells match the corresponding 
commitments and KZG proofs - let res = validate_data_column_sidecar( - kzgCommits, - rowIndices, - colIndices, - sidecarCol, - kzgProofs) + let res = verifyCellKzgProofBatch(kzgCommits, rowIndices, colIndices, sidecarCol, kzgProofs) + + # # KZG batch verifies that the cells match the corresponding commitments and KZG proofs + # let res = validate_data_column_sidecar( + # kzgCommits, + # rowIndices, + # colIndices, + # sidecarCol, + # kzgProofs) if res.isErr(): return err("DataColumnSidecar: validation failed") From 29e370e3687fbbdabb6613bc31641cba152345f0 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Wed, 5 Jun 2024 12:55:02 +0530 Subject: [PATCH 15/35] add: EF test harness for KZG EIP7594 (Peerdas) --- tests/consensus_spec/test_fixture_kzg.nim | 173 ++++++++++++++++++++++ vendor/nim-kzg4844 | 2 +- 2 files changed, 174 insertions(+), 1 deletion(-) diff --git a/tests/consensus_spec/test_fixture_kzg.nim b/tests/consensus_spec/test_fixture_kzg.nim index 74ed97f117..6f0c44432a 100644 --- a/tests/consensus_spec/test_fixture_kzg.nim +++ b/tests/consensus_spec/test_fixture_kzg.nim @@ -12,6 +12,7 @@ import std/json, yaml, kzg4844/kzg_ex, + stint, stew/[byteutils, results], ../testutil, ./fixtures_utils, ./os_ops @@ -19,6 +20,14 @@ import from std/sequtils import anyIt, mapIt, toSeq from std/strutils import rsplit +func toUInt64(s: int): Opt[uint64] = + if s < 0: + return Opt.none uint64 + try: + Opt.some uint64(s) + except ValueError: + Opt.none uint64 + func fromHex[N: static int](s: string): Opt[array[N, byte]] = if s.len != 2*(N+1): # 0x prefix @@ -183,6 +192,132 @@ proc runComputeBlobKzgProofTest(suiteName, suitePath, path: string) = else: check p.get == fromHex[48](output.getStr).get +proc runComputeCellsTest(suiteName2, suitePath2, path: string) = + let relativePathComponent = path.relativeTestPathComponent(suitePath2) + test "KZG - Compute Cells - " & relativePathComponent: + let + data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] + output = data["output"] + blob = fromHex[131072](data["input"]["blob"].getStr) + + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.2/tests/formats/kzg_7594/verify_cell_kzg_proof.md#condition + # If the blob is invalid (e.g. incorrect length or one of the 32-byte + # blocks does not represent a BLS field element) it should error, i.e. the + # the output should be `null`. + if blob.isNone: + check output.kind == JNull + else: + let p = computeCells(blob.get) + if p.isErr: + check output.kind == JNull + else: + for i in 0..<128: + check p.get[i] == fromHex[2048](output.getStr).get + +proc runComputeCellsAndProofsTest(suiteName2, suitePath2, path: string) = + let relativePathComponent = path.relativeTestPathComponent(suitePath2) + test "KZG - Compute Cells And Proofs - " & relativePathComponent: + let + data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] + output = data["output"] + blob = fromHex[131072](data["input"]["blob"].getStr) + + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.2/tests/formats/kzg_7594/verify_cell_kzg_proof.md#condition + # If the blob is invalid (e.g. incorrect length or one of the 32-byte + # blocks does not represent a BLS field element) it should error, i.e. the + # the output should be `null`. 
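The fixed sizes used throughout these fixtures (131072-byte blobs, 2048-byte cells, 48-byte commitments and proofs, loops over 128 cells) follow from the EIP-4844 and EIP-7594 constants. The self-contained arithmetic check below is for orientation only; the literals mirror the kzg_abi and eip7594 constants rather than introducing new definitions.

# Illustrative arithmetic only; these literals mirror kzg_abi / eip7594 constants.
const
  BYTES_PER_FIELD_ELEMENT = 32          # BLS field element size
  FIELD_ELEMENTS_PER_BLOB = 4096        # EIP-4844 blob
  FIELD_ELEMENTS_PER_EXT_BLOB = 2 * FIELD_ELEMENTS_PER_BLOB
  CELLS_PER_EXT_BLOB = 128
  FIELD_ELEMENTS_PER_CELL = FIELD_ELEMENTS_PER_EXT_BLOB div CELLS_PER_EXT_BLOB

static:
  doAssert FIELD_ELEMENTS_PER_CELL == 64
  # blobs in data.yaml decode via fromHex[131072]
  doAssert BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_BLOB == 131072
  # cells decode via fromHex[2048]; commitments and proofs are 48-byte G1 points
  doAssert BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_CELL == 2048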
+ if blob.isNone: + check output.kind == JNull + else: + let p = computeCellsAndProofs(blob.get) + if p.isErr: + check output.kind == JNull + else: + for i in 0..<128: + check p.get.cells[i] == fromHex[2048](output["cells"].getStr).get + check p.get.proofs[i] == fromHex[48](output["proofs"].getStr).get + +proc runVerifyCellKzgProofsTest(suiteName2, suitePath2, path: string) = + let relativePathComponent = path.relativeTestPathComponent(suitePath2) + test "KZG - Verify Cell Kzg Proof - " & relativePathComponent: + let + data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] + output = data["output"] + commitment = fromHex[48](data["input"]["commitment"].getStr) + proof = fromHex[48](data["input"]["proof"].getStr) + cell = fromHex[2048](data["input"]["cell"].getStr) + cell_id = toUInt64(data["input"]["cell_id"].getInt) + + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.2/tests/formats/kzg_7594/verify_cell_kzg_proof.md#condition + # If the blob is invalid (e.g. incorrect length or one of the 32-byte + # blocks does not represent a BLS field element) it should error, i.e. the + # the output should be `null`. + if commitment.isNone or proof.isNone or cell.isNone or cell_id.isNone: + check output.kind == JNull + else: + let p = verifyCellKzgProof(commitment.get, cell_id.get, cell.get, proof.get) + if p.isErr: + check output.kind == JNull + else: + check p.get == output.getBool + +proc runVerifyCellKzgProofBatchTest(suiteName2, suitePath2, path: string) = + let relativePathCompnent = path.relativeTestPathComponent(suitePath2) + test "KZG - Verify Cell Kzg Proof Batch - " & relativePathCompnent: + let + data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] + output = data["output"] + row_commitments = data["input"]["row_commitments"].mapIt(fromHex[48](it.getStr)) + row_indices = data["input"]["row_indices"].mapIt(toUInt64(it.getInt)) + column_indices = data["input"]["column_indices"].mapIt(toUInt64(it.getInt)) + cells = data["input"]["cells"].mapIt(fromHex[2048](it.getStr)) + proofs = data["input"]["proofs"].mapIt(fromHex[48](it.getStr)) + + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.2/tests/formats/kzg_7594/verify_cell_kzg_proof_batch.md#condition + # If the blob is invalid (e.g. incorrect length or one of the 32-byte + # blocks does not represent a BLS field element) it should error, i.e. the + # the output should be `null`. + if row_commitments.anyIt(it.isNone) or row_indices.anyIt(it.isNone) or + column_indices.anyIt(it.isNone) or proofs.anyIt(it.isNone) or + cells.anyIt(it.isNone): + check output.kind == JNull + else: + let v = verifyCellKzgProofBatch( + row_commitments.mapIt(it.get), + row_indices.mapIt(it.get), + column_indices.mapIt(it.get), + cells.mapIt(it.get), + proofs.mapIt(it.get) + ) + check: + if v.isErr: + output.kind == JNull + else: + v.get == output.getBool + +proc runRecoverAllCellsTest(suiteName2, suitePath2, path: string) = + let relativePathComponent = path.relativeTestPathComponent(suitePath2) + test "KZG - Recover All Cells - " & relativePathComponent: + let + data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] + output = data["output"] + cell_ids = data["input"]["cell_ids"].mapIt(toUInt64(it.getInt)) + cells = data["input"]["cells"].mapIt(fromHex[2048](it.getStr)) + + # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.2/tests/formats/kzg_7594/recover_all_cells.md#condition + # If the blob is invalid (e.g. 
incorrect length or one of the 32-byte + # blocks does not represent a BLS field element) it should error, i.e. the + # the output should be `null`. + if cell_ids.anyIt(it.isNone) or cells.anyIt(it.isNone): + check output.kind == JNull + else: + let v = recoverAllCells(cell_ids.mapIt(it.get), cells.mapIt(it.get)) + if v.isErr: + check output.kind == JNull + else: + for i in 0..<128: + check v.get[i] == fromHex[2048](output.getStr).get + from std/algorithm import sorted const suiteName = "EF - KZG" @@ -227,4 +362,42 @@ suite suiteName: for kind, path in walkDir(testsDir, relative = true, checkDir = true): runComputeBlobKzgProofTest(suiteName, testsDir, testsDir / path) +doAssert Kzg.freeTrustedSetup().isOk + +const suiteName2 = "EF - KZG - EIP7594" + +suite suiteName2: + const suitePath2 = SszTestsDir/"general"/"eip7594"/"kzg" + + # TODO also check that the only direct subdirectory of each is kzg-mainnet + doAssert sorted(mapIt( + toSeq(walkDir(suitePath2, relative = true, checkDir = true)), it.path)) == + ["compute_cells", "compute_cells_and_kzg_proofs", "recover_all_cells", + "verify_cell_kzg_proof", "verify_cell_kzg_proof_batch"] + + block: + let testsDir = suitePath2/"compute_cells"/"kzg-mainnet" + for kind, path in walkDir(testsDir, relative = true, checkDir = true): + runComputeCellsTest(suiteName2, testsDir, testsDir/path) + + block: + let testsDir = suitePath2/"compute_cells_and_kzg_proofs"/"kzg-mainnet" + for kind, path in walkDir(testsDir, relative = true, checkDir = true): + runComputeCellsAndProofsTest(suiteName2, testsDir, testsDir/path) + + block: + let testsDir = suitePath2/"recover_all_cells"/"kzg-mainnet" + for kind, path in walkDir(testsDir, relative = true, checkDir = true): + runRecoverAllCellsTest(suiteName2, testsDir, testsDir/path) + + block: + let testsDir = suitePath2/"verify_cell_kzg_proof"/"kzg-mainnet" + for kind, path in walkDir(testsDir, relative = true, checkDir = true): + runVerifyCellKzgProofsTest(suiteName2, testsDir, testsDir/path) + + block: + let testsDir = suitePath2/"verify_cell_kzg_proof_batch"/"kzg-mainnet" + for kind, path in walkDir(testsDir, relative = true, checkDir = true): + runVerifyCellKzgProofBatchTest(suiteName2, testsDir, testsDir/path) + doAssert Kzg.freeTrustedSetup().isOk \ No newline at end of file diff --git a/vendor/nim-kzg4844 b/vendor/nim-kzg4844 index b3612edd29..4687dea4d4 160000 --- a/vendor/nim-kzg4844 +++ b/vendor/nim-kzg4844 @@ -1 +1 @@ -Subproject commit b3612edd296137e6d383223a080d6dbe8370eebb +Subproject commit 4687dea4d423b508b673e775f66a2d20596604d1 From 452a39b90c2a22431f1b4e8f3a21ae3cd30da737 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Wed, 5 Jun 2024 17:29:58 +0530 Subject: [PATCH 16/35] bumped nim-kzg4844 --- vendor/nim-kzg4844 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-kzg4844 b/vendor/nim-kzg4844 index 4687dea4d4..34ad9a62e1 160000 --- a/vendor/nim-kzg4844 +++ b/vendor/nim-kzg4844 @@ -1 +1 @@ -Subproject commit 4687dea4d423b508b673e775f66a2d20596604d1 +Subproject commit 34ad9a62e1afce040afa6ad7bbdfbffe19c85cc8 From 8565bab5b4d1e836a28b097668789f7dad0458f5 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Wed, 5 Jun 2024 17:52:39 +0530 Subject: [PATCH 17/35] bump nim-kzg4844 to 9f54f2f83eb64be7571e5450c805f862e3e95780 --- vendor/nim-kzg4844 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-kzg4844 b/vendor/nim-kzg4844 index 34ad9a62e1..9f54f2f83e 160000 --- a/vendor/nim-kzg4844 +++ b/vendor/nim-kzg4844 @@ -1 +1 @@ -Subproject commit 
34ad9a62e1afce040afa6ad7bbdfbffe19c85cc8 +Subproject commit 9f54f2f83eb64be7571e5450c805f862e3e95780 From 3ef4af066f10b2db18a4e014c6a467b8c6c62f50 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Wed, 5 Jun 2024 18:10:24 +0530 Subject: [PATCH 18/35] rename func name to avoid conflicts --- tests/consensus_spec/test_fixture_kzg.nim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/consensus_spec/test_fixture_kzg.nim b/tests/consensus_spec/test_fixture_kzg.nim index 6f0c44432a..0e24cfecda 100644 --- a/tests/consensus_spec/test_fixture_kzg.nim +++ b/tests/consensus_spec/test_fixture_kzg.nim @@ -207,7 +207,7 @@ proc runComputeCellsTest(suiteName2, suitePath2, path: string) = if blob.isNone: check output.kind == JNull else: - let p = computeCells(blob.get) + let p = computeCellsKzg(blob.get) if p.isErr: check output.kind == JNull else: From 5c12be0cc295803015d367a9cc1832325e8d144d Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Wed, 5 Jun 2024 22:12:29 +0530 Subject: [PATCH 19/35] bumped nim-kzg4844 to d915948dd58c2ad23b551cd408066046cf5e46db --- tests/consensus_spec/test_fixture_kzg.nim | 4 +++- vendor/nim-kzg4844 | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/consensus_spec/test_fixture_kzg.nim b/tests/consensus_spec/test_fixture_kzg.nim index 0e24cfecda..539e6b79af 100644 --- a/tests/consensus_spec/test_fixture_kzg.nim +++ b/tests/consensus_spec/test_fixture_kzg.nim @@ -13,6 +13,7 @@ import yaml, kzg4844/kzg_ex, stint, + chronicles, stew/[byteutils, results], ../testutil, ./fixtures_utils, ./os_ops @@ -207,8 +208,9 @@ proc runComputeCellsTest(suiteName2, suitePath2, path: string) = if blob.isNone: check output.kind == JNull else: - let p = computeCellsKzg(blob.get) + let p = computeCells(blob.get) if p.isErr: + fatal "Failed to retrieve p value", err = p.error check output.kind == JNull else: for i in 0..<128: diff --git a/vendor/nim-kzg4844 b/vendor/nim-kzg4844 index 9f54f2f83e..d915948dd5 160000 --- a/vendor/nim-kzg4844 +++ b/vendor/nim-kzg4844 @@ -1 +1 @@ -Subproject commit 9f54f2f83eb64be7571e5450c805f862e3e95780 +Subproject commit d915948dd58c2ad23b551cd408066046cf5e46db From 49b34958bb63208d7bb7d2b21f6306ddfe211303 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Thu, 6 Jun 2024 01:18:18 +0530 Subject: [PATCH 20/35] fix: test_fixture_kzg --- tests/consensus_spec/test_fixture_kzg.nim | 61 +++++++++++------------ 1 file changed, 29 insertions(+), 32 deletions(-) diff --git a/tests/consensus_spec/test_fixture_kzg.nim b/tests/consensus_spec/test_fixture_kzg.nim index 539e6b79af..71907372ac 100644 --- a/tests/consensus_spec/test_fixture_kzg.nim +++ b/tests/consensus_spec/test_fixture_kzg.nim @@ -193,8 +193,8 @@ proc runComputeBlobKzgProofTest(suiteName, suitePath, path: string) = else: check p.get == fromHex[48](output.getStr).get -proc runComputeCellsTest(suiteName2, suitePath2, path: string) = - let relativePathComponent = path.relativeTestPathComponent(suitePath2) +proc runComputeCellsTest(suiteName, suitePath, path: string) = + let relativePathComponent = path.relativeTestPathComponent(suitePath) test "KZG - Compute Cells - " & relativePathComponent: let data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] @@ -210,14 +210,13 @@ proc runComputeCellsTest(suiteName2, suitePath2, path: string) = else: let p = computeCells(blob.get) if p.isErr: - fatal "Failed to retrieve p value", err = p.error check output.kind == JNull else: for i in 0..<128: - check p.get[i] == fromHex[2048](output.getStr).get + check p.get[i] == 
fromHex[2048](output[i].getStr).get -proc runComputeCellsAndProofsTest(suiteName2, suitePath2, path: string) = - let relativePathComponent = path.relativeTestPathComponent(suitePath2) +proc runComputeCellsAndProofsTest(suiteName, suitePath, path: string) = + let relativePathComponent = path.relativeTestPathComponent(suitePath) test "KZG - Compute Cells And Proofs - " & relativePathComponent: let data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] @@ -236,11 +235,11 @@ proc runComputeCellsAndProofsTest(suiteName2, suitePath2, path: string) = check output.kind == JNull else: for i in 0..<128: - check p.get.cells[i] == fromHex[2048](output["cells"].getStr).get - check p.get.proofs[i] == fromHex[48](output["proofs"].getStr).get + check p.get.cells[i] == fromHex[2048](output[0][i].getStr).get + check p.get.proofs[i] == fromHex[48](output[1][i].getStr).get -proc runVerifyCellKzgProofsTest(suiteName2, suitePath2, path: string) = - let relativePathComponent = path.relativeTestPathComponent(suitePath2) +proc runVerifyCellKzgProofsTest(suiteName, suitePath, path: string) = + let relativePathComponent = path.relativeTestPathComponent(suitePath) test "KZG - Verify Cell Kzg Proof - " & relativePathComponent: let data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] @@ -263,8 +262,8 @@ proc runVerifyCellKzgProofsTest(suiteName2, suitePath2, path: string) = else: check p.get == output.getBool -proc runVerifyCellKzgProofBatchTest(suiteName2, suitePath2, path: string) = - let relativePathCompnent = path.relativeTestPathComponent(suitePath2) +proc runVerifyCellKzgProofBatchTest(suiteName, suitePath, path: string) = + let relativePathCompnent = path.relativeTestPathComponent(suitePath) test "KZG - Verify Cell Kzg Proof Batch - " & relativePathCompnent: let data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] @@ -297,8 +296,8 @@ proc runVerifyCellKzgProofBatchTest(suiteName2, suitePath2, path: string) = else: v.get == output.getBool -proc runRecoverAllCellsTest(suiteName2, suitePath2, path: string) = - let relativePathComponent = path.relativeTestPathComponent(suitePath2) +proc runRecoverAllCellsTest(suiteName, suitePath, path: string) = + let relativePathComponent = path.relativeTestPathComponent(suitePath) test "KZG - Recover All Cells - " & relativePathComponent: let data = yaml.loadToJson(os_ops.readFile(path/"data.yaml"))[0] @@ -318,11 +317,11 @@ proc runRecoverAllCellsTest(suiteName2, suitePath2, path: string) = check output.kind == JNull else: for i in 0..<128: - check v.get[i] == fromHex[2048](output.getStr).get + check v.get[i] == fromHex[2048](output[i].getStr).get from std/algorithm import sorted -const suiteName = "EF - KZG" +var suiteName = "EF - KZG" suite suiteName: const suitePath = SszTestsDir/"general"/"deneb"/"kzg" @@ -364,42 +363,40 @@ suite suiteName: for kind, path in walkDir(testsDir, relative = true, checkDir = true): runComputeBlobKzgProofTest(suiteName, testsDir, testsDir / path) -doAssert Kzg.freeTrustedSetup().isOk +suiteName = "EF - KZG - EIP7594" -const suiteName2 = "EF - KZG - EIP7594" - -suite suiteName2: - const suitePath2 = SszTestsDir/"general"/"eip7594"/"kzg" +suite suiteName: + const suitePath = SszTestsDir/"general"/"eip7594"/"kzg" # TODO also check that the only direct subdirectory of each is kzg-mainnet doAssert sorted(mapIt( - toSeq(walkDir(suitePath2, relative = true, checkDir = true)), it.path)) == + toSeq(walkDir(suitePath, relative = true, checkDir = true)), it.path)) == ["compute_cells", "compute_cells_and_kzg_proofs", "recover_all_cells", 
"verify_cell_kzg_proof", "verify_cell_kzg_proof_batch"] block: - let testsDir = suitePath2/"compute_cells"/"kzg-mainnet" + let testsDir = suitePath/"compute_cells"/"kzg-mainnet" for kind, path in walkDir(testsDir, relative = true, checkDir = true): - runComputeCellsTest(suiteName2, testsDir, testsDir/path) + runComputeCellsTest(suiteName, testsDir, testsDir/path) block: - let testsDir = suitePath2/"compute_cells_and_kzg_proofs"/"kzg-mainnet" + let testsDir = suitePath/"compute_cells_and_kzg_proofs"/"kzg-mainnet" for kind, path in walkDir(testsDir, relative = true, checkDir = true): - runComputeCellsAndProofsTest(suiteName2, testsDir, testsDir/path) + runComputeCellsAndProofsTest(suiteName, testsDir, testsDir/path) block: - let testsDir = suitePath2/"recover_all_cells"/"kzg-mainnet" + let testsDir = suitePath/"recover_all_cells"/"kzg-mainnet" for kind, path in walkDir(testsDir, relative = true, checkDir = true): - runRecoverAllCellsTest(suiteName2, testsDir, testsDir/path) + runRecoverAllCellsTest(suiteName, testsDir, testsDir/path) block: - let testsDir = suitePath2/"verify_cell_kzg_proof"/"kzg-mainnet" + let testsDir = suitePath/"verify_cell_kzg_proof"/"kzg-mainnet" for kind, path in walkDir(testsDir, relative = true, checkDir = true): - runVerifyCellKzgProofsTest(suiteName2, testsDir, testsDir/path) + runVerifyCellKzgProofsTest(suiteName, testsDir, testsDir/path) block: - let testsDir = suitePath2/"verify_cell_kzg_proof_batch"/"kzg-mainnet" + let testsDir = suitePath/"verify_cell_kzg_proof_batch"/"kzg-mainnet" for kind, path in walkDir(testsDir, relative = true, checkDir = true): - runVerifyCellKzgProofBatchTest(suiteName2, testsDir, testsDir/path) + runVerifyCellKzgProofBatchTest(suiteName, testsDir, testsDir/path) doAssert Kzg.freeTrustedSetup().isOk \ No newline at end of file From a8accf697a7c16d7307f7249efccfa3d16ca5b43 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Thu, 6 Jun 2024 12:09:00 +0530 Subject: [PATCH 21/35] updated test suite with passing tests for KZG EIP7594 --- AllTests-mainnet.md | 113 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 112 insertions(+), 1 deletion(-) diff --git a/AllTests-mainnet.md b/AllTests-mainnet.md index ebdb530e54..3e3e6f20b7 100644 --- a/AllTests-mainnet.md +++ b/AllTests-mainnet.md @@ -429,6 +429,117 @@ OK: 2/2 Fail: 0/2 Skip: 0/2 + KZG - Verify blob KZG proof batch - verify_blob_kzg_proof_batch_case_proof_length_differen OK ``` OK: 253/253 Fail: 0/253 Skip: 0/253 +## EF - KZG - EIP7594 +```diff ++ KZG - Compute Cells - compute_cells_case_invalid_blob_26555bdcbf18a267 OK ++ KZG - Compute Cells - compute_cells_case_invalid_blob_79fb3cb1ef585a86 OK ++ KZG - Compute Cells - compute_cells_case_invalid_blob_7e99dea8893c104a OK ++ KZG - Compute Cells - compute_cells_case_invalid_blob_9d88c33852eb782d OK ++ KZG - Compute Cells - compute_cells_case_valid_419245fbfe69f145 OK ++ KZG - Compute Cells - compute_cells_case_valid_4aedd1a2a3933c3e OK ++ KZG - Compute Cells - compute_cells_case_valid_6e773f256383918c OK ++ KZG - Compute Cells - compute_cells_case_valid_b0731ef77b166ca8 OK ++ KZG - Compute Cells - compute_cells_case_valid_b81d309b22788820 OK ++ KZG - Compute Cells - compute_cells_case_valid_ed8b5001151417d5 OK ++ KZG - Compute Cells - compute_cells_case_valid_edeb8500a6507818 OK ++ KZG - Compute Cells And Proofs - compute_cells_and_kzg_proofs_case_invalid_blob_26555bdcbf OK ++ KZG - Compute Cells And Proofs - compute_cells_and_kzg_proofs_case_invalid_blob_79fb3cb1ef OK ++ KZG - Compute Cells And Proofs - 
compute_cells_and_kzg_proofs_case_invalid_blob_7e99dea889 OK ++ KZG - Compute Cells And Proofs - compute_cells_and_kzg_proofs_case_invalid_blob_9d88c33852 OK ++ KZG - Compute Cells And Proofs - compute_cells_and_kzg_proofs_case_valid_419245fbfe69f145 OK ++ KZG - Compute Cells And Proofs - compute_cells_and_kzg_proofs_case_valid_4aedd1a2a3933c3e OK ++ KZG - Compute Cells And Proofs - compute_cells_and_kzg_proofs_case_valid_6e773f256383918c OK ++ KZG - Compute Cells And Proofs - compute_cells_and_kzg_proofs_case_valid_b0731ef77b166ca8 OK ++ KZG - Compute Cells And Proofs - compute_cells_and_kzg_proofs_case_valid_b81d309b22788820 OK ++ KZG - Compute Cells And Proofs - compute_cells_and_kzg_proofs_case_valid_ed8b5001151417d5 OK ++ KZG - Compute Cells And Proofs - compute_cells_and_kzg_proofs_case_valid_edeb8500a6507818 OK ++ KZG - Recover All Cells - recover_all_cells_case_invalid_all_cells_are_missing_f46bf2cbb03 OK ++ KZG - Recover All Cells - recover_all_cells_case_invalid_cell_0f26a378535d3131 OK ++ KZG - Recover All Cells - recover_all_cells_case_invalid_cell_7a3f7f2910fe230a OK ++ KZG - Recover All Cells - recover_all_cells_case_invalid_cell_8be2d351449aa7b6 OK ++ KZG - Recover All Cells - recover_all_cells_case_invalid_cell_e1ac5e027103239d OK ++ KZG - Recover All Cells - recover_all_cells_case_invalid_cell_id_be00192b1a139275 OK ++ KZG - Recover All Cells - recover_all_cells_case_invalid_duplicate_cell_id_988d8aa16e4ef84 OK ++ KZG - Recover All Cells - recover_all_cells_case_invalid_more_cell_ids_than_cells_8eaea8a3 OK ++ KZG - Recover All Cells - recover_all_cells_case_invalid_more_cells_than_cell_ids_a2b10ac8 OK ++ KZG - Recover All Cells - recover_all_cells_case_invalid_more_than_half_missing_474f5c5c2a OK ++ KZG - Recover All Cells - recover_all_cells_case_valid_half_missing_every_other_cell_ae1b7 OK ++ KZG - Recover All Cells - recover_all_cells_case_valid_half_missing_first_half_bbb851083a6 OK ++ KZG - Recover All Cells - recover_all_cells_case_valid_half_missing_second_half_696b33f5da OK ++ KZG - Recover All Cells - recover_all_cells_case_valid_no_missing_9546b3ad9977aa40 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_cell_30dd1bdc76ff70fb OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_cell_5138cdd3534e8705 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_cell_76140fc51e7da7a5 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_commitment_307f4ebc067c OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_commitment_351fd262b984 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_commitment_71fec3ac464b OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_commitment_736703b3e23d OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_commitment_7c1a1ac24c1f OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_commitment_9624a42384c3 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_commitment_aef3e72488c4 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_proof_0223e6a42aeb7c72 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_proof_0ed7c15183b218d9 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_proof_29635b8440e1e10f OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_proof_504a37d7088fa4e7 OK ++ KZG - Verify Cell Kzg Proof - 
verify_cell_kzg_proof_case_incorrect_proof_65e1ad97362a27d8 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_proof_ab041dcc87d0a4fc OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_incorrect_proof_dcf5a8bd294aaa6f OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_cell_1535daa3d170da94 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_cell_1962af1b36fc07b2 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_cell_b9598308bd764e64 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_cell_e29abaaa0519a74f OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_cell_id_683cc4551f0ad97e OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_cell_id_f134fd5b36145b80 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_commitment_ac0c6311a92593 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_commitment_afe4829eb27b14 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_commitment_ebd7c7f8c02f05 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_commitment_fd08e705ede464 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_proof_0c35bb98c57669db OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_proof_25efe063234b38bb OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_proof_50589f444e37d476 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_invalid_proof_f900beacae9218db OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_valid_0c0acf27962a7e82 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_valid_402b30d8dc9e972d OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_valid_7db1d069d57ec097 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_valid_b4c7e9397878471c OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_valid_cc46f83ded6d6191 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_valid_dcf3e16a678fadc5 OK ++ KZG - Verify Cell Kzg Proof - verify_cell_kzg_proof_case_valid_f6d5ccfa04edf349 OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_incorrect_cell_9ff2df OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_incorrect_proof_59c63 OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_incorrect_row_commitm OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_cell_2e1699f9 OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_cell_5f0a7e48 OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_cell_745046c5 OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_cell_83f39012 OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_column_index_ OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_missing_cell_ OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_missing_colum OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_missing_proof OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_missing_row_c OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_missing_row_i OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_proof_135836e OK ++ KZG - 
Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_proof_d592b72 OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_proof_e65b54c OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_proof_eded2aa OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_row_commitmen OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_row_commitmen OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_row_commitmen OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_row_commitmen OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_invalid_row_index_55c OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_valid_21b209cb4f64d0e OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_valid_49f1f992af68d85 OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_valid_7dc4b00d04efff0 OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_valid_abe54dfc8ce6f34 OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_valid_ae0a9c4f3313b3d OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_valid_aedf5f25f4e3eea OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_valid_fad5448f3ceb097 OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_valid_multiple_blobs_ OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_valid_same_cell_multi OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_valid_unused_row_comm OK ++ KZG - Verify Cell Kzg Proof Batch - verify_cell_kzg_proof_batch_case_valid_zero_cells_92ee OK +``` +OK: 107/107 Fail: 0/107 Skip: 0/107 ## EF - SSZ generic types ```diff Testing basic_vector inputs - invalid Skip @@ -1030,4 +1141,4 @@ OK: 2/2 Fail: 0/2 Skip: 0/2 OK: 9/9 Fail: 0/9 Skip: 0/9 ---TOTAL--- -OK: 687/692 Fail: 0/692 Skip: 5/692 +OK: 794/799 Fail: 0/799 Skip: 5/799 From c0ade0c0051449d72fee0affb5e7fd55332b3d4a Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Fri, 7 Jun 2024 13:10:28 +0530 Subject: [PATCH 22/35] added test_fixture_networking for peerdas --- beacon_chain/spec/eip7594_helpers.nim | 7 +- .../test_fixture_networking.nim | 95 +++++++++++++++++++ 2 files changed, 100 insertions(+), 2 deletions(-) create mode 100644 tests/consensus_spec/test_fixture_networking.nim diff --git a/beacon_chain/spec/eip7594_helpers.nim b/beacon_chain/spec/eip7594_helpers.nim index 3847a31973..1e9505c7db 100644 --- a/beacon_chain/spec/eip7594_helpers.nim +++ b/beacon_chain/spec/eip7594_helpers.nim @@ -39,8 +39,11 @@ proc get_custody_columns*(node_id: NodeId, custody_subnet_count: uint64): Result current_id = node_id while subnet_ids.len < int(custody_subnet_count): - let subnet_id_bytes = eth2digest(current_id.toBytesLE().toOpenArray(0,8)) - var subnet_id = bytes_to_uint64(subnet_id_bytes.data) mod + + var subnet_id_bytes: array[8, byte] + subnet_id_bytes[0..7] = current_id.toBytesLE().toOpenArray(0,7) + + var subnet_id = bytes_to_uint64(subnet_id_bytes) mod DATA_COLUMN_SIDECAR_SUBNET_COUNT if subnet_id notin subnet_ids: diff --git a/tests/consensus_spec/test_fixture_networking.nim b/tests/consensus_spec/test_fixture_networking.nim new file mode 100644 index 0000000000..af6b9675a4 --- /dev/null +++ b/tests/consensus_spec/test_fixture_networking.nim @@ -0,0 +1,95 @@ +# beacon_chain +# 
Copyright (c) 2024 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. + +{.push raises: [].} +{.used.} + +import + std/[json, streams], + yaml, + kzg4844/kzg_ex, + stint, + chronicles, + eth/p2p/discoveryv5/[node], + stew/[byteutils, results], + ../../beacon_chain/spec/eip7594_helpers, + ../testutil, + ./fixtures_utils, ./os_ops + +from std/sequtils import anyIt, mapIt, toSeq +from std/strutils import rsplit + +func toUInt64(s: SomeInteger): Opt[uint64] = + if s < 0: + return Opt.none uint64 + try: + Opt.some uint64(s) + except ValueError: + Opt.none uint64 + +# func toUInt256(s: SomeInteger): Opt[UInt256] = +# if s < 0: +# return Opt.none UInt256 +# try: +# Opt.some u256(s) +# except ValueError: +# Opt.none + +func fromHex[N: static int](s: string): Opt[array[N, byte]] = + if s.len != 2*(N+1): + # 0x prefix + return Opt.none array[N, byte] + + try: + Opt.some fromHex(array[N, byte], s) + except ValueError: + Opt.none array[N, byte] + +proc runGetCustodyColumns(suiteName, path: string) = + let relativePathComponent = path.relativeTestPathComponent() + test "Networking - Get Custody Columns - " & relativePathComponent: + type TestMetaYaml = object + node_id: string + custody_subnet_count: uint64 + result: Option[seq[uint64]] + let + meta = block: + var s = openFileStream(path/"meta.yaml") + defer: close(s) + var res: TestMetaYaml + yaml.load(s, res) + res + node_id = UInt256.fromDecimal(meta.node_id) + custody_subnet_count = toUInt64(meta.custody_subnet_count) + reslt = (meta.result.get).mapIt(uint64(it)) + + if custody_subnet_count.isNone: + check meta.result.isNone + else: + let columns = get_custody_columns(node_id, custody_subnet_count.get) + if columns.isErr: + check meta.result.isNone + else: + var count = 0 + for column in columns.get: + check column == uint64(reslt[count]) + count = count + 1 + +from std/algorithm import sorted + +var suiteName = "EF - EIP7594" + +suite "EF - EIP7594 - Networking" & preset(): + const presetPath = SszTestsDir/"minimal" + let basePath = + presetPath/"eip7594"/"networking"/"get_custody_columns"/"pyspec_tests" + for kind, path in walkDir(basePath, relative = true, checkDir = true): + runGetCustodyColumns(suiteName, basePath/path) + + + + \ No newline at end of file From 6f87a30d33fa71264991e5763687b88d7089a572 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Fri, 7 Jun 2024 13:11:08 +0530 Subject: [PATCH 23/35] remove commented code --- tests/consensus_spec/test_fixture_networking.nim | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/consensus_spec/test_fixture_networking.nim b/tests/consensus_spec/test_fixture_networking.nim index af6b9675a4..4a56df2ef1 100644 --- a/tests/consensus_spec/test_fixture_networking.nim +++ b/tests/consensus_spec/test_fixture_networking.nim @@ -31,14 +31,6 @@ func toUInt64(s: SomeInteger): Opt[uint64] = except ValueError: Opt.none uint64 -# func toUInt256(s: SomeInteger): Opt[UInt256] = -# if s < 0: -# return Opt.none UInt256 -# try: -# Opt.some u256(s) -# except ValueError: -# Opt.none - func fromHex[N: static int](s: string): Opt[array[N, byte]] = if s.len != 2*(N+1): # 0x prefix From 869f41b9d92a98df2f0195698e6c059f839eb625 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh 
Date: Fri, 7 Jun 2024 13:32:04 +0530 Subject: [PATCH 24/35] fix: folder auto --- tests/consensus_spec/test_fixture_networking.nim | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/tests/consensus_spec/test_fixture_networking.nim b/tests/consensus_spec/test_fixture_networking.nim index 4a56df2ef1..bd82e728d7 100644 --- a/tests/consensus_spec/test_fixture_networking.nim +++ b/tests/consensus_spec/test_fixture_networking.nim @@ -71,12 +71,8 @@ proc runGetCustodyColumns(suiteName, path: string) = check column == uint64(reslt[count]) count = count + 1 -from std/algorithm import sorted - -var suiteName = "EF - EIP7594" - suite "EF - EIP7594 - Networking" & preset(): - const presetPath = SszTestsDir/"minimal" + const presetPath = SszTestsDir/const_preset let basePath = presetPath/"eip7594"/"networking"/"get_custody_columns"/"pyspec_tests" for kind, path in walkDir(basePath, relative = true, checkDir = true): From 427e942c7251c1891daead9e70004dacf7b8342e Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Fri, 7 Jun 2024 15:09:39 +0530 Subject: [PATCH 25/35] add: test pass, added test to suite, added test report --- AllTests-mainnet.md | 15 ++++++++++++++- tests/consensus_spec/all_tests.nim | 1 + 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/AllTests-mainnet.md b/AllTests-mainnet.md index 3e3e6f20b7..074e8d1f3f 100644 --- a/AllTests-mainnet.md +++ b/AllTests-mainnet.md @@ -172,6 +172,19 @@ OK: 1/1 Fail: 0/1 Skip: 0/1 + Tail block only in common OK ``` OK: 2/2 Fail: 0/2 Skip: 0/2 +## EF - EIP7594 - Networking [Preset: mainnet] +```diff ++ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK +``` +OK: 9/9 Fail: 0/9 Skip: 0/9 ## EF - KZG ```diff + KZG - Blob to KZG commitment - blob_to_kzg_commitment_case_invalid_blob_59d64ff6b4648fad OK @@ -1141,4 +1154,4 @@ OK: 2/2 Fail: 0/2 Skip: 0/2 OK: 9/9 Fail: 0/9 Skip: 0/9 ---TOTAL--- -OK: 794/799 Fail: 0/799 Skip: 5/799 +OK: 803/808 Fail: 0/808 Skip: 5/808 diff --git a/tests/consensus_spec/all_tests.nim b/tests/consensus_spec/all_tests.nim index a0f3af2201..d17bf5f540 100644 --- a/tests/consensus_spec/all_tests.nim +++ b/tests/consensus_spec/all_tests.nim @@ -15,4 +15,5 @@ import ./test_fixture_kzg, + ./test_fixture_networking, ./test_fixture_ssz_generic_types From 7230128c256f0b662298428657a500efb7cb69cf Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Fri, 7 Jun 2024 17:13:20 +0530 Subject: [PATCH 26/35] update: test report --- AllTests-mainnet.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/AllTests-mainnet.md b/AllTests-mainnet.md index 074e8d1f3f..fefc391bc6 100644 --- a/AllTests-mainnet.md +++ b/AllTests-mainnet.md @@ -174,15 +174,15 @@ OK: 1/1 Fail: 0/1 Skip: 0/1 OK: 2/2 
Fail: 0/2 Skip: 0/2 ## EF - EIP7594 - Networking [Preset: mainnet] ```diff -+ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK -+ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK -+ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK -+ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK -+ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK -+ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK -+ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK -+ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK -+ Networking - Get Custody Columns - minimal/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - mainnet/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - mainnet/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - mainnet/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - mainnet/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - mainnet/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - mainnet/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - mainnet/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - mainnet/eip7594/networking/get_custody_columns/pyspec_t OK ++ Networking - Get Custody Columns - mainnet/eip7594/networking/get_custody_columns/pyspec_t OK ``` OK: 9/9 Fail: 0/9 Skip: 0/9 ## EF - KZG From 282b716ed21418154cf654f7af3e3c27cc8ee116 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Sun, 9 Jun 2024 11:04:42 +0530 Subject: [PATCH 27/35] add: test for ssz consensus objects --- beacon_chain/spec/datatypes/eip7594.nim | 4 + .../test_fixture_ssz_consensus_objects.nim | 182 ++++++++++++++++++ 2 files changed, 186 insertions(+) create mode 100644 tests/consensus_spec/eip7594/test_fixture_ssz_consensus_objects.nim diff --git a/beacon_chain/spec/datatypes/eip7594.nim b/beacon_chain/spec/datatypes/eip7594.nim index b1f4d7fc53..cfdafefd79 100644 --- a/beacon_chain/spec/datatypes/eip7594.nim +++ b/beacon_chain/spec/datatypes/eip7594.nim @@ -57,6 +57,10 @@ type kzg_commitments_inclusion_proof*: array[KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH, Eth2Digest] + DataColumnIdentifier* = object + block_root*: Eth2Digest + index*: ColumnIndex + func shortLog*(v: DataColumnSidecar): auto = ( index: v.index, diff --git a/tests/consensus_spec/eip7594/test_fixture_ssz_consensus_objects.nim b/tests/consensus_spec/eip7594/test_fixture_ssz_consensus_objects.nim new file mode 100644 index 0000000000..e23a5647dd --- /dev/null +++ b/tests/consensus_spec/eip7594/test_fixture_ssz_consensus_objects.nim @@ -0,0 +1,182 @@ +# beacon_chain +# Copyright (c) 2022-2024 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. 
+ +{.push raises: [].} +{.used.} + +import + # Standard library + std/[ + strutils, streams, strformat, + macros, sets], + # Third-party + yaml, + # Beacon chain internals + ../../../beacon_chain/spec/datatypes/[ + altair, + deneb, + eip7594], + # Status libraries + snappy, + # Test utilities + ../../testutil, ../fixtures_utils, ../os_ops + +from ../../../beacon_chain/spec/datatypes/bellatrix import PowBlock +from ../../../beacon_chain/spec/datatypes/capella import + BLSToExecutionChange, SignedBLSToExecutionChange, HistoricalSummary, + Withdrawal + + +# SSZ tests of consensus objects (minimal/mainnet preset specific) + +# Parsing definitions +# ---------------------------------------------------------------- + +const + SSZDir = SszTestsDir/const_preset/"eip7594"/"ssz_static" + +type + SSZHashTreeRoot = object + # The test files have the values at the "root" + # so we **must** use "root" as a field name + root: string + # Some have a signing_root field + signing_root {.defaultVal: "".}: string + +# Note this only tracks HashTreeRoot +# Checking the values against the yaml file is TODO (require more flexible Yaml parser) + +proc checkSSZ( + T: type deneb.SignedBeaconBlock, + dir: string, + expectedHash: SSZHashTreeRoot +) {.raises: [IOError, SerializationError, UnconsumedInput].} = + # Deserialize into a ref object to not fill Nim stack + let encoded = snappy.decode( + readFileBytes(dir/"serialized.ssz_snappy"), MaxObjectSize) + let deserialized = newClone(sszDecodeEntireInput(encoded, T)) + + # SignedBeaconBlocks usually not hashed because they're identified by + # htr(BeaconBlock), so do it manually + check: expectedHash.root == "0x" & toLowerAscii($hash_tree_root( + [hash_tree_root(deserialized.message), + hash_tree_root(deserialized.signature)])) + + check deserialized.root == hash_tree_root(deserialized.message) + check SSZ.encode(deserialized[]) == encoded + check sszSize(deserialized[]) == encoded.len + + # TODO check the value (requires YAML loader) + +proc checkSSZ( + T: type, + dir: string, + expectedHash: SSZHashTreeRoot +) {.raises: [IOError, SerializationError, UnconsumedInput].} = + # Deserialize into a ref object to not fill Nim stack + let encoded = snappy.decode( + readFileBytes(dir/"serialized.ssz_snappy"), MaxObjectSize) + let deserialized = newClone(sszDecodeEntireInput(encoded, T)) + + check: expectedHash.root == "0x" & toLowerAscii($hash_tree_root(deserialized[])) + + check SSZ.encode(deserialized[]) == encoded + check sszSize(deserialized[]) == encoded.len + + # TODO check the value (requires YAML loader) + +proc loadExpectedHashTreeRoot( + dir: string +): SSZHashTreeRoot {.raises: [ + Exception, IOError, OSError, YamlConstructionError, YamlParserError].} = + let s = openFileStream(dir/"roots.yaml") + yaml.load(s, result) + s.close() + +# Test runner +# ---------------------------------------------------------------- + +suite "EF - EIP7594 - SSZ consensus objects " & preset(): + doAssert dirExists(SSZDir), "You need to run the \"download_test_vectors.sh\" script to retrieve the consensus spec test vectors." 
+ for pathKind, sszType in walkDir(SSZDir, relative = true, checkDir = true): + doAssert pathKind == pcDir + + test &" Testing {sszType}": + let path = SSZDir/sszType + for pathKind, sszTestKind in walkDir( + path, relative = true, checkDir = true): + doAssert pathKind == pcDir + let path = SSZDir/sszType/sszTestKind + for pathKind, sszTestCase in walkDir( + path, relative = true, checkDir = true): + let path = SSZDir/sszType/sszTestKind/sszTestCase + let hash = loadExpectedHashTreeRoot(path) + + case sszType: + of "AggregateAndProof": checkSSZ(AggregateAndProof, path, hash) + of "Attestation": checkSSZ(Attestation, path, hash) + of "AttestationData": checkSSZ(AttestationData, path, hash) + of "AttesterSlashing": checkSSZ(AttesterSlashing, path, hash) + of "BeaconBlock": checkSSZ(deneb.BeaconBlock, path, hash) + of "BeaconBlockBody": checkSSZ(deneb.BeaconBlockBody, path, hash) + of "BeaconBlockHeader": checkSSZ(BeaconBlockHeader, path, hash) + of "BeaconState": checkSSZ(deneb.BeaconState, path, hash) + of "BlobIdentifier": checkSSZ(BlobIdentifier, path, hash) + of "BlobSidecar": checkSSZ(BlobSidecar, path, hash) + of "BLSToExecutionChange": checkSSZ(BLSToExecutionChange, path, hash) + of "Checkpoint": checkSSZ(Checkpoint, path, hash) + of "ContributionAndProof": checkSSZ(ContributionAndProof, path, hash) + of "DataColumnIdentifier": checkSSZ(DataColumnIdentifier, path, hash) + of "DataColumnSidecar": checkSSZ(DataColumnSidecar, path, hash) + of "Deposit": checkSSZ(Deposit, path, hash) + of "DepositData": checkSSZ(DepositData, path, hash) + of "DepositMessage": checkSSZ(DepositMessage, path, hash) + of "Eth1Block": checkSSZ(Eth1Block, path, hash) + of "Eth1Data": checkSSZ(Eth1Data, path, hash) + of "ExecutionPayload": checkSSZ(ExecutionPayload, path, hash) + of "ExecutionPayloadHeader": + checkSSZ(ExecutionPayloadHeader, path, hash) + of "Fork": checkSSZ(Fork, path, hash) + of "ForkData": checkSSZ(ForkData, path, hash) + of "HistoricalBatch": checkSSZ(HistoricalBatch, path, hash) + of "HistoricalSummary": checkSSZ(HistoricalSummary, path, hash) + of "IndexedAttestation": checkSSZ(IndexedAttestation, path, hash) + of "LightClientBootstrap": + checkSSZ(deneb.LightClientBootstrap, path, hash) + of "LightClientHeader": checkSSZ(deneb.LightClientHeader, path, hash) + of "LightClientUpdate": checkSSZ(deneb.LightClientUpdate, path, hash) + of "LightClientFinalityUpdate": + checkSSZ(deneb.LightClientFinalityUpdate, path, hash) + of "LightClientOptimisticUpdate": + checkSSZ(deneb.LightClientOptimisticUpdate, path, hash) + of "PendingAttestation": checkSSZ(PendingAttestation, path, hash) + of "PowBlock": checkSSZ(PowBlock, path, hash) + of "ProposerSlashing": checkSSZ(ProposerSlashing, path, hash) + of "SignedAggregateAndProof": + checkSSZ(SignedAggregateAndProof, path, hash) + of "SignedBeaconBlock": + checkSSZ(deneb.SignedBeaconBlock, path, hash) + of "SignedBeaconBlockHeader": + checkSSZ(SignedBeaconBlockHeader, path, hash) + of "SignedBLSToExecutionChange": + checkSSZ(SignedBLSToExecutionChange, path, hash) + of "SignedContributionAndProof": + checkSSZ(SignedContributionAndProof, path, hash) + of "SignedVoluntaryExit": checkSSZ(SignedVoluntaryExit, path, hash) + of "SigningData": checkSSZ(SigningData, path, hash) + of "SyncAggregate": checkSSZ(SyncAggregate, path, hash) + of "SyncAggregatorSelectionData": + checkSSZ(SyncAggregatorSelectionData, path, hash) + of "SyncCommittee": checkSSZ(SyncCommittee, path, hash) + of "SyncCommitteeContribution": + checkSSZ(SyncCommitteeContribution, path, hash) 
+ of "SyncCommitteeMessage": checkSSZ(SyncCommitteeMessage, path, hash) + of "Withdrawal": checkSSZ(Withdrawal, path, hash) + of "Validator": checkSSZ(Validator, path, hash) + of "VoluntaryExit": checkSSZ(VoluntaryExit, path, hash) + else: + raise newException(ValueError, "Unsupported test: " & sszType) From 3e995d938f61c87c0549d1a2d81a8a237077e69f Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Mon, 10 Jun 2024 00:13:31 +0530 Subject: [PATCH 28/35] add: passing tests and test report for ssz consensus objects (eip7594) --- ConsensusSpecPreset-mainnet.md | 56 ++++++++++++++++++- ConsensusSpecPreset-minimal.md | 56 ++++++++++++++++++- .../consensus_spec_tests_preset.nim | 1 + .../eip7594/all_eip7594_fixtures.nim | 12 ++++ 4 files changed, 123 insertions(+), 2 deletions(-) create mode 100644 tests/consensus_spec/eip7594/all_eip7594_fixtures.nim diff --git a/ConsensusSpecPreset-mainnet.md b/ConsensusSpecPreset-mainnet.md index 542a6f0907..ff367d8a2f 100644 --- a/ConsensusSpecPreset-mainnet.md +++ b/ConsensusSpecPreset-mainnet.md @@ -2396,6 +2396,60 @@ OK: 25/25 Fail: 0/25 Skip: 0/25 + test_process_light_client_update_not_timeout OK ``` OK: 4/4 Fail: 0/4 Skip: 0/4 +## EF - EIP7594 - SSZ consensus objects [Preset: mainnet] +```diff ++ Testing AggregateAndProof OK ++ Testing Attestation OK ++ Testing AttestationData OK ++ Testing AttesterSlashing OK ++ Testing BLSToExecutionChange OK ++ Testing BeaconBlock OK ++ Testing BeaconBlockBody OK ++ Testing BeaconBlockHeader OK ++ Testing BeaconState OK ++ Testing BlobIdentifier OK ++ Testing BlobSidecar OK ++ Testing Checkpoint OK ++ Testing ContributionAndProof OK ++ Testing DataColumnIdentifier OK ++ Testing DataColumnSidecar OK ++ Testing Deposit OK ++ Testing DepositData OK ++ Testing DepositMessage OK ++ Testing Eth1Block OK ++ Testing Eth1Data OK ++ Testing ExecutionPayload OK ++ Testing ExecutionPayloadHeader OK ++ Testing Fork OK ++ Testing ForkData OK ++ Testing HistoricalBatch OK ++ Testing HistoricalSummary OK ++ Testing IndexedAttestation OK ++ Testing LightClientBootstrap OK ++ Testing LightClientFinalityUpdate OK ++ Testing LightClientHeader OK ++ Testing LightClientOptimisticUpdate OK ++ Testing LightClientUpdate OK ++ Testing PendingAttestation OK ++ Testing PowBlock OK ++ Testing ProposerSlashing OK ++ Testing SignedAggregateAndProof OK ++ Testing SignedBLSToExecutionChange OK ++ Testing SignedBeaconBlock OK ++ Testing SignedBeaconBlockHeader OK ++ Testing SignedContributionAndProof OK ++ Testing SignedVoluntaryExit OK ++ Testing SigningData OK ++ Testing SyncAggregate OK ++ Testing SyncAggregatorSelectionData OK ++ Testing SyncCommittee OK ++ Testing SyncCommitteeContribution OK ++ Testing SyncCommitteeMessage OK ++ Testing Validator OK ++ Testing VoluntaryExit OK ++ Testing Withdrawal OK +``` +OK: 50/50 Fail: 0/50 Skip: 0/50 ## EF - Electra - Epoch Processing - Effective balance updates [Preset: mainnet] ```diff + Effective balance updates - effective_balance_hysteresis [Preset: mainnet] OK @@ -3675,4 +3729,4 @@ OK: 69/88 Fail: 0/88 Skip: 19/88 OK: 3/3 Fail: 0/3 Skip: 0/3 ---TOTAL--- -OK: 2961/2981 Fail: 0/2981 Skip: 20/2981 +OK: 3011/3031 Fail: 0/3031 Skip: 20/3031 diff --git a/ConsensusSpecPreset-minimal.md b/ConsensusSpecPreset-minimal.md index edd1c355a1..e2fdfa2c67 100644 --- a/ConsensusSpecPreset-minimal.md +++ b/ConsensusSpecPreset-minimal.md @@ -2505,6 +2505,60 @@ OK: 30/30 Fail: 0/30 Skip: 0/30 + test_process_light_client_update_not_timeout OK ``` OK: 4/4 Fail: 0/4 Skip: 0/4 +## EF - EIP7594 - SSZ consensus objects [Preset: 
minimal] +```diff ++ Testing AggregateAndProof OK ++ Testing Attestation OK ++ Testing AttestationData OK ++ Testing AttesterSlashing OK ++ Testing BLSToExecutionChange OK ++ Testing BeaconBlock OK ++ Testing BeaconBlockBody OK ++ Testing BeaconBlockHeader OK ++ Testing BeaconState OK ++ Testing BlobIdentifier OK ++ Testing BlobSidecar OK ++ Testing Checkpoint OK ++ Testing ContributionAndProof OK ++ Testing DataColumnIdentifier OK ++ Testing DataColumnSidecar OK ++ Testing Deposit OK ++ Testing DepositData OK ++ Testing DepositMessage OK ++ Testing Eth1Block OK ++ Testing Eth1Data OK ++ Testing ExecutionPayload OK ++ Testing ExecutionPayloadHeader OK ++ Testing Fork OK ++ Testing ForkData OK ++ Testing HistoricalBatch OK ++ Testing HistoricalSummary OK ++ Testing IndexedAttestation OK ++ Testing LightClientBootstrap OK ++ Testing LightClientFinalityUpdate OK ++ Testing LightClientHeader OK ++ Testing LightClientOptimisticUpdate OK ++ Testing LightClientUpdate OK ++ Testing PendingAttestation OK ++ Testing PowBlock OK ++ Testing ProposerSlashing OK ++ Testing SignedAggregateAndProof OK ++ Testing SignedBLSToExecutionChange OK ++ Testing SignedBeaconBlock OK ++ Testing SignedBeaconBlockHeader OK ++ Testing SignedContributionAndProof OK ++ Testing SignedVoluntaryExit OK ++ Testing SigningData OK ++ Testing SyncAggregate OK ++ Testing SyncAggregatorSelectionData OK ++ Testing SyncCommittee OK ++ Testing SyncCommitteeContribution OK ++ Testing SyncCommitteeMessage OK ++ Testing Validator OK ++ Testing VoluntaryExit OK ++ Testing Withdrawal OK +``` +OK: 50/50 Fail: 0/50 Skip: 0/50 ## EF - Electra - Epoch Processing - Effective balance updates [Preset: minimal] ```diff + Effective balance updates - effective_balance_hysteresis [Preset: minimal] OK @@ -4005,4 +4059,4 @@ OK: 185/207 Fail: 0/207 Skip: 22/207 OK: 3/3 Fail: 0/3 Skip: 0/3 ---TOTAL--- -OK: 3256/3279 Fail: 0/3279 Skip: 23/3279 +OK: 3306/3329 Fail: 0/3329 Skip: 23/3329 diff --git a/tests/consensus_spec/consensus_spec_tests_preset.nim b/tests/consensus_spec/consensus_spec_tests_preset.nim index b5ad485cd4..1aa23a82e6 100644 --- a/tests/consensus_spec/consensus_spec_tests_preset.nim +++ b/tests/consensus_spec/consensus_spec_tests_preset.nim @@ -17,6 +17,7 @@ import ./bellatrix/all_bellatrix_fixtures, ./capella/all_capella_fixtures, ./deneb/all_deneb_fixtures, + ./eip7594/all_eip7594_fixtures, ./electra/all_electra_fixtures, ./test_fixture_fork, ./test_fixture_fork_choice, diff --git a/tests/consensus_spec/eip7594/all_eip7594_fixtures.nim b/tests/consensus_spec/eip7594/all_eip7594_fixtures.nim new file mode 100644 index 0000000000..952c08f92f --- /dev/null +++ b/tests/consensus_spec/eip7594/all_eip7594_fixtures.nim @@ -0,0 +1,12 @@ +# beacon_chain +# Copyright (c) 2022-2024 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. 
+ +{.push raises: [].} +{.used.} + +import + ./test_fixture_ssz_consensus_objects From d8acc169180a44ddff5abcaa48d51889b38b79ca Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Tue, 11 Jun 2024 15:12:09 +0530 Subject: [PATCH 29/35] rename: KzgCell --> Cell --- beacon_chain/spec/eip7594_helpers.nim | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/beacon_chain/spec/eip7594_helpers.nim b/beacon_chain/spec/eip7594_helpers.nim index 1e9505c7db..bc09fdebf9 100644 --- a/beacon_chain/spec/eip7594_helpers.nim +++ b/beacon_chain/spec/eip7594_helpers.nim @@ -78,7 +78,7 @@ proc compute_extended_matrix* (blobs: seq[KzgBlob]): Result[ExtendedMatrix, cstr ok(extended_matrix) # https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/specs/_features/eip7594/das-core.md#recover_matrix -proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), KzgCell], blobCount: uint64): Result[ExtendedMatrix, cstring] = +proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), Cell], blobCount: uint64): Result[ExtendedMatrix, cstring] = # This helper demonstrates how to apply recover_all_cells # The data structure for storing cells is implementation-dependent @@ -94,7 +94,7 @@ proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), KzgCell], blobCount: if blIdx == blobIndex: cellIds.add(cellId) - var cells: seq[KzgCell] + var cells: seq[Cell] for cellId in cellIds: var interim_key = (BlobIndex(blobIndex), cellId) @@ -130,7 +130,7 @@ proc get_data_column_sidecars*(signed_block: deneb.SignedBeaconBlock, blobs: seq let blobCount = blobs.len var - cells: seq[seq[KzgCell]] + cells: seq[seq[Cell]] proofs: seq[seq[KzgProof]] for i in 0.. Date: Thu, 13 Jun 2024 16:19:15 +0530 Subject: [PATCH 30/35] add: testing init for peerdas eip --- beacon_chain/spec/datatypes/eip7594.nim | 2 +- beacon_chain/spec/eip7594_helpers.nim | 24 ++++------ tests/all_tests.nim | 1 + tests/test_eip7594_helpers.nim | 59 +++++++++++++++++++++++++ 4 files changed, 70 insertions(+), 16 deletions(-) create mode 100644 tests/test_eip7594_helpers.nim diff --git a/beacon_chain/spec/datatypes/eip7594.nim b/beacon_chain/spec/datatypes/eip7594.nim index cfdafefd79..b39a5190cd 100644 --- a/beacon_chain/spec/datatypes/eip7594.nim +++ b/beacon_chain/spec/datatypes/eip7594.nim @@ -9,7 +9,7 @@ import "."/[base, deneb], kzg4844 -export base, kzg4844 +export base const FIELD_ELEMENTS_PER_EXT_BLOB* = 2 * kzg_abi.FIELD_ELEMENTS_PER_BLOB diff --git a/beacon_chain/spec/eip7594_helpers.nim b/beacon_chain/spec/eip7594_helpers.nim index bc09fdebf9..524ea6d3c1 100644 --- a/beacon_chain/spec/eip7594_helpers.nim +++ b/beacon_chain/spec/eip7594_helpers.nim @@ -14,6 +14,7 @@ import ssz_serialization/proofs, chronicles, ./[beacon_time, crypto], + kzg4844/kzg_ex, eth/p2p/discoveryv5/[node], ./helpers, ./datatypes/[eip7594, deneb] @@ -67,14 +68,15 @@ proc get_custody_columns*(node_id: NodeId, custody_subnet_count: uint64): Result proc compute_extended_matrix* (blobs: seq[KzgBlob]): Result[ExtendedMatrix, cstring] = # This helper demonstrates the relationship between blobs and `ExtendedMatrix` var extended_matrix: ExtendedMatrix - for blob in blobs: - let res = computeCells(blob) - + for i in 0.. 
MAX_TOP_BYTE and i %% kzg_abi.BYTES_PER_FIELD_ELEMENT == 0: + blob[i] = MAX_TOP_BYTE + blobs.add(blob) + + ok(blobs) + +suite "EIP-7594 Unit Tests": + test "EIP-7594: Compute Extended Matrix": + proc testComputeExtendedMatrix() = + let blob_count = 2 + let input_blobs = createSampleKzgBlobs(blob_count) + let extended_matrix = compute_extended_matrix(input_blobs.get) + + doAssert extended_matrix.get.len == kzg_abi.CELLS_PER_EXT_BLOB * blob_count + + testComputeExtendedMatrix() \ No newline at end of file From 96b3e95f8eb5d3d97c2728f8b2c57f714aaa85e5 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Thu, 13 Jun 2024 16:48:28 +0530 Subject: [PATCH 31/35] fix: suggested fixes --- beacon_chain/spec/eip7594_helpers.nim | 9 +++++---- tests/consensus_spec/test_fixture_kzg.nim | 6 +++--- tests/consensus_spec/test_fixture_networking.nim | 6 +----- 3 files changed, 9 insertions(+), 12 deletions(-) diff --git a/beacon_chain/spec/eip7594_helpers.nim b/beacon_chain/spec/eip7594_helpers.nim index 524ea6d3c1..e724cb8055 100644 --- a/beacon_chain/spec/eip7594_helpers.nim +++ b/beacon_chain/spec/eip7594_helpers.nim @@ -87,9 +87,10 @@ proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), Cell], blobCount: ui var extended_matrix: ExtendedMatrix for blobIndex in 0'u64.. Date: Thu, 13 Jun 2024 20:15:00 +0530 Subject: [PATCH 32/35] added another unit test, disabling test in CI for now, because changes in spec --- beacon_chain/spec/datatypes/eip7594.nim | 6 ++++ beacon_chain/spec/eip7594_helpers.nim | 17 +++++++++ tests/all_tests.nim | 1 - tests/test_eip7594_helpers.nim | 47 ++++++++++++++++++++++--- 4 files changed, 65 insertions(+), 6 deletions(-) diff --git a/beacon_chain/spec/datatypes/eip7594.nim b/beacon_chain/spec/datatypes/eip7594.nim index b39a5190cd..660d8f900a 100644 --- a/beacon_chain/spec/datatypes/eip7594.nim +++ b/beacon_chain/spec/datatypes/eip7594.nim @@ -60,6 +60,12 @@ type DataColumnIdentifier* = object block_root*: Eth2Digest index*: ColumnIndex + + MatrixEntry* = object + cell*: Cell + kzg_proof*: KzgProof + column_index*: ColumnIndex + row_index*: RowIndex func shortLog*(v: DataColumnSidecar): auto = ( diff --git a/beacon_chain/spec/eip7594_helpers.nim b/beacon_chain/spec/eip7594_helpers.nim index e724cb8055..c038ce2707 100644 --- a/beacon_chain/spec/eip7594_helpers.nim +++ b/beacon_chain/spec/eip7594_helpers.nim @@ -114,6 +114,23 @@ proc recover_matrix*(cells_dict: Table[(BlobIndex, CellID), Cell], blobCount: ui ok(extended_matrix) +proc recover_matrix*(partial_matrix: seq[MatrixEntry], blobCount: int): Result[seq[MatrixEntry], cstring] = + # This helper demonstrates how to apply recover_cells_and_kzg_proofs + # The data structure for storing cells is implementation-dependent + + var extended_matrix: seq[MatrixEntry] + for blob_index in 0.. 
Date: Fri, 14 Jun 2024 11:34:36 +0530 Subject: [PATCH 33/35] bumped nim-kzg4844 to 2880673a7af5d96bfc91d51c5c0c058be07a6c57 --- vendor/nim-kzg4844 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-kzg4844 b/vendor/nim-kzg4844 index d915948dd5..2880673a7a 160000 --- a/vendor/nim-kzg4844 +++ b/vendor/nim-kzg4844 @@ -1 +1 @@ -Subproject commit d915948dd58c2ad23b551cd408066046cf5e46db +Subproject commit 2880673a7af5d96bfc91d51c5c0c058be07a6c57 From 6b0223b078bf3ba15b3c99fd2f20e19bfbb8e50c Mon Sep 17 00:00:00 2001 From: Agnish Ghosh Date: Fri, 14 Jun 2024 11:47:12 +0530 Subject: [PATCH 34/35] bump down due to error in C api --- vendor/nim-kzg4844 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-kzg4844 b/vendor/nim-kzg4844 index 2880673a7a..d915948dd5 160000 --- a/vendor/nim-kzg4844 +++ b/vendor/nim-kzg4844 @@ -1 +1 @@ -Subproject commit 2880673a7af5d96bfc91d51c5c0c058be07a6c57 +Subproject commit d915948dd58c2ad23b551cd408066046cf5e46db From 6ec2774da971c3589c27395d8fcc406342f5eef8 Mon Sep 17 00:00:00 2001 From: Agnish Ghosh <80243668+agnxsh@users.noreply.github.com> Date: Wed, 17 Jul 2024 17:24:54 +0530 Subject: [PATCH 35/35] fix: linux-amd failure for `test_fixture_kzg` ~ peerdas branch(es) (#6428) * move proofs and cells to ref * move returned value to ref --- tests/consensus_spec/test_fixture_kzg.nim | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/consensus_spec/test_fixture_kzg.nim b/tests/consensus_spec/test_fixture_kzg.nim index 029007673b..91c9bcf6c0 100644 --- a/tests/consensus_spec/test_fixture_kzg.nim +++ b/tests/consensus_spec/test_fixture_kzg.nim @@ -208,12 +208,12 @@ proc runComputeCellsTest(suiteName, suitePath, path: string) = if blob.isNone: check output.kind == JNull else: - let p = computeCells(blob.get) - if p.isErr: + let p = newClone computeCells(blob.get) + if p[].isErr: check output.kind == JNull else: - for i in 0..
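
The `recover_matrix` helper introduced in PATCH 32 takes a partial `seq[MatrixEntry]` and, for each blob row, gathers the cells and column indices that are still available before applying `recover_cells_and_kzg_proofs` (the comment in the patch names that spec function). The sketch below illustrates only that per-row grouping step as a minimal, self-contained Nim example: `MatrixEntry` is reduced to a cell plus its indices, and the actual KZG recovery call is deliberately omitted, so this should not be read as the helper's real implementation.

```nim
import std/[tables, sequtils]

type
  ColumnIndex = uint64
  RowIndex = uint64
  Cell = array[4, byte]            # stand-in; real cells are much wider
  MatrixEntry = object
    cell: Cell
    column_index: ColumnIndex
    row_index: RowIndex

# Collect the surviving entries of a partial matrix per blob row: the
# bookkeeping a recover_matrix-style helper needs before it can recover
# each row independently.
proc groupByRow(partial: seq[MatrixEntry]): Table[RowIndex, seq[MatrixEntry]] =
  for entry in partial:
    result.mgetOrPut(entry.row_index, @[]).add entry

when isMainModule:
  let partial = @[
    MatrixEntry(row_index: 0, column_index: 3),
    MatrixEntry(row_index: 1, column_index: 0),
    MatrixEntry(row_index: 0, column_index: 7)]
  let byRow = groupByRow(partial)
  doAssert byRow[0].mapIt(it.column_index) == @[ColumnIndex(3), ColumnIndex(7)]
  doAssert byRow[1].len == 1
```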
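
PATCH 35 addresses the linux-amd64 failure by keeping the large values returned from `computeCells` (and the related proof computations) behind a `ref` via the project's `newClone` helper, so the test only passes a small reference around instead of copying the whole result. The snippet below sketches that pattern using nothing but the standard library; `heapClone`, `CellsAndProofs`, and `computeDummyCells` are hypothetical stand-ins, not the real nim-kzg4844 types or the project's helper.

```nim
type
  # Deliberately oversized dummy type standing in for a KZG result; the
  # point is only that it is too big to keep copying by value.
  CellsAndProofs = object
    cells: array[128, array[2048, byte]]

# Hypothetical equivalent of the newClone helper used in the patch:
# allocate on the heap and copy the freshly computed value into place.
proc heapClone[T](v: T): ref T =
  new(result)
  result[] = v

proc computeDummyCells(seed: byte): CellsAndProofs =
  # Placeholder for computeCells: tag the first byte of every cell.
  for row in result.cells.mitems:
    row[0] = seed

when isMainModule:
  # Only the ref is copied around afterwards; the payload stays on the heap.
  let p = heapClone(computeDummyCells(7))
  doAssert p[].cells[0][0] == 7
  doAssert p[].cells[127][0] == 7
```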