From af04410a1c577bd57fab1f0763789d0081a8e847 Mon Sep 17 00:00:00 2001
From: Ignacio Hagopian
Date: Mon, 11 Mar 2024 09:01:21 -0300
Subject: [PATCH] Update EIP-6800: pedersen hash endianness and group to field
 fixes

Merged by EIP-Bot.
---
 EIPS/eip-6800.md | 36 ++++++++++++++++++------------------
 1 file changed, 18 insertions(+), 18 deletions(-)

diff --git a/EIPS/eip-6800.md b/EIPS/eip-6800.md
index 11986b7cdc78b..9e2ea69f065ec 100644
--- a/EIPS/eip-6800.md
+++ b/EIPS/eip-6800.md
@@ -35,14 +35,14 @@ BANDERSNATCH_MODULUS = \
 PEDERSEN_BASIS = [....]
 VERKLE_NODE_WIDTH = len(PEDERSEN_BASIS)
 
-def group_to_field(point: Point) -> int:
+def group_to_scalar_field(point: Point) -> int:
     # Not collision resistant. Not random oracle.
     # Binding for Pedersen commitments.
     assert isinstance(point, Point)
     if point == bandersnatch.Z:
         return 0
     else:
-        return int.from_bytes(point.serialize(), 'little') % BANDERSNATCH_MODULUS
+        return int.from_bytes(point.map_to_base_field().to_bytes(32, 'little'), 'little') % BANDERSNATCH_MODULUS
 
 def compute_commitment_root(children: Sequence[int]) -> Point:
     o = bandersnatch.Z
@@ -59,8 +59,8 @@ def extension_and_suffix_tree(stem: bytes31, values: Dict[byte, bytes32]) -> int
     C2 = compute_commitment_root(sub_leaves[256:])
     return compute_commitment_root([1, # Extension marker
                                     int.from_bytes(stem, "little"),
-                                    group_to_field(C1),
-                                    group_to_field(C2)] +
+                                    group_to_scalar_field(C1),
+                                    group_to_scalar_field(C2)] +
                                    [0] * 252)
 
 def compute_main_tree_root(data: Dict[bytes32, int],
@@ -78,7 +78,7 @@ def compute_main_tree_root(data: Dict[bytes32, int],
         }, prefix + bytes([i]))
         for i in range(VERKLE_NODE_WIDTH)
     ]
-    return group_to_field(compute_commitment_root(sub_commitments))
+    return group_to_scalar_field(compute_commitment_root(sub_commitments))
 
 def compute_verkle_root(data: Dict[bytes32, bytes32]) -> Point:
     stems = set(key[:-1] for key in data.keys())
@@ -87,7 +87,7 @@ def compute_verkle_root(data: Dict[bytes32, bytes32]) -> Point:
         commitment_data = Dict[byte, bytes32]()
         for i in range(VERKLE_NODE_WIDTH):
             if stem + bytes([i]) in data:
-                commitment_data[i] = data[stem + bytes([i])
+                commitment_data[i] = data[stem + bytes([i])]
         data_as_stems[stem] = extension_and_suffix_tree(stem, commitment_data)
     sub_commitments = [
         compute_main_tree_root({
@@ -113,17 +113,17 @@ This is an illustration of the tree structure.
 
 Instead of a two-layer structure as in the Patricia tree, in the Verkle tree we will embed all information into a single `key: value` tree. This section specifies which tree keys store the information (account header data, code, storage) in the state.
 
-|Parameter |Value|
-|----------|-----|
-|VERSION_LEAF_KEY|0|
-|BALANCE_LEAF_KEY|1|
-|NONCE_LEAF_KEY|2|
-|CODE_KECCAK_LEAF_KEY|3|
-|CODE_SIZE_LEAF_KEY|4|
-|HEADER_STORAGE_OFFSET|64|
-|CODE_OFFSET|128|
-|VERKLE_NODE_WIDTH|256|
-|MAIN_STORAGE_OFFSET|256**31|
+| Parameter             | Value   |
+| --------------------- | ------- |
+| VERSION_LEAF_KEY      | 0       |
+| BALANCE_LEAF_KEY      | 1       |
+| NONCE_LEAF_KEY        | 2       |
+| CODE_KECCAK_LEAF_KEY  | 3       |
+| CODE_SIZE_LEAF_KEY    | 4       |
+| HEADER_STORAGE_OFFSET | 64      |
+| CODE_OFFSET           | 128     |
+| VERKLE_NODE_WIDTH     | 256     |
+| MAIN_STORAGE_OFFSET   | 256**31 |
 
 _It’s a required invariant that `VERKLE_NODE_WIDTH > CODE_OFFSET > HEADER_STORAGE_OFFSET` and that `HEADER_STORAGE_OFFSET` is greater than the leaf keys. Additionally, `MAIN_STORAGE_OFFSET` must be a power of `VERKLE_NODE_WIDTH`._
 
@@ -144,7 +144,7 @@ def pedersen_hash(inp: bytes) -> bytes32:
     # Interpret input as list of 128 bit (16 byte) integers
     ext_input = inp + b"\0" * (255 * 16 - len(inp))
     ints = [2 + 256 * len(inp)] + \
-        [int.from_bytes(ext_input[16 * i:16 * (i + 1)]) for i in range(255)]
+        [int.from_bytes(ext_input[16 * i:16 * (i + 1)], 'little') for i in range(255)]
     return compute_commitment_root(ints).serialize()
 
 def get_tree_key(address: Address32, tree_index: int, sub_index: int):