diff --git a/src/ethereum_test_fixtures/blockchain.py b/src/ethereum_test_fixtures/blockchain.py index 2133cbecf5..56a4ec0685 100644 --- a/src/ethereum_test_fixtures/blockchain.py +++ b/src/ethereum_test_fixtures/blockchain.py @@ -106,9 +106,9 @@ class FixtureHeader(CamelModel): extra_data: Bytes prev_randao: Hash = Field(Hash(0), alias="mixHash") nonce: HeaderNonce = Field(HeaderNonce(0), validate_default=True) - base_fee_per_gas: Annotated[ - ZeroPaddedHexNumber, HeaderForkRequirement("base_fee") - ] | None = Field(None) + base_fee_per_gas: Annotated[ZeroPaddedHexNumber, HeaderForkRequirement("base_fee")] | None = ( + Field(None) + ) withdrawals_root: Annotated[Hash, HeaderForkRequirement("withdrawals")] | None = Field(None) blob_gas_used: ( Annotated[ZeroPaddedHexNumber, HeaderForkRequirement("blob_gas_used")] | None @@ -234,9 +234,9 @@ def from_fixture_header( withdrawals=withdrawals, deposit_requests=requests.deposit_requests() if requests is not None else None, withdrawal_requests=requests.withdrawal_requests() if requests is not None else None, - consolidation_requests=requests.consolidation_requests() - if requests is not None - else None, + consolidation_requests=( + requests.consolidation_requests() if requests is not None else None + ), ) diff --git a/src/ethereum_test_forks/__init__.py b/src/ethereum_test_forks/__init__.py index 456679967b..27185fd4f3 100644 --- a/src/ethereum_test_forks/__init__.py +++ b/src/ethereum_test_forks/__init__.py @@ -10,6 +10,7 @@ Cancun, Constantinople, ConstantinopleFix, + EIP6800Transition, Frontier, GrayGlacier, Homestead, @@ -19,6 +20,7 @@ Paris, Prague, Shanghai, + ShanghaiEIP6800, ) from .forks.transition import ( BerlinToLondonAt5, @@ -60,6 +62,8 @@ "MuirGlacier", "Shanghai", "ShanghaiToCancunAtTime15k", + "ShanghaiEIP6800", + "EIP6800Transition", "Cancun", "Prague", "get_transition_forks", diff --git a/src/ethereum_test_forks/base_fork.py b/src/ethereum_test_forks/base_fork.py index 2e845caa4a..d39ea48c78 100644 --- a/src/ethereum_test_forks/base_fork.py +++ b/src/ethereum_test_forks/base_fork.py @@ -221,6 +221,22 @@ def pre_allocation_blockchain(cls) -> Mapping: """ pass + @classmethod + @abstractmethod + def environment_verkle_conversion_starts(cls) -> bool: + """ + Returns true if the fork starts the verkle conversion process. + """ + pass + + @classmethod + @abstractmethod + def environment_verkle_conversion_completed(cls) -> bool: + """ + Returns true if verkle conversion must have been completed by this fork. + """ + pass + # Engine API information abstract methods @classmethod @abstractmethod diff --git a/src/ethereum_test_forks/forks/constants.py b/src/ethereum_test_forks/forks/constants.py new file mode 100644 index 0000000000..9a5f21aa9f --- /dev/null +++ b/src/ethereum_test_forks/forks/constants.py @@ -0,0 +1,62 @@ +""" +Constant values used by the forks. +""" + +from typing import Dict, Generator, Iterator, Mapping, Tuple + +from Crypto.Hash import SHA256 + +# TODO: Use for large verkle conversion init MPT +MAX_ACCOUNTS = 1000 +MAX_NONCE = 2**64 - 1 +MAX_BALANCE = 2**256 - 1 +MAX_STORAGE_SLOTS_PER_ACCOUNT = 1000 +MAX_ACCOUNT_CODE_SIZE = 2**14 + 2**13 # EIP-170 + + +def seed_generator(seed: int) -> Generator[int, None, None]: + """ + Generate a seed using the SHA256 hash function. 
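For reference, a minimal dependency-free sketch of the same chained-hash idea, using hashlib instead of the Crypto.Hash.SHA256 import that constants.py actually uses (hashlib is an illustrative substitution only): each value is the SHA-256 digest of the previous value's 256-byte big-endian encoding, so the stream is fully deterministic for a given starting seed.

import hashlib
from typing import Generator


def sha256_seed_stream(seed: int) -> Generator[int, None, None]:
    # Repeatedly hash the 256-byte big-endian encoding of the current value.
    while True:
        seed = int.from_bytes(
            hashlib.sha256(seed.to_bytes(length=256, byteorder="big")).digest(),
            byteorder="big",
        )
        yield seed


# The stream is reproducible: two generators started from the same seed agree.
assert next(sha256_seed_stream(0)) == next(sha256_seed_stream(0))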
+ """ + seed = int.from_bytes( + bytes=SHA256.new(data=seed.to_bytes(length=256, byteorder="big")).digest(), byteorder="big" + ) + while True: + yield seed + seed = int.from_bytes( + bytes=SHA256.new(data=seed.to_bytes(length=256, byteorder="big")).digest(), + byteorder="big", + ) + + +def storage_generator( + seed: Iterator[int], max_slots: int +) -> Generator[Tuple[int, int], None, None]: + """ + Generate storage slots for an account. + """ + MAX_KEY_VALUE = 2**256 - 1 + for _ in range(max_slots): + yield next(seed) % MAX_KEY_VALUE, next(seed) % MAX_KEY_VALUE + + +def account_generator( + seed: Iterator[int], max_accounts: int +) -> Generator[Tuple[int, Dict[str, str | int | Dict[int, int]]], None, None]: + """ + Generate accounts. + """ + for _ in range(max_accounts): + storage_g = storage_generator(seed, next(seed) % MAX_STORAGE_SLOTS_PER_ACCOUNT) + yield next(seed) % 2**160, { + "nonce": next(seed) % MAX_NONCE, + "balance": next(seed) % MAX_BALANCE, + "storage": {k: v for k, v in storage_g}, + "code": "0x" + "00" * 32, + } + + +VERKLE_PRE_ALLOCATION: Mapping = { + addr: account + for addr, account in account_generator(seed=seed_generator(0), max_accounts=MAX_ACCOUNTS) +} diff --git a/src/ethereum_test_forks/forks/forks.py b/src/ethereum_test_forks/forks/forks.py index 18ce8fa33b..ae56ae7c1b 100644 --- a/src/ethereum_test_forks/forks/forks.py +++ b/src/ethereum_test_forks/forks/forks.py @@ -10,6 +10,7 @@ from semver import Version from ..base_fork import BaseFork +from ..transition_base_fork import transition_fork CURRENT_FILE = Path(realpath(__file__)) CURRENT_FOLDER = CURRENT_FILE.parent @@ -188,6 +189,20 @@ def pre_allocation_blockchain(cls) -> Mapping: """ return {} + @classmethod + def environment_verkle_conversion_starts(cls) -> bool: + """ + Returns true if the fork starts the verkle conversion process. + """ + return False + + @classmethod + def environment_verkle_conversion_completed(cls) -> bool: + """ + Returns true if verkle conversion must have been completed by this fork. + """ + return False + class Homestead(Frontier): """ @@ -615,7 +630,65 @@ def engine_forkchoice_updated_version( return 3 -class CancunEIP7692( # noqa: SC200 +class ShanghaiEIP6800( + Shanghai, + transition_tool_name="Prague", + blockchain_test_network_name="Prague", + solc_name="shanghai", +): + """ + Shanghai + EIP-6800 (Verkle) fork + """ + + @classmethod + def is_deployed(cls) -> bool: + """ + Flags that the fork has not been deployed to mainnet; it is under active + development. + """ + return False + + @classmethod + def environment_verkle_conversion_completed(cls) -> bool: + """ + Verkle conversion has already completed in this fork. + """ + return True + + @classmethod + def pre_allocation_blockchain(cls) -> Mapping: + """ + Verkle requires pre-allocation of the history storage contract for EIP-2935 on blockchain + type tests. 
+ """ + new_allocation = { + 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE: { + "nonce": 1, + "code": ( + "0x60203611603157600143035f35116029575f35612000014311602957612000" + "5f3506545f5260205ff35b5f5f5260205ff35b5f5ffd00" + ), + } + } + # TODO: Utilize when testing for large init MPT + # return VERKLE_PRE_ALLOCATION | super(Shanghai, cls).pre_allocation() + return new_allocation | super(Shanghai, cls).pre_allocation_blockchain() + + +# TODO: move back to transition.py after filling and executing ShanghaiEIP6800 tests successfully +@transition_fork(to_fork=ShanghaiEIP6800, at_timestamp=32) +class EIP6800Transition( + Shanghai, + blockchain_test_network_name="ShanghaiToPragueAtTime32", +): + """ + Shanghai to Verkle transition at Timestamp 32. + """ + + pass + + +class CancunEIP7692( Cancun, transition_tool_name="Prague", # Evmone enables (only) EOF at Prague blockchain_test_network_name="Prague", # Evmone enables (only) EOF at Prague diff --git a/src/ethereum_test_forks/forks/transition.py b/src/ethereum_test_forks/forks/transition.py index 76b9a25eb1..8d442afff4 100644 --- a/src/ethereum_test_forks/forks/transition.py +++ b/src/ethereum_test_forks/forks/transition.py @@ -1,11 +1,11 @@ """ List of all transition fork definitions. """ + from ..transition_base_fork import transition_fork from .forks import Berlin, Cancun, London, Paris, Prague, Shanghai -# Transition Forks @transition_fork(to_fork=London, at_block=5) class BerlinToLondonAt5(Berlin): """ diff --git a/src/ethereum_test_forks/helpers.py b/src/ethereum_test_forks/helpers.py index 62da80568d..b77d6ad688 100644 --- a/src/ethereum_test_forks/helpers.py +++ b/src/ethereum_test_forks/helpers.py @@ -1,6 +1,7 @@ """ Helper methods to resolve forks during test filling """ + from typing import List, Optional from semver import Version @@ -32,6 +33,9 @@ def get_forks() -> List[Fork]: continue if issubclass(fork, BaseFork) and fork is not BaseFork: all_forks.append(fork) + + all_forks += get_transition_forks(always_execute=True) + return all_forks @@ -87,7 +91,7 @@ def get_closest_fork_with_solc_support(fork: Fork, solc_version: Version) -> Opt ) -def get_transition_forks() -> List[Fork]: +def get_transition_forks(always_execute: bool = False) -> List[Fork]: """ Returns all the transition forks """ @@ -98,6 +102,8 @@ def get_transition_forks() -> List[Fork]: if not isinstance(fork, type): continue if issubclass(fork, TransitionBaseClass) and issubclass(fork, BaseFork): + if always_execute and not fork.always_execute(): + continue transition_forks.append(fork) return transition_forks diff --git a/src/ethereum_test_forks/transition_base_fork.py b/src/ethereum_test_forks/transition_base_fork.py index e067d89f6e..241a523b2a 100644 --- a/src/ethereum_test_forks/transition_base_fork.py +++ b/src/ethereum_test_forks/transition_base_fork.py @@ -30,6 +30,14 @@ def transitions_from(cls) -> Fork: """ raise Exception("Not implemented") + @classmethod + def always_execute(cls) -> bool: + """ + Whether the transition fork should be treated as a normal fork and all tests should + be filled with it. + """ + raise Exception("Not implemented") + def base_fork_abstract_methods() -> List[str]: """ @@ -38,7 +46,9 @@ def base_fork_abstract_methods() -> List[str]: return list(getattr(BaseFork, "__abstractmethods__")) -def transition_fork(to_fork: Fork, at_block: int = 0, at_timestamp: int = 0): +def transition_fork( + to_fork: Fork, at_block: int = 0, at_timestamp: int = 0, always_execute: bool = False +): """ Decorator to mark a class as a transition fork. 
""" @@ -102,6 +112,7 @@ def transition_method( NewTransitionClass.transitions_to = lambda: to_fork # type: ignore NewTransitionClass.transitions_from = lambda: from_fork # type: ignore + NewTransitionClass.always_execute = lambda: always_execute # type: ignore NewTransitionClass.fork_at = lambda block_number=0, timestamp=0: ( # type: ignore to_fork if block_number >= at_block and timestamp >= at_timestamp else from_fork ) diff --git a/src/ethereum_test_specs/base.py b/src/ethereum_test_specs/base.py index f839a47b7a..3a7bfb6f60 100644 --- a/src/ethereum_test_specs/base.py +++ b/src/ethereum_test_specs/base.py @@ -14,7 +14,7 @@ from ethereum_test_base_types import to_hex from ethereum_test_fixtures import BaseFixture, FixtureFormats from ethereum_test_forks import Fork -from ethereum_test_types import Environment, Transaction, Withdrawal +from ethereum_test_types import Alloc, Environment, Transaction, VerkleTree, Withdrawal from evm_transition_tool import Result, TransitionTool @@ -61,6 +61,30 @@ def verify_result(result: Result, env: Environment): assert result.withdrawals_root == to_hex(Withdrawal.list_root(env.withdrawals)) +def verify_post_vkt(t8n: TransitionTool, expected_post: Alloc, got_vkt: VerkleTree): + """ + Verify that the final verkle tree from t8n matches the expected post alloc defined within + the test. Raises exception on unexpected values. + """ + if not t8n.verkle_subcommand: + raise Exception("Only geth's evm tool is supported to verify verkle trees.") + + # Convert the expected post alloc to a verkle tree for comparison. + expected_vkt = t8n.from_mpt_to_vkt(mpt_alloc=expected_post) + + # TODO: utilize missing keys? + # Check for keys that are missing the actual VKT + _ = [key for key in expected_vkt.root if key not in got_vkt.root] + + # Compare the values for each key in the expected VKT + for key, expected_value in expected_vkt.root.items(): + actual_value = got_vkt.root.get(key) + if expected_value != actual_value: + raise Exception( + f"VKT mismatch at key {key}: expected {expected_value}, got {actual_value}" + ) + + class BaseTest(BaseModel): """ Represents a base Ethereum test which must return a single test fixture. diff --git a/src/ethereum_test_specs/blockchain.py b/src/ethereum_test_specs/blockchain.py index 6b4f7b44e4..e26905da88 100644 --- a/src/ethereum_test_specs/blockchain.py +++ b/src/ethereum_test_specs/blockchain.py @@ -18,6 +18,7 @@ HeaderNonce, HexNumber, Number, + to_json, ) from ethereum_test_exceptions import BlockException, EngineAPIError, TransactionException from ethereum_test_fixtures import BaseFixture, FixtureFormats @@ -35,7 +36,7 @@ FixtureWithdrawalRequest, InvalidFixtureBlock, ) -from ethereum_test_forks import Fork +from ethereum_test_forks import EIP6800Transition, Fork, ShanghaiEIP6800 from ethereum_test_types import ( Alloc, ConsolidationRequest, @@ -44,6 +45,7 @@ Removable, Requests, Transaction, + VerkleTree, Withdrawal, WithdrawalRequest, ) @@ -230,9 +232,9 @@ class Block(Header): An RLP modifying header which values would be used to override the ones returned by the `evm_transition_tool`. """ - exception: List[ - TransactionException | BlockException - ] | TransactionException | BlockException | None = None + exception: ( + List[TransactionException | BlockException] | TransactionException | BlockException | None + ) = None """ If set, the block is expected to be rejected by the client. 
""" @@ -364,13 +366,13 @@ def make_genesis( base_fee_per_gas=env.base_fee_per_gas, blob_gas_used=env.blob_gas_used, excess_blob_gas=env.excess_blob_gas, - withdrawals_root=Withdrawal.list_root(env.withdrawals) - if env.withdrawals is not None - else None, + withdrawals_root=( + Withdrawal.list_root(env.withdrawals) if env.withdrawals is not None else None + ), parent_beacon_block_root=env.parent_beacon_block_root, - requests_root=Requests(root=[]).trie_root - if fork.header_requests_required(0, 0) - else None, + requests_root=( + Requests(root=[]).trie_root if fork.header_requests_required(0, 0) else None + ), ) return ( @@ -393,8 +395,16 @@ def generate_block_data( block: Block, previous_env: Environment, previous_alloc: Alloc, + previous_vkt: Optional[VerkleTree] = None, eips: Optional[List[int]] = None, - ) -> Tuple[FixtureHeader, List[Transaction], Requests | None, Alloc, Environment]: + ) -> Tuple[ + Environment, + FixtureHeader, + List[Transaction], + Alloc, + Optional[Requests], + Optional[VerkleTree], + ]: """ Generate common block data for both make_fixture and make_hive_fixture. """ @@ -426,6 +436,7 @@ def generate_block_data( txs=txs, env=env, fork=fork, + vkt=to_json(previous_vkt) if previous_vkt is not None else None, chain_id=self.chain_id, reward=fork.get_reward(env.number, env.timestamp), eips=eips, @@ -440,6 +451,8 @@ def generate_block_data( pprint(transition_tool_output.result) pprint(previous_alloc) pprint(transition_tool_output.alloc) + if transition_tool_output.vkt is not None: + pprint(transition_tool_output.vkt) raise e if len(rejected_txs) > 0 and block.exception is None: @@ -504,12 +517,17 @@ def generate_block_data( requests = Requests(root=block.requests) header.requests_root = requests.trie_root + if fork.fork_at(env.number, env.timestamp) == ShanghaiEIP6800: + env.update_from_result(transition_tool_output.result) + transition_tool_output.alloc = previous_alloc + return ( + env, header, txs, - requests, transition_tool_output.alloc, - env, + requests, + transition_tool_output.vkt, ) def network_info(self, fork: Fork, eips: Optional[List[int]] = None): @@ -522,12 +540,25 @@ def network_info(self, fork: Fork, eips: Optional[List[int]] = None): else fork.blockchain_test_network_name() ) - def verify_post_state(self, t8n, alloc: Alloc): + def verify_post_state( + self, + env: Environment, + t8n: TransitionTool, + alloc: Alloc, + vkt: Optional[VerkleTree] = None, + ): """ - Verifies the post alloc after all block/s or payload/s are generated. + Verifies the post state after all block/s or payload/s are generated. 
""" try: - self.post.verify_post_alloc(alloc) + if env.verkle_conversion_started: + if vkt is not None: + pass # TODO: skip exact account verify checks + # verify_post_vkt(t8n=t8n, expected_post=self.post, got_vkt=vkt) + else: + raise Exception("vkt conversion started but no vkt was created.") + else: + self.post.verify_post_alloc(got_alloc=alloc) except Exception as e: print_traces(t8n.get_traces()) raise e @@ -548,52 +579,75 @@ def make_fixture( alloc = pre env = environment_from_parent_header(genesis.header) head = genesis.header.block_hash + vkt: Optional[VerkleTree] = None + + # Hack for filling naive verkle transition tests + if fork is EIP6800Transition: + # Add a dummy block before the fork transition + self.blocks.insert(0, Block(timestamp=HexNumber(fork.transition_at() - 1))) + # Set timestamp for the next block to verkle transition time + self.blocks[1].timestamp = HexNumber(fork.transition_at()) + # Increment all other block numbers + for i, block in enumerate(self.blocks[1:]): + block.number = HexNumber(i + 2) + # Add a dummy block at the end of the test blocks + self.blocks.append(Block()) for block in self.blocks: if block.rlp is None: # This is the most common case, the RLP needs to be constructed # based on the transactions to be included in the block. # Set the environment according to the block to execute. - header, txs, requests, new_alloc, new_env = self.generate_block_data( + new_env, header, txs, new_alloc, requests, new_vkt = self.generate_block_data( t8n=t8n, fork=fork, block=block, previous_env=env, previous_alloc=alloc, + previous_vkt=vkt, eips=eips, ) fixture_block = FixtureBlockBase( header=header, txs=[FixtureTransaction.from_transaction(tx) for tx in txs], ommers=[], - withdrawals=[FixtureWithdrawal.from_withdrawal(w) for w in new_env.withdrawals] - if new_env.withdrawals is not None - else None, - deposit_requests=[ - FixtureDepositRequest.from_deposit_request(d) - for d in requests.deposit_requests() - ] - if requests is not None - else None, - withdrawal_requests=[ - FixtureWithdrawalRequest.from_withdrawal_request(w) - for w in requests.withdrawal_requests() - ] - if requests is not None - else None, - consolidation_requests=[ - FixtureConsolidationRequest.from_consolidation_request(c) - for c in requests.consolidation_requests() - ] - if requests is not None - else None, + withdrawals=( + [FixtureWithdrawal.from_withdrawal(w) for w in new_env.withdrawals] + if new_env.withdrawals is not None + else None + ), + deposit_requests=( + [ + FixtureDepositRequest.from_deposit_request(d) + for d in requests.deposit_requests() + ] + if requests is not None + else None + ), + withdrawal_requests=( + [ + FixtureWithdrawalRequest.from_withdrawal_request(w) + for w in requests.withdrawal_requests() + ] + if requests is not None + else None + ), + consolidation_requests=( + [ + FixtureConsolidationRequest.from_consolidation_request(c) + for c in requests.consolidation_requests() + ] + if requests is not None + else None + ), ).with_rlp(txs=txs, requests=requests) if block.exception is None: fixture_blocks.append(fixture_block) - # Update env, alloc and last block hash for the next block. + # Update env, alloc, vkt, and last block hash for the next block. 
alloc = new_alloc env = apply_new_parent(new_env, header) head = header.block_hash + vkt = new_vkt else: fixture_blocks.append( InvalidFixtureBlock( @@ -618,7 +672,7 @@ def make_fixture( ), ) - self.verify_post_state(t8n, alloc) + self.verify_post_state(env=env, t8n=t8n, alloc=alloc, vkt=vkt) return Fixture( fork=self.network_info(fork, eips), genesis=genesis.header, @@ -626,7 +680,7 @@ def make_fixture( blocks=fixture_blocks, last_block_hash=head, pre=pre, - post_state=alloc, + # TODO: post_state=alloc ) def make_hive_fixture( @@ -644,10 +698,29 @@ def make_hive_fixture( alloc = pre env = environment_from_parent_header(genesis.header) head_hash = genesis.header.block_hash + vkt: Optional[VerkleTree] = None + + # Hack for filling naive verkle transition tests + if fork is EIP6800Transition: + # Add a dummy block before the fork transition + self.blocks.insert(0, Block(timestamp=HexNumber(fork.transition_at() - 1))) + # Set timestamp for the next block to verkle transition time + self.blocks[1].timestamp = HexNumber(fork.transition_at()) + # Increment all other block numbers + for i, block in enumerate(self.blocks[1:]): + block.number = HexNumber(i + 2) + # Add a dummy block at the end of the test blocks + self.blocks.append(Block()) for block in self.blocks: - header, txs, requests, new_alloc, new_env = self.generate_block_data( - t8n=t8n, fork=fork, block=block, previous_env=env, previous_alloc=alloc, eips=eips + new_env, header, txs, new_alloc, requests, new_vkt = self.generate_block_data( + t8n=t8n, + fork=fork, + block=block, + previous_env=env, + previous_alloc=alloc, + previous_vkt=vkt, + eips=eips, ) if block.rlp is None: fixture_payloads.append( @@ -663,15 +736,16 @@ def make_hive_fixture( ) if block.exception is None: alloc = new_alloc - env = apply_new_parent(env, header) + env = apply_new_parent(new_env, header) head_hash = header.block_hash + vkt = new_vkt fcu_version = fork.engine_forkchoice_updated_version(header.number, header.timestamp) assert ( fcu_version is not None ), "A hive fixture was requested but no forkchoice update is defined. The framework should" " never try to execute this test case." - self.verify_post_state(t8n, alloc) + self.verify_post_state(env=env, t8n=t8n, alloc=alloc, vkt=vkt) sync_payload: Optional[FixtureEngineNewPayload] = None if self.verify_sync: @@ -683,12 +757,13 @@ def make_hive_fixture( # Most clients require the header to start the sync process, so we create an empty # block on top of the last block of the test to send it as new payload and trigger the # sync process. 
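# (The unpacking below follows the new generate_block_data return order:
# env, header, txs, alloc, requests, vkt.)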
- sync_header, _, requests, _, _ = self.generate_block_data( + _, sync_header, _, _, requests, _ = self.generate_block_data( t8n=t8n, fork=fork, block=Block(), previous_env=env, previous_alloc=alloc, + previous_vkt=vkt, eips=eips, ) sync_payload = FixtureEngineNewPayload.from_fixture_header( @@ -707,7 +782,7 @@ def make_hive_fixture( payloads=fixture_payloads, fcu_version=fcu_version, pre=pre, - post_state=alloc, + # TODO: post_state=alloc sync_payload=sync_payload, last_block_hash=head_hash, ) diff --git a/src/ethereum_test_specs/state.py b/src/ethereum_test_specs/state.py index 18e6010d7b..35e9ca4168 100644 --- a/src/ethereum_test_specs/state.py +++ b/src/ethereum_test_specs/state.py @@ -4,6 +4,8 @@ from typing import Any, Callable, ClassVar, Dict, Generator, List, Optional, Type +import pytest + from ethereum_test_exceptions import EngineAPIError from ethereum_test_fixtures import BaseFixture, FixtureFormats from ethereum_test_fixtures.state import ( @@ -12,7 +14,7 @@ FixtureForkPost, FixtureTransaction, ) -from ethereum_test_forks import Fork +from ethereum_test_forks import EIP6800Transition, Fork from ethereum_test_types import Alloc, Environment, Transaction from evm_transition_tool import TransitionTool @@ -174,7 +176,10 @@ def generate( t8n=t8n, fork=fork, fixture_format=fixture_format, eips=eips ) elif fixture_format == FixtureFormats.STATE_TEST: - return self.make_state_test_fixture(t8n, fork, eips) + if fork is not EIP6800Transition: + return self.make_state_test_fixture(t8n, fork, eips) + else: + pytest.skip("State tests are not supported for EIP-6800 transition.") raise Exception(f"Unknown fixture format: {fixture_format}") diff --git a/src/ethereum_test_tools/__init__.py b/src/ethereum_test_tools/__init__.py index 5d5750b7c8..5558a1d7fb 100644 --- a/src/ethereum_test_tools/__init__.py +++ b/src/ethereum_test_tools/__init__.py @@ -45,6 +45,7 @@ Storage, TestParameterGroup, Transaction, + VerkleTree, Withdrawal, WithdrawalRequest, add_kzg_version, @@ -132,6 +133,7 @@ "TransactionException", "Withdrawal", "WithdrawalRequest", + "VerkleTree", "Yul", "YulCompiler", "add_kzg_version", diff --git a/src/ethereum_test_types/__init__.py b/src/ethereum_test_types/__init__.py index b8b0cf5aba..61557f0c67 100644 --- a/src/ethereum_test_types/__init__.py +++ b/src/ethereum_test_types/__init__.py @@ -27,6 +27,7 @@ Requests, Storage, Transaction, + VerkleTree, Withdrawal, WithdrawalRequest, ) @@ -53,6 +54,7 @@ "TestPrivateKey", "TestPrivateKey2", "Transaction", + "VerkleTree", "Withdrawal", "WithdrawalRequest", "ZeroPaddedHexNumber", diff --git a/src/ethereum_test_types/types.py b/src/ethereum_test_types/types.py index 5474ee4ac2..ce75ed4a3d 100644 --- a/src/ethereum_test_types/types.py +++ b/src/ethereum_test_types/types.py @@ -373,6 +373,14 @@ class Environment(EnvironmentGeneric[Number]): parent_blob_gas_used: Number | None = Field(None) parent_excess_blob_gas: Number | None = Field(None) parent_beacon_block_root: Hash | None = Field(None) + verkle_conversion_address: Address | None = Field(None, alias="currentConversionAddress") + verkle_conversion_slot_hash: Hash | None = Field(None, alias="currentConversionSlotHash") + verkle_conversion_started: bool | None = Field(None, alias="currentConversionStarted") + verkle_conversion_ended: bool | None = Field(None, alias="currentConversionEnded") + verkle_conversion_storage_processed: bool | None = Field( + None, + alias="currentConversionStorageProcessed", + ) block_hashes: Dict[Number, Hash] = Field(default_factory=dict) ommers: List[Hash] = 
Field(default_factory=list) @@ -439,8 +447,41 @@ def set_fork_requirements(self, fork: Fork) -> "Environment": ): updated_values["parent_beacon_block_root"] = 0 + if fork.environment_verkle_conversion_starts(): + if self.verkle_conversion_ended: + # Conversion is marked as completed if this is the genesis block, or we are + # past the conversion end fork. + updated_values["verkle_conversion_ended"] = ( + number == 0 or fork.environment_verkle_conversion_completed() + ) + return self.copy(**updated_values) + # TODO: move this function, importing the Result type creates a circular import. + def update_from_result(self, result: Any) -> "Environment": + """ + Updates the environment with the result of a transition tool execution. + """ + if result.verkle_conversion_address: + self.verkle_conversion_address = result.verkle_conversion_address + if result.verkle_conversion_slot_hash: + self.verkle_conversion_slot_hash = result.verkle_conversion_slot_hash + # Boolean fields required to check if not None so we actually update them even when False + if result.verkle_conversion_started is not None: + conversion_started = result.verkle_conversion_started + assert isinstance(conversion_started, bool) + self.verkle_conversion_started = result.verkle_conversion_started + if result.verkle_conversion_ended is not None: + conversion_ended = result.verkle_conversion_ended + assert isinstance(conversion_ended, bool) + self.verkle_conversion_ended = result.verkle_conversion_ended + if result.verkle_conversion_storage_processed is not None: + conversion_storage_processed = result.verkle_conversion_storage_processed + assert isinstance(conversion_storage_processed, bool) + self.verkle_conversion_storage_processed = conversion_storage_processed + + return self + class AccessList(CamelModel): """ @@ -1232,3 +1273,12 @@ def consolidation_requests(self) -> List[ConsolidationRequest]: Returns the list of consolidation requests. """ return [c for c in self.root if isinstance(c, ConsolidationRequest)] + + +# TODO: use a type like HashInt but that doesn't pad zero. DO NOT PAD THE ZEROS. KEEP ALL ZEROS. +class VerkleTree(RootModel[Dict[str, str]]): + """ + Definition of a verkle tree return from the geth t8n. 
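A small hedged sketch of the VerkleTree model defined above: a thin RootModel over a flat mapping of hex-string keys to hex-string values (in practice 32 bytes each), kept exactly as geth's t8n emits them, leading zeros included.

from ethereum_test_types import VerkleTree

tree = VerkleTree({"0x" + "00" * 32: "0x" + "0a" + "00" * 31})  # placeholder entry
assert list(tree.root) == ["0x" + "00" * 32]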
+ """ + + root: Dict[str, str] = Field(default_factory=dict) diff --git a/src/evm_transition_tool/besu.py b/src/evm_transition_tool/besu.py index 86f226a4bc..dc4ef6b4e1 100644 --- a/src/evm_transition_tool/besu.py +++ b/src/evm_transition_tool/besu.py @@ -10,7 +10,7 @@ import textwrap from pathlib import Path from re import compile -from typing import List, Optional +from typing import Any, List, Optional import requests @@ -99,6 +99,7 @@ def evaluate( txs: List[Transaction], env: Environment, fork: Fork, + vkt: Any = None, chain_id: int = 1, reward: int = 0, eips: Optional[List[int]] = None, diff --git a/src/evm_transition_tool/file_utils.py b/src/evm_transition_tool/file_utils.py index d7f6c903a6..d8b6b1538a 100644 --- a/src/evm_transition_tool/file_utils.py +++ b/src/evm_transition_tool/file_utils.py @@ -24,6 +24,8 @@ def dump_files_to_directory(output_path: str, files: Dict[str, Any]) -> None: """ os.makedirs(output_path, exist_ok=True) for file_rel_path_flags, file_contents in files.items(): + if file_contents is None: + continue file_rel_path, flags = ( file_rel_path_flags.split("+") if "+" in file_rel_path_flags diff --git a/src/evm_transition_tool/geth.py b/src/evm_transition_tool/geth.py index 87a805b0e4..e9fa10e9c2 100644 --- a/src/evm_transition_tool/geth.py +++ b/src/evm_transition_tool/geth.py @@ -3,14 +3,18 @@ """ import json +import os import shutil import subprocess +import tempfile import textwrap from pathlib import Path from re import compile from typing import Optional from ethereum_test_forks import Fork +from ethereum_test_tools.common.json import to_json +from ethereum_test_tools.common.types import Alloc, VerkleTree from .transition_tool import FixtureFormats, TransitionTool, dump_files_to_directory @@ -25,6 +29,7 @@ class GethTransitionTool(TransitionTool): t8n_subcommand: Optional[str] = "t8n" statetest_subcommand: Optional[str] = "statetest" blocktest_subcommand: Optional[str] = "blocktest" + verkle_subcommand: Optional[str] = "verkle" binary: Path cached_version: Optional[str] = None @@ -137,3 +142,39 @@ def verify_fixture( else: result_json = [] # there is no parseable format for blocktest output return result_json + + def from_mpt_to_vkt(self, mpt_alloc: Alloc) -> VerkleTree: + """ + Returns the verkle tree representation for an entire MPT alloc using the verkle subcommand. + """ + # Write the MPT alloc to a temporary file: alloc.json + with tempfile.TemporaryDirectory() as temp_dir: + input_dir = os.path.join(temp_dir, "input") + os.mkdir(input_dir) + alloc_path = os.path.join(input_dir, "alloc.json") + with open(alloc_path, "w") as f: + json.dump(to_json(mpt_alloc), f) + + # Check if the file was created + if not os.path.exists(alloc_path): + raise Exception(f"Failed to create alloc.json at {alloc_path}") + + # Run the verkle subcommand with the alloc.json file as input + command = [ + str(self.binary), + str(self.verkle_subcommand), + "tree-keys", + "--input.alloc", + alloc_path, + ] + result = subprocess.run( + command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + if result.returncode != 0: + raise Exception( + f"Failed to run verkle subcommand: '{' '.join(command)}'. 
" + f"Error: '{result.stderr.decode()}'" + ) + return VerkleTree(json.loads(result.stdout.decode())) diff --git a/src/evm_transition_tool/transition_tool.py b/src/evm_transition_tool/transition_tool.py index 9d485eabca..f341452d1f 100644 --- a/src/evm_transition_tool/transition_tool.py +++ b/src/evm_transition_tool/transition_tool.py @@ -13,11 +13,11 @@ from itertools import groupby from pathlib import Path from re import Pattern -from typing import Dict, List, Mapping, Optional, Type +from typing import Any, Dict, List, Mapping, Optional, Type from ethereum_test_fixtures import FixtureFormats, FixtureVerifier from ethereum_test_forks import Fork -from ethereum_test_types import Alloc, Environment, Transaction +from ethereum_test_types import Alloc, Environment, Transaction, VerkleTree from .file_utils import dump_files_to_directory, write_json_file from .types import TransactionReceipt, TransitionToolInput, TransitionToolOutput @@ -59,6 +59,7 @@ class TransitionTool(FixtureVerifier): blocktest_subcommand: Optional[str] = None cached_version: Optional[str] = None t8n_use_stream: bool = True + verkle_subcommand: Optional[str] = None # Abstract methods that each tool must implement @@ -247,6 +248,7 @@ class TransitionToolData: txs: List[Transaction] env: Environment fork_name: str + vkt: Any = None chain_id: int = field(default=1) reward: int = field(default=0) @@ -254,11 +256,7 @@ def to_input(self) -> TransitionToolInput: """ Convert the data to a TransactionToolInput object """ - return TransitionToolInput( - alloc=self.alloc, - txs=self.txs, - env=self.env, - ) + return TransitionToolInput(alloc=self.alloc, txs=self.txs, env=self.env, vkt=self.vkt) def _evaluate_filesystem( self, @@ -286,7 +284,6 @@ def _evaluate_filesystem( } output_paths["body"] = os.path.join("output", "txs.rlp") - # Construct args for evmone-t8n binary args = [ str(self.binary), "--state.fork", @@ -311,6 +308,13 @@ def _evaluate_filesystem( str(t8n_data.chain_id), ] + # TODO: Verkle specific logic, update when Verkle fork is confirmed. + if t8n_data.fork_name == "Prague": + output_paths["vkt"] = os.path.join("output", "vkt.json") + args.extend(["--output.vkt", output_paths["vkt"]]) + if t8n_data.vkt is not None: + args.extend(["--input.vkt", input_paths["vkt"]]) + if self.trace: args.append("--trace") @@ -402,13 +406,18 @@ def _evaluate_stream( output: TransitionToolOutput = TransitionToolOutput.model_validate_json(result.stdout) if debug_output_path: + files_to_dump = { + "output/alloc.json": output.alloc, + "output/result.json": output.result, + "output/txs.rlp": str(output.body), + } + # Only dump verkle if present + if output.vkt: + files_to_dump["output/vkt.json"] = output.vkt + dump_files_to_directory( debug_output_path, - { - "output/alloc.json": output.alloc, - "output/result.json": output.result, - "output/txs.rlp": str(output.body), - }, + files_to_dump, ) if self.trace: @@ -427,10 +436,9 @@ def construct_args_stream( if self.t8n_subcommand: command.append(self.t8n_subcommand) - args = command + [ - "--input.alloc=stdin", - "--input.txs=stdin", - "--input.env=stdin", + args = command + ["--input.alloc=stdin", "--input.txs=stdin", "--input.env=stdin"] + + args += [ "--output.result=stdout", "--output.alloc=stdout", "--output.body=stdout", @@ -439,6 +447,12 @@ def construct_args_stream( f"--state.reward={t8n_data.reward}", ] + # TODO: Verkle specific logic, update when Verkle fork is confirmed. 
+ if t8n_data.fork_name == "Prague": + args.append("--output.vkt=stdout") + if t8n_data.vkt is not None: + args.append("--input.vkt=stdin") + if self.trace: args.append("--trace") args.append(f"--output.basedir={temp_dir.name}") @@ -470,21 +484,27 @@ def dump_debug_stream( {t8n_call} < {debug_output_path}/stdin.txt """ ) + + files_to_dump = { + "args.py": args, + "input/alloc.json": stdin.alloc, + "input/env.json": stdin.env, + "input/txs.json": [ + tx.model_dump(mode="json", **model_dump_config) for tx in stdin.txs + ], + "returncode.txt": result.returncode, + "stdin.txt": stdin, + "stdout.txt": result.stdout.decode(), + "stderr.txt": result.stderr.decode(), + "t8n.sh+x": t8n_script, + } + # Only dump verkle if present + if stdin.vkt: + files_to_dump["input/vkt.json"] = stdin.vkt + dump_files_to_directory( debug_output_path, - { - "args.py": args, - "input/alloc.json": stdin.alloc, - "input/env.json": stdin.env, - "input/txs.json": [ - tx.model_dump(mode="json", **model_dump_config) for tx in stdin.txs - ], - "returncode.txt": result.returncode, - "stdin.txt": stdin, - "stdout.txt": result.stdout.decode(), - "stderr.txt": result.stderr.decode(), - "t8n.sh+x": t8n_script, - }, + files_to_dump, ) def evaluate( @@ -494,6 +514,7 @@ def evaluate( txs: List[Transaction], env: Environment, fork: Fork, + vkt: Any = None, chain_id: int = 1, reward: int = 0, eips: Optional[List[int]] = None, @@ -518,6 +539,7 @@ def evaluate( txs=txs, env=env, fork_name=fork_name, + vkt=vkt, chain_id=chain_id, reward=reward, ) @@ -545,3 +567,13 @@ def verify_fixture( raise NotImplementedError( "The `verify_fixture()` function is not supported by this tool. Use geth's evm tool." ) + + def from_mpt_to_vkt(self, mpt_alloc: Alloc) -> VerkleTree: + """ + Returns the verkle tree representation for an entire MPT alloc using the verkle subcommand. + + Currently only implemented by geth's evm. + """ + raise NotImplementedError( + "The `from_mpt_to_vkt` function is not supported by this tool. Use geth's evm tool." 
+ ) diff --git a/src/pytest_plugins/consume/hive_simulators/ruleset.py b/src/pytest_plugins/consume/hive_simulators/ruleset.py index 9af685d7a6..c53493f2c0 100644 --- a/src/pytest_plugins/consume/hive_simulators/ruleset.py +++ b/src/pytest_plugins/consume/hive_simulators/ruleset.py @@ -322,6 +322,21 @@ "HIVE_SHANGHAI_TIMESTAMP": 0, "HIVE_CANCUN_TIMESTAMP": 15000, }, + "ShanghaiToPragueAt32": { # TODO: Make this verkle specific + "HIVE_FORK_HOMESTEAD": 0, + "HIVE_FORK_TANGERINE": 0, + "HIVE_FORK_SPURIOUS": 0, + "HIVE_FORK_BYZANTIUM": 0, + "HIVE_FORK_CONSTANTINOPLE": 0, + "HIVE_FORK_PETERSBURG": 0, + "HIVE_FORK_ISTANBUL": 0, + "HIVE_FORK_BERLIN": 0, + "HIVE_FORK_LONDON": 0, + "HIVE_FORK_MERGE": 0, + "HIVE_TERMINAL_TOTAL_DIFFICULTY": 0, + "HIVE_SHANGHAI_TIMESTAMP": 0, + "HIVE_PRAGUE_TIMESTAMP": 32, + }, "Prague": { "HIVE_FORK_HOMESTEAD": 0, "HIVE_FORK_TANGERINE": 0, diff --git a/tests/osaka/eip6800_verkle_tree/__init__.py b/tests/osaka/eip6800_verkle_tree/__init__.py new file mode 100644 index 0000000000..41c353dafa --- /dev/null +++ b/tests/osaka/eip6800_verkle_tree/__init__.py @@ -0,0 +1,6 @@ +""" +Test cases for all EIPs in Verkle +""" + +VERKLE_FORK_NAME = "ShanghaiEIP6800" +VERKLE_TRANSITION_FORK_NAME = "EIP6800Transition" diff --git a/tests/osaka/eip6800_verkle_tree/test_verkle_from_mpt_conversion.py b/tests/osaka/eip6800_verkle_tree/test_verkle_from_mpt_conversion.py new file mode 100644 index 0000000000..062daf4181 --- /dev/null +++ b/tests/osaka/eip6800_verkle_tree/test_verkle_from_mpt_conversion.py @@ -0,0 +1,75 @@ +""" +abstract: Tests [EIP-6800: Ethereum state using a unified verkle tree](https://eips.ethereum.org/EIPS/eip-6800) + Test state tree conversion from MPT [EIP-6800: Ethereum state using a unified verkle tree](https://eips.ethereum.org/EIPS/eip-6800) + +""" # noqa: E501 + +from itertools import count +from typing import List, Mapping + +import pytest + +from ethereum_test_tools import Account, Address, Block, BlockchainTestFiller, Environment, Hash +from ethereum_test_tools import Opcodes as Op +from ethereum_test_tools import Storage, TestAddress, Transaction + +from . import VERKLE_TRANSITION_FORK_NAME + +code_address = Address(0x100) + +REFERENCE_SPEC_GIT_PATH = "EIPS/eip-6800.md" +REFERENCE_SPEC_VERSION = "2f8299df31bb8173618901a03a8366a3183479b0" + + +@pytest.fixture +def pre() -> Mapping: # noqa: D103 + return { + TestAddress: Account(balance=10**40), + code_address: Account( + nonce=1, + balance=1, + code=Op.SSTORE(Op.CALLDATALOAD(0), Op.CALLDATALOAD(32)), + ), + } + + +@pytest.mark.valid_from(VERKLE_TRANSITION_FORK_NAME) +def test_verkle_from_mpt_conversion( + blockchain_test: BlockchainTestFiller, + pre: Mapping[str, Account], +): + """ + Tests the basic case for the conversion from MPT to Verkle tree. 
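A hedged illustration of the calldata layout this test relies on: the target contract executes SSTORE(CALLDATALOAD(0), CALLDATALOAD(32)), so every transaction packs a 32-byte storage key followed by a 32-byte storage value.

from ethereum_test_tools import Hash

storage_key, storage_value = 1, 2**256 - 1
calldata = Hash(storage_key) + Hash(storage_value)
assert len(calldata) == 64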
+ """ + nonce = count() + block_count = 4 + tx_count = 64 + blocks: List[Block] = [] + code_storage = Storage() + for b in range(block_count): + txs: List[Transaction] = [] + for t in range(tx_count): + storage_value = 2**256 - t - 1 - b * tx_count + storage_key = code_storage.store_next(storage_value) + txs.append( + Transaction( + nonce=next(nonce), + to=code_address, + data=Hash(storage_key) + Hash(storage_value), + gas_limit=100_000, + ) + ) + blocks.append(Block(txs=txs)) + blockchain_test( + genesis_environment=Environment(), + pre=pre, + post={ + code_address: Account( + nonce=1, + balance=1, + code=Op.SSTORE(Op.CALLDATALOAD(0), Op.CALLDATALOAD(32)), + storage=code_storage, + ), + }, + blocks=blocks, + ) diff --git a/tests/osaka/eip6800_verkle_tree/tracker.md b/tests/osaka/eip6800_verkle_tree/tracker.md new file mode 100644 index 0000000000..2481217143 --- /dev/null +++ b/tests/osaka/eip6800_verkle_tree/tracker.md @@ -0,0 +1,23 @@ +# Verkle Testing Coverage Tracker + +## EIP-6800: Ethereum state using a unified verkle tree + +- [ ] +- ... + +## EIP-4762: Statelessness gas cost change + +- [ ] +- ... + +## EIP-7545: Verkle proof verification precompile + +- [ ] +- ... + +## EIP-2935: Save historical block hashes in state + +Note: this is CFI'd for Prague + +- [ ] +- ... diff --git a/whitelist.txt b/whitelist.txt index 6ff9a86747..5f47edf6ab 100644 --- a/whitelist.txt +++ b/whitelist.txt @@ -54,6 +54,7 @@ caller callvalue cancun cd +CFI chainid changelog chfast @@ -104,6 +105,8 @@ dunder EEST eip eip3540 +P6800 +P7692 eips EIPs eip6110 @@ -239,6 +242,7 @@ mixhash mkdocs mkdocstrings msm +mpt mypy namespace nav @@ -360,6 +364,7 @@ textwrap tf ThreeHrSleep time15k +Time32 timestamp tmp todo @@ -393,7 +398,9 @@ v3 validator validators venv +verkle visualstudio +vkt vm vscode vv