def check_live_port(test_suite_name: str) -> Literal[8545, 8551]:
    """
    Return the port hive probes to check client liveness.

    The RLP simulator talks plain JSON-RPC (8545); every engine-API-driven
    simulator (engine, sync, production) is probed on the Engine API port
    (8551). Raises ValueError for an unrecognized suite name.
    """
    if test_suite_name == "eest/consume-rlp":
        return 8545
    # All engine-API based simulators share the auth-RPC port.
    engine_based_suites = {
        "eest/consume-engine",
        "eest/consume-sync",
        "eest/consume-production",
    }
    if test_suite_name in engine_based_suites:
        return 8551
    raise ValueError(
        f"Unexpected test suite name '{test_suite_name}' while setting HIVE_CHECK_LIVE_PORT."
    )
+""" + +import io +import logging +from typing import Mapping + +import pytest +from hive.client import Client + +from ethereum_test_exceptions import ExceptionMapper +from ethereum_test_fixtures import BlockchainEngineFixture +from ethereum_test_rpc import EngineRPC + +pytest_plugins = ( + "pytest_plugins.pytest_hive.pytest_hive", + "pytest_plugins.consume.simulators.base", + "pytest_plugins.consume.simulators.single_test_client", + "pytest_plugins.consume.simulators.test_case_description", + "pytest_plugins.consume.simulators.timing_data", + "pytest_plugins.consume.simulators.exceptions", +) +logger = logging.getLogger(__name__) + + +def pytest_configure(config: pytest.Config) -> None: + """Set the supported fixture formats for the production simulator.""" + config.supported_fixture_formats = [BlockchainEngineFixture] # type: ignore[attr-defined] + + +def pytest_collection_modifyitems(items: list[pytest.Item]) -> None: + """ + Filter out tests that don't meet production simulator requirements. 
+ """ + with open("/tmp/production_filter_debug.log", "w") as f: + f.write("=" * 80 + "\n") + f.write("COLLECTION PHASE STARTING\n") + f.write("=" * 80 + "\n\n") + + for item in items: + if not hasattr(item, "callspec"): + continue + + # Check the actual function being called + test_function_name = item.function.__name__ if hasattr(item, "function") else None + + # Only process if this is a production test + if test_function_name != "test_blockchain_via_production": + continue + + f.write(f"\nTest: {item.nodeid}\n") + + # Get test_case from parameters + test_case = item.callspec.params.get("test_case") + if test_case is None: + f.write(" >>> No test_case in params, skipping <<<\n") + continue + + f.write(f" test_case type: {type(test_case).__name__}\n") + f.write(f" test_case.format: {test_case.format}\n") + + # Check if this is a BlockchainEngineFixture format + if test_case.format != BlockchainEngineFixture: + f.write(" >>> Not BlockchainEngineFixture format, skipping <<<\n") + continue + + f.write(" >>> Is BlockchainEngineFixture format <<<\n") + + # Now we need to actually load the fixture to check payloads + # Get the fixtures_source from config + fixtures_source = item.config.fixtures_source # type: ignore[attr-defined] + + # Load the fixture the same way the test does + from ethereum_test_fixtures.file import Fixtures + + if fixtures_source.is_stdin: + # For stdin, fixture is already in test_case + from ethereum_test_fixtures.consume import TestCaseStream + + if isinstance(test_case, TestCaseStream): + fixture = test_case.fixture + else: + f.write(" >>> Can't load fixture from stdin test_case <<<\n") + continue + else: + # For file-based, load from disk + from ethereum_test_fixtures.consume import TestCaseIndexFile + + if not isinstance(test_case, TestCaseIndexFile): + f.write(" >>> Not TestCaseIndexFile <<<\n") + continue + + fixtures_file_path = fixtures_source.path / test_case.json_path + f.write(f" Loading from: {fixtures_file_path}\n") + + if not 
fixtures_file_path.exists(): + f.write(" >>> File doesn't exist <<<\n") + continue + + fixtures = Fixtures.model_validate_json(fixtures_file_path.read_text()) + fixture = fixtures[test_case.id] + + f.write(f" Fixture loaded! Type: {type(fixture).__name__}\n") + + if not isinstance(fixture, BlockchainEngineFixture): + f.write(" >>> Loaded fixture is not BlockchainEngineFixture <<<\n") + continue + + f.write(f" Number of payloads: {len(fixture.payloads)}\n") + + # Filter: only single-transaction payloads + has_multi_tx_payload = False + has_invalid_payload = False + has_zero_tx_payload = False + + for i, payload in enumerate(fixture.payloads): + f.write(f"\n Payload {i}:\n") + + if hasattr(payload, "valid"): + try: + valid_result = payload.valid() + f.write(f" payload.valid() = {valid_result}\n") + except Exception as e: + f.write(f" payload.valid() ERROR: {e}\n") + + if hasattr(payload, "validation_error"): + f.write(f" payload.validation_error = {payload.validation_error}\n") + + if hasattr(payload, "error_code"): + f.write(f" payload.error_code = {payload.error_code}\n") + + # Count transactions + tx_count = len(payload.params[0].transactions) + f.write(f" Transaction count: {tx_count}\n") + + if tx_count == 0: + has_zero_tx_payload = True + break + + if tx_count > 1: + has_multi_tx_payload = True + break + + # Skip invalid payloads + should_skip = False + try: + if not payload.valid(): + should_skip = True + f.write(" payload.valid() returned False\n") + except: + pass + + if payload.validation_error is not None: + should_skip = True + f.write(" Has validation_error\n") + + if payload.error_code is not None: + should_skip = True + f.write(" Has error_code\n") + + if should_skip: + f.write(" >>> MARKING AS INVALID <<<\n") + has_invalid_payload = True + break + + if has_zero_tx_payload: + f.write("\n >>> WILL SKIP: zero transactions <<<\n") + item.add_marker( + pytest.mark.skip( + reason="Production simulator: zero-transaction payloads not supported" + ) + ) + elif 
has_multi_tx_payload: + f.write("\n >>> WILL SKIP: multiple transactions <<<\n") + item.add_marker( + pytest.mark.skip( + reason="Production simulator: multi-transaction payloads not supported" + ) + ) + elif has_invalid_payload: + f.write("\n >>> WILL SKIP: invalid payload <<<\n") + item.add_marker( + pytest.mark.skip( + reason="Production simulator: only tests valid block production" + ) + ) + else: + f.write("\n >>> TEST WILL RUN <<<\n") + + +@pytest.fixture(scope="function") +def engine_rpc(client: Client, client_exception_mapper: ExceptionMapper | None) -> EngineRPC: + """Initialize engine RPC client for the execution client under test.""" + if client_exception_mapper: + return EngineRPC( + f"http://{client.ip}:8551", + response_validation_context={ + "exception_mapper": client_exception_mapper, + }, + ) + return EngineRPC(f"http://{client.ip}:8551") + + +@pytest.fixture(scope="module") +def test_suite_name() -> str: + """The name of the hive test suite used in this simulator.""" + return "eest/consume-production" + + +@pytest.fixture(scope="module") +def test_suite_description() -> str: + """The description of the hive test suite used in this simulator.""" + return ( + "Test block PRODUCTION (not validation) by having clients build blocks from " + "mempool transactions using forkchoiceUpdated + getPayload flow." 
+ ) + + +@pytest.fixture(scope="function") +def client_files(buffered_genesis: io.BufferedReader) -> Mapping[str, io.BufferedReader]: + """Define the files that hive will start the client with.""" + files = {} + files["/genesis.json"] = buffered_genesis + return files diff --git a/src/pytest_plugins/consume/simulators/production/helpers/__init__.py b/src/pytest_plugins/consume/simulators/production/helpers/__init__.py new file mode 100644 index 00000000000..5e2d8b6635f --- /dev/null +++ b/src/pytest_plugins/consume/simulators/production/helpers/__init__.py @@ -0,0 +1 @@ +"""Helper functions for production simulator.""" diff --git a/src/pytest_plugins/consume/simulators/production/helpers/block_building.py b/src/pytest_plugins/consume/simulators/production/helpers/block_building.py new file mode 100644 index 00000000000..85bc6439624 --- /dev/null +++ b/src/pytest_plugins/consume/simulators/production/helpers/block_building.py @@ -0,0 +1,62 @@ +"""Helper functions for block production testing.""" + +import time +from typing import Any + +from ethereum_test_base_types import Bytes, Hash +from ethereum_test_rpc import EthRPC + + +def wait_for_transaction_in_mempool( + eth_rpc: EthRPC, + tx_hash: Hash, + timeout: int = 10, + poll_interval: float = 0.1, +) -> bool: + """ + Wait for a transaction to appear in the mempool. + + Returns True if transaction found, False if timeout reached. + """ + start = time.time() + while time.time() - start < timeout: + try: + tx = eth_rpc.get_transaction_by_hash(tx_hash) + if tx is not None: + return True + except Exception: + pass + time.sleep(poll_interval) + + return False + + +def wait_for_payload_ready( + engine_rpc: Any, + payload_id: Bytes, + get_payload_version: int, + timeout: float = 5.0, + poll_interval: float = 0.1, +) -> Any: + """ + Poll until payload is ready to be retrieved. + + Returns the built payload response when ready. + Raises TimeoutError if not ready within timeout. 
+ """ + start = time.time() + last_exception = None + + while time.time() - start < timeout: + try: + built_payload_response = engine_rpc.get_payload( + payload_id=payload_id, + version=get_payload_version, + ) + return built_payload_response + except Exception as e: + last_exception = e + time.sleep(poll_interval) + + elapsed = time.time() - start + raise TimeoutError(f"Payload not ready after {elapsed:.2f}s. Last error: {last_exception}") diff --git a/src/pytest_plugins/consume/simulators/simulator_logic/test_via_production.py b/src/pytest_plugins/consume/simulators/simulator_logic/test_via_production.py new file mode 100644 index 00000000000..67840c4ed43 --- /dev/null +++ b/src/pytest_plugins/consume/simulators/simulator_logic/test_via_production.py @@ -0,0 +1,434 @@ +""" +Test block production by having clients build blocks from mempool transactions. + +This tests the actual block PRODUCTION code path (mining/building), not just +validation. This catches bugs like the Erigon InitializeBlockExecution issue +where the validation path worked but production path failed. + +Flow: +1. Initialize client with genesis +2. Send transaction to mempool via eth_sendRawTransaction +3. Verify transaction is in mempool +4. Request block building via engine_forkchoiceUpdated (with payload_attributes) +5. Poll until block is built +6. Retrieve built block via engine_getPayload +7. Verify block matches expected state/gas/etc from fixture +8. Submit block via engine_newPayload (verify client accepts its own work) +9. Finalize via engine_forkchoiceUpdated +10. 
Verify transaction executed successfully +""" + +import time + +from ethereum_test_base_types import Hash +from ethereum_test_fixtures import BlockchainEngineFixture +from ethereum_test_rpc import EngineRPC, EthRPC +from ethereum_test_rpc.rpc_types import ( + ForkchoiceState, + PayloadAttributes, + PayloadStatusEnum, +) +from ethereum_test_types.trie import keccak256 + +from ....custom_logging import get_logger +from ..helpers.exceptions import GenesisBlockMismatchExceptionError +from ..helpers.timing import TimingData +from ..production.helpers.block_building import ( + wait_for_payload_ready, + wait_for_transaction_in_mempool, +) + +logger = get_logger(__name__) + +MAX_RETRIES = 30 +DELAY_BETWEEN_RETRIES_IN_SEC = 1 + + +class LoggedError(Exception): + """Exception that uses the logger to log the failure.""" + + def __init__(self, *args: object) -> None: + """Initialize the exception and log the failure.""" + super().__init__(*args) + logger.fail(str(self)) + + +def _get_payload_version(fork_str: str) -> int: + """ + Determine correct getPayload version based on fork name. + + CRITICAL: Osaka requires getPayloadV5, Prague requires V4! + + Fork | forkchoiceUpdated | getPayload | newPayload + ----------|-------------------|------------|----------- + Paris | V1 | V1 | V1 + Shanghai | V2 | V2 | V2 + Cancun | V3 | V3 | V3 + Prague | V3 | V4 | V4 + Osaka | V3 | V5 | V4 + """ + if "Osaka" in fork_str or "Amsterdam" in fork_str or "BPO" in fork_str: + return 5 + elif "Prague" in fork_str: + return 4 + elif "Cancun" in fork_str: + return 3 + elif "Shanghai" in fork_str: + return 2 + else: + return 1 + + +def test_blockchain_via_production( + timing_data: TimingData, + eth_rpc: EthRPC, + engine_rpc: EngineRPC, + fixture: BlockchainEngineFixture, +) -> None: + """ + Test block production by having the client build blocks from mempool. 
+ + Key difference from consume engine (validation): + - consume engine: gives client pre-built blocks to validate + - consume production: client BUILDS the blocks from transactions + + This tests the mining/production code path that validators use. + """ + # Send initial forkchoice update to genesis + with timing_data.time("Initial forkchoice update"): + logger.info("Sending initial forkchoice update to genesis block...") + for attempt in range(1, MAX_RETRIES + 1): + forkchoice_response = engine_rpc.forkchoice_updated( + forkchoice_state=ForkchoiceState( + head_block_hash=fixture.genesis.block_hash, + ), + payload_attributes=None, + version=fixture.payloads[0].forkchoice_updated_version, + ) + status = forkchoice_response.payload_status.status + logger.info(f"Initial forkchoice update response attempt {attempt}: {status}") + if status != PayloadStatusEnum.SYNCING: + break + + if attempt < MAX_RETRIES: + time.sleep(DELAY_BETWEEN_RETRIES_IN_SEC) + + if forkchoice_response.payload_status.status != PayloadStatusEnum.VALID: + logger.error( + f"Client failed to initialize properly after {MAX_RETRIES} attempts, " + f"final status: {forkchoice_response.payload_status.status}" + ) + raise LoggedError( + f"unexpected status on forkchoice updated to genesis: {forkchoice_response}" + ) + + # Verify genesis + with timing_data.time("Get genesis block"): + logger.info("Calling getBlockByNumber to get genesis block...") + genesis_block = eth_rpc.get_block_by_number(0) + assert genesis_block is not None, "genesis_block is None" + if genesis_block["hash"] != str(fixture.genesis.block_hash): + expected = fixture.genesis.block_hash + got = genesis_block["hash"] + logger.fail(f"Genesis block hash mismatch. 
Expected: {expected}, Got: {got}") + raise GenesisBlockMismatchExceptionError( + expected_header=fixture.genesis, + got_genesis_block=genesis_block, + ) + + fork_str = str(fixture.fork) + get_payload_version = _get_payload_version(fork_str) + logger.info( + f"Fork: {fork_str}, " + f"Using getPayloadV{get_payload_version}, " + f"newPayloadV{fixture.payloads[0].new_payload_version}, " + f"forkchoiceUpdatedV{fixture.payloads[0].forkchoice_updated_version}" + ) + + # Process each payload by having client BUILD the block + with timing_data.time("Block production") as total_production_timing: + logger.info(f"Starting production of {len(fixture.payloads)} blocks...") + + for i, payload in enumerate(fixture.payloads): + logger.info(f"Processing payload {i + 1}/{len(fixture.payloads)}...") + + with total_production_timing.time(f"Payload {i + 1}") as payload_timing: + # Extract the transaction from the payload + expected_execution_payload = payload.params[0] + transactions = expected_execution_payload.transactions + + # Single transaction check (enforced by conftest filtering) + assert len(transactions) == 1, ( + "Production simulator requires exactly 1 transaction per payload " + "(should be filtered by conftest)" + ) + + tx_rlp = transactions[0] + + # Step 1: Send transaction to mempool and verify it's there + with payload_timing.time("eth_sendRawTransaction + mempool verification"): + logger.info("Sending transaction to mempool...") + tx_hash = eth_rpc.send_raw_transaction(tx_rlp) + logger.info(f"Transaction sent: {tx_hash}") + + # Wait for transaction to appear in mempool + logger.info("Verifying transaction is in mempool...") + if not wait_for_transaction_in_mempool(eth_rpc, tx_hash, timeout=5): + raise LoggedError( + f"Transaction {tx_hash} not in mempool after 5s. " + f"Client may not be accepting transactions." 
+ ) + + logger.info(f"Transaction confirmed in mempool: {tx_hash}") + + # Give the client additional time for the transaction to be + # processed into the pending pool that the block builder uses. + logger.info("Waiting for transaction to be processed into pending pool...") + time.sleep(2.0) + + # Step 2: Request block building + with payload_timing.time("engine_forkchoiceUpdated (request build)"): + logger.info("Requesting block building...") + + # Get current head + head_block = eth_rpc.get_block_by_number("latest") + assert head_block is not None + + # Get parent_beacon_block_root from payload params if present (Cancun+) + # params[0] = execution_payload + # params[1] = expected_blob_versioned_hashes (if blobs present) + # params[2] = parent_beacon_block_root (Cancun+) + parent_beacon_block_root = None + if len(payload.params) >= 3: + parent_beacon_block_root = payload.params[2] + + # Create payload attributes to trigger building + # Handle different fork versions: + # - PayloadAttributesV1 (Paris): no withdrawals, no parent_beacon_block_root + # - PayloadAttributesV2 (Shanghai): has withdrawals, no parent_beacon_block_root + # - PayloadAttributesV3 (Cancun+): has withdrawals, has parent_beacon_block_root + + # Build PayloadAttributes conditionally based on available fields + if parent_beacon_block_root is not None: + # Cancun+ (V3): has parent_beacon_block_root + payload_attributes = PayloadAttributes( + timestamp=expected_execution_payload.timestamp, + prev_randao=expected_execution_payload.prev_randao, + suggested_fee_recipient=expected_execution_payload.fee_recipient, + withdrawals=getattr(expected_execution_payload, "withdrawals", None), + parent_beacon_block_root=parent_beacon_block_root, + ) + elif hasattr(expected_execution_payload, "withdrawals"): + # Shanghai (V2): has withdrawals but no parent_beacon_block_root + payload_attributes = PayloadAttributes( + timestamp=expected_execution_payload.timestamp, + 
prev_randao=expected_execution_payload.prev_randao, + suggested_fee_recipient=expected_execution_payload.fee_recipient, + withdrawals=expected_execution_payload.withdrawals, + ) + else: + # Paris (V1): no withdrawals, no parent_beacon_block_root + payload_attributes = PayloadAttributes( + timestamp=expected_execution_payload.timestamp, + prev_randao=expected_execution_payload.prev_randao, + suggested_fee_recipient=expected_execution_payload.fee_recipient, + ) + + forkchoice_response = engine_rpc.forkchoice_updated( + forkchoice_state=ForkchoiceState( + head_block_hash=head_block["hash"], + ), + payload_attributes=payload_attributes, + version=payload.forkchoice_updated_version, + ) + + if forkchoice_response.payload_status.status != PayloadStatusEnum.VALID: + raise LoggedError( + f"Forkchoice update for building failed: {forkchoice_response}" + ) + + payload_id = forkchoice_response.payload_id + if payload_id is None: + raise LoggedError("No payload_id returned from forkchoice_updated") + + logger.info(f"Block building requested, payload_id: {payload_id}") + + # Step 3: Poll until block is built + with payload_timing.time("Wait for block building"): + logger.info("Waiting for client to build block...") + # Give client time to select transactions from mempool + # Most clients need at least 1-2 seconds to build a proper block + time.sleep(3.0) + try: + built_payload_response = wait_for_payload_ready( + engine_rpc=engine_rpc, + payload_id=payload_id, + get_payload_version=get_payload_version, + timeout=10.0, + poll_interval=0.5, + ) + logger.info("Block building complete!") + except TimeoutError as e: + raise LoggedError( + f"Block not ready after timeout. " + f"Check if client is actually building blocks. 
" + f"Error: {e}" + ) from e + + # Step 4: Verify the built block + built_execution_payload = built_payload_response.execution_payload + logger.info(f"Got built block: {built_execution_payload.block_hash}") + + with payload_timing.time("Verify built block"): + logger.info("Verifying built block against fixture expectations...") + + # Check transaction is included (need to hash the RLP to compare) + built_tx_hashes = [ + Hash(keccak256(tx)) for tx in built_execution_payload.transactions + ] + if tx_hash not in built_tx_hashes: + raise LoggedError( + f"Built block doesn't contain our transaction {tx_hash}. " + f"Found transactions: {built_tx_hashes}" + ) + + # Check gas used matches expectations + expected_gas = expected_execution_payload.gas_used + actual_gas = built_execution_payload.gas_used + if actual_gas != expected_gas: + gas_diff = actual_gas - expected_gas + raise LoggedError( + f"Gas mismatch: expected {expected_gas}, got {actual_gas} " + f"(diff: {gas_diff:+d}). " + f"This indicates the client's block building code has issues " + f"(like the Erigon InitializeBlockExecution bug). " + f"Expected gas includes system contract initialization." + ) + + # Check state root matches + expected_state_root = expected_execution_payload.state_root + actual_state_root = built_execution_payload.state_root + if actual_state_root != expected_state_root: + raise LoggedError( + f"State root mismatch: expected {expected_state_root}, " + f"got {actual_state_root}. " + f"Client's state transition during block building is incorrect." 
+ ) + + logger.info("Built block verification passed!") + + # Step 5: Submit the built block back to client (sanity check) + with payload_timing.time(f"engine_newPayloadV{payload.new_payload_version}"): + logger.info("Submitting built block back to client...") + + # Reconstruct newPayload args with the BUILT execution payload + # but keep other params (blob hashes, parent beacon block root, execution requests) + # from the original fixture + if payload.new_payload_version == 1: + # V1 (Paris): Just execution payload + new_payload_args = [built_execution_payload] + elif payload.new_payload_version == 2: + # V2 (Shanghai): Just execution payload (withdrawals are inside it) + new_payload_args = [built_execution_payload] + elif payload.new_payload_version == 3: + # V3 (Cancun): execution_payload, blob_hashes, parent_beacon_block_root + blob_hashes = ( + built_payload_response.blobs_bundle.blob_versioned_hashes() + if built_payload_response.blobs_bundle is not None + else [] + ) + pbr = ( + payload.params[2] + if len(payload.params) >= 3 + else parent_beacon_block_root + ) + new_payload_args = [ + built_execution_payload, + blob_hashes, + pbr, # parent_beacon_block_root from fixture + ] + elif payload.new_payload_version in [4, 5]: + # V4/V5 (Prague/Osaka): + execution_requests + blob_hashes = ( + built_payload_response.blobs_bundle.blob_versioned_hashes() + if built_payload_response.blobs_bundle is not None + else [] + ) + execution_requests = ( + built_payload_response.execution_requests + if built_payload_response.execution_requests is not None + else [] + ) + pbr = ( + payload.params[2] + if len(payload.params) >= 3 + else parent_beacon_block_root + ) + new_payload_args = [ + built_execution_payload, + blob_hashes, + pbr, # parent_beacon_block_root from fixture + execution_requests, + ] + else: + raise LoggedError( + f"Unsupported newPayload version: {payload.new_payload_version}" + ) + + new_payload_response = engine_rpc.new_payload( + *new_payload_args, + 
version=payload.new_payload_version, + ) + + if new_payload_response.status != PayloadStatusEnum.VALID: + raise LoggedError( + f"Client rejected its own built block! " + f"Status: {new_payload_response.status}, " + f"Validation error: {new_payload_response.validation_error}" + ) + + logger.info("Client accepted its own built block!") + + # Step 6: Finalize the block + with payload_timing.time( + f"engine_forkchoiceUpdatedV{payload.forkchoice_updated_version} (finalize)" + ): + logger.info("Finalizing built block...") + + forkchoice_response = engine_rpc.forkchoice_updated( + forkchoice_state=ForkchoiceState( + head_block_hash=built_execution_payload.block_hash, + ), + payload_attributes=None, + version=payload.forkchoice_updated_version, + ) + + if forkchoice_response.payload_status.status != PayloadStatusEnum.VALID: + raise LoggedError( + f"Forkchoice update for finalization failed: {forkchoice_response}" + ) + + logger.info("Block finalized successfully!") + + # Step 7: Verify transaction executed successfully + with payload_timing.time("Verify transaction execution"): + logger.info("Checking transaction receipt...") + receipt = eth_rpc.get_transaction_receipt(tx_hash) + + if receipt is None: + raise LoggedError( + f"No receipt for transaction {tx_hash}. " + f"Transaction may not have been included in the finalized block." + ) + + if receipt["status"] != "0x1": + raise LoggedError( + f"Transaction {tx_hash} reverted (status: {receipt['status']})! " + f"This indicates the production code failed to initialize properly " + f"(like the Erigon bug with beacon roots not being initialized). " + f"The transaction expected system contracts to be ready." + ) + + logger.info("Transaction executed successfully!") + + logger.info("All blocks produced and verified successfully!")