
feat(fill): emphasize that no tests were executed during a fill pytest session (#887)

* feat(forks): improve language; 'tests executed' -> 'generated'

* feat(fill): emphasize no (client) tests were run in summary

* docs: update changelog

* fix(fill): remove custom fields from stats; this breaks pytester

* feat(fill): add more emphasis to 'No tests executed' summary line

* feat(fill): set custom verbose test report labels via plugin

The previous attempt in d27a0ab broke xfail behaviour; this plugin accounts for tests marked as xfail. This issue was visible in `src/pytest_plugins/forks/tests/test_covariant_markers.py::test_fork_covariant_markers[with_all_tx_types_with_marks_lambda_multiple_marks]`.

* docs: fix typo in changelog

* feat(cli): add long overdue alias for fill

This change requires the pytest-custom-report plugin.

* tests(phil): assert unicorns

* tests(phil): fix docstring in unicorn test
danceratopz authored Oct 15, 2024
1 parent 619e7d8 commit 0900aa8
Showing 8 changed files with 94 additions and 6 deletions.
1 change: 1 addition & 0 deletions docs/CHANGELOG.md
@@ -47,6 +47,7 @@ Test fixtures for use by clients are available for each release on the [Github r
- ✨ Pin EELS versions in `eels_resolutions.json` and include this file in fixture releases ([#872](https://github.com/ethereum/execution-spec-tests/pull/872)).
- 🔀 Replace `ethereum.base_types` with `ethereum-types` ([#850](https://github.com/ethereum/execution-spec-tests/pull/850)).
- 💥 `PragueEIP7692` fork has been renamed to `Osaka` ([#869](https://github.com/ethereum/execution-spec-tests/pull/869))
- ✨ Improve `fill` terminal output to emphasize that filling tests is not actually testing a client ([#887](https://github.com/ethereum/execution-spec-tests/pull/887)).

### 🔧 EVM Tools

2 changes: 2 additions & 0 deletions pyproject.toml
@@ -29,6 +29,7 @@ dependencies = [
"requests_unixsocket2>=0.4.0",
"colorlog>=6.7.0,<7",
"pytest>7.3.2,<8",
"pytest-custom-report>=1.0.1,<2",
"pytest-html>=4.1.0,<5",
"pytest-metadata>=3,<4",
"pytest-xdist>=3.3.1,<4",
@@ -81,6 +82,7 @@ docs = [

[project.scripts]
fill = "cli.pytest_commands.fill:fill"
phil = "cli.pytest_commands.fill:phil"
tf = "cli.pytest_commands.fill:tf"
checkfixtures = "cli.check_fixtures:check_fixtures"
consume = "cli.pytest_commands.consume:consume"
3 changes: 3 additions & 0 deletions pytest.ini
@@ -19,3 +19,6 @@ addopts =
--tb short
--dist loadscope
--ignore tests/cancun/eip4844_blobs/point_evaluation_vectors/
# these customizations require the pytest-custom-report plugin
report_passed_verbose = FILLED
report_xpassed_verbose = XFILLED
29 changes: 29 additions & 0 deletions src/cli/pytest_commands/fill.py
@@ -65,3 +65,32 @@ def fill(pytest_args: List[str], **kwargs) -> None:
),
)
sys.exit(result)


@click.command(context_settings=dict(ignore_unknown_options=True))
@common_click_options
def phil(pytest_args: List[str], **kwargs) -> None:
"""
A friendly alias for the fill command.
"""
args = handle_fill_command_flags(
["--index", *pytest_args],
)
result = pytest.main(
args
+ [
"-o",
"report_passed=🦄",
"-o",
"report_xpassed=🌈",
"-o",
"report_failed=👾",
"-o",
"report_xfailed=🦺",
"-o",
"report_skipped=🦘",
"-o",
"report_error=🚨",
],
)
sys.exit(result)
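
For context, a minimal standalone sketch (not part of this diff; it assumes the pytest-custom-report plugin pinned in pyproject.toml is installed) of the `-o` override mechanism that `phil` relies on above: each `-o name=value` pair overrides an ini option for that invocation only, leaving pytest.ini untouched.

```python
# Hypothetical standalone script (illustrative only): pytest.main() accepts the
# same command-line arguments as the pytest CLI, so "-o" pairs can inject the
# report_* options read by pytest-custom-report for a single run.
import sys

import pytest

if __name__ == "__main__":
    exit_code = pytest.main(
        [*sys.argv[1:], "-o", "report_passed=🦄", "-o", "report_xpassed=🌈"]
    )
    sys.exit(exit_code)
```

Run without arguments, this executes the local test suite with only those two outcome labels overridden; all other outcomes keep their defaults.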
12 changes: 11 additions & 1 deletion src/cli/tests/test_pytest_fill_command.py
@@ -11,7 +11,7 @@

import pytest_plugins.filler.filler

-from ..pytest_commands.fill import fill
+from ..pytest_commands.fill import fill, phil


@pytest.fixture
@@ -191,3 +191,13 @@ def test_fill_html_and_output_options(
assert result.exit_code == pytest.ExitCode.OK
assert html_path.exists()
assert (output_dir / "state_tests").exists(), "No fixtures in output directory"


def test_phil_default_output_options(runner):
"""
A simple sanity test for phil.
"""
fill_args = ["-k", "test_dup and state_test-DUP16 and LEGACY", "--fork", "Frontier"]
result = runner.invoke(phil, fill_args)
assert result.exit_code == pytest.ExitCode.OK
assert "🦄" in result.output
36 changes: 33 additions & 3 deletions src/pytest_plugins/filler/filler.py
@@ -15,6 +15,7 @@
from typing import Any, Dict, Generator, List, Type

import pytest
from _pytest.terminal import TerminalReporter
from filelock import FileLock
from pytest_metadata.plugin import metadata_key # type: ignore

@@ -270,10 +271,13 @@ def pytest_report_header(config: pytest.Config):

def pytest_report_teststatus(report, config: pytest.Config):
"""
-    Disable test session progress report if we're writing the JSON fixtures to
-    stdout to be read by a consume command on stdin. I.e., don't write this
-    type of output to the console:
+    This hook modifies the test results in pytest's terminal output.
+    We use this:
+    1. To disable test session progress report if we're writing the JSON
+       fixtures to stdout to be read by a consume command on stdin. I.e.,
+       don't write this type of output to the console:
```text
...x...
```
@@ -282,6 +286,32 @@
return report.outcome, "", report.outcome.upper()
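
The return value on the line above follows pytest's `pytest_report_teststatus` convention of `(category, short_letter, verbose_word)`; a minimal standalone sketch (a hypothetical conftest.py, not part of this diff) of how the empty second element suppresses the per-test progress characters:

```python
# Hypothetical conftest.py (illustrative only): an empty short letter means no
# progress character (".", "x", "F", ...) is printed for the test, while the
# third element still appears as the outcome label in verbose mode.
def pytest_report_teststatus(report, config):
    if report.when == "call":
        # (category, short_letter, verbose_word)
        return report.outcome, "", report.outcome.upper()
    return None  # fall back to pytest's defaults for setup/teardown phases
```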


@pytest.hookimpl(hookwrapper=True, trylast=True)
def pytest_terminal_summary(
terminalreporter: TerminalReporter, exitstatus: int, config: pytest.Config
):
"""
    Modify pytest's terminal summary to emphasize that no tests were run.
Emphasize that fixtures have only been filled; they must now be executed to
actually run the tests.
"""
yield
stats = terminalreporter.stats
if "passed" in stats and stats["passed"]:
# append / to indicate this is a directory
output_dir = str(strip_output_tarball_suffix(config.getoption("output"))) + "/"
terminalreporter.write_sep(
"=",
(
f' No tests executed - the test fixtures in "{output_dir}" may now be executed '
"against a client "
),
bold=True,
yellow=True,
)


def pytest_metadata(metadata):
"""
Add or remove metadata to/from the pytest report.
4 changes: 2 additions & 2 deletions src/pytest_plugins/forks/forks.py
@@ -438,15 +438,15 @@ def pytest_report_header(config, start_path):
header = [
(
bold
+ "Executing tests for: "
+ "Generating fixtures for: "
+ ", ".join([f.name() for f in sorted(list(config.fork_set))])
+ reset
),
]
if all(fork.is_deployed() for fork in config.fork_set):
header += [
(
bold + warning + "Only executing tests with stable/deployed forks: "
bold + warning + "Only generating fixtures with stable/deployed forks: "
"Specify an upcoming fork via --until=fork to "
"add forks under development." + reset
)
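
For context on the hook being edited here, `pytest_report_header` may return a string or a list of strings, which pytest prints at the top of the session output; a minimal standalone sketch (hypothetical values, not the plugin's actual fork handling):

```python
# Hypothetical conftest.py (illustrative only): the returned lines appear in
# the session header, which is where "Generating fixtures for: ..." shows up.
def pytest_report_header(config, start_path):
    forks = ["Frontier", "Homestead"]  # placeholder; the plugin derives this from config.fork_set
    return [f"Generating fixtures for: {', '.join(forks)}"]
```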
13 changes: 13 additions & 0 deletions uv.lock

Some generated files are not rendered by default.
