From 9180c2cc0c191d5e8347801c1b2b6e5904c6c849 Mon Sep 17 00:00:00 2001
From: wpbonelli
Date: Thu, 16 Jan 2025 21:46:30 -0500
Subject: [PATCH] ci(release): don't download benchmark artifacts in
 build_docs.py (#2147)

We could just do this in the release.yml workflow with
actions/download-artifact, now that v4 supports downloads from arbitrary
workflow runs. But we probably want to run fresh benchmarks at release time.

Also remove some awkward tests for things that get exercised nightly in the
release workflow anyway.
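For reference, a minimal sketch of the release.yml step this could become
if we ever go that route (untested; assumes the artifact keeps its
run-time-comparison name, and that the id of the workflow run to pull from
is wired up somehow, e.g. via a hypothetical benchmark_run_id input):

    - name: Download benchmarks
      uses: actions/download-artifact@v4
      with:
        name: run-time-comparison
        path: distribution/
        run-id: ${{ inputs.benchmark_run_id }}  # hypothetical input
        github-token: ${{ secrets.GITHUB_TOKEN }}  # needed when run-id targets another run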
---
 distribution/build_docs.py | 73 +------------------------------------
 1 file changed, 1 insertion(+), 72 deletions(-)

diff --git a/distribution/build_docs.py b/distribution/build_docs.py
index 36d0c23d646..8621ff59fa6 100644
--- a/distribution/build_docs.py
+++ b/distribution/build_docs.py
@@ -4,7 +4,6 @@
 import shutil
 import sys
 import textwrap
-from datetime import datetime
 from os import PathLike, environ
 from pathlib import Path
 from pprint import pprint
@@ -15,15 +14,12 @@
 
 import pytest
 from benchmark import run_benchmarks
-from flaky import flaky
 from modflow_devtools.build import meson_build
 from modflow_devtools.download import (
     download_and_unzip,
-    download_artifact,
     get_release,
-    list_artifacts,
 )
-from modflow_devtools.markers import no_parallel, requires_exe, requires_github
+from modflow_devtools.markers import no_parallel, requires_exe
 from modflow_devtools.misc import run_cmd, run_py_script, set_dir
 from utils import assert_match, convert_line_endings, get_project_root_path, glob, match
 
@@ -71,73 +67,20 @@
 ]
 
 
-def download_benchmarks(
-    output_path: PathLike,
-    verbose: bool = False,
-    repo_owner: str = "MODFLOW-USGS",
-) -> Optional[Path]:
-    """Try to download MF6 benchmarks from GitHub Actions."""
-
-    output_path = Path(output_path).expanduser().absolute()
-    name = "run-time-comparison"  # todo make configurable
-    repo = f"{repo_owner}/modflow6"  # todo make configurable, add pytest/cli args
-    artifacts = list_artifacts(repo, name=name, verbose=verbose)
-    artifacts = sorted(
-        artifacts,
-        key=lambda a: datetime.strptime(a["created_at"], "%Y-%m-%dT%H:%M:%SZ"),
-        reverse=True,
-    )
-    artifacts = [
-        a
-        for a in artifacts
-        if a["workflow_run"]["head_branch"] == "develop"  # todo make configurable
-    ]
-    most_recent = next(iter(artifacts), None)
-    print(f"Found most recent benchmarks (artifact {most_recent['id']})")
-    if most_recent:
-        print(f"Downloading benchmarks (artifact {most_recent['id']})")
-        download_artifact(repo, id=most_recent["id"], path=output_path, verbose=verbose)
-        print(f"Downloaded benchmarks to {output_path}")
-        path = output_path / f"{name}.md"
-        assert path.is_file()
-        return path
-    else:
-        print("No benchmarks found")
-        return None
-
-
 @pytest.fixture
 def github_user() -> Optional[str]:
     return environ.get("GITHUB_USER", None)
 
 
-@flaky
-@no_parallel
-@requires_github
-def test_download_benchmarks(tmp_path, github_user):
-    path = download_benchmarks(
-        tmp_path,
-        verbose=True,
-        repo_owner=github_user if github_user else "MODFLOW-USGS",
-    )
-    if path:
-        assert path.name == "run-time-comparison.md"
-
-
 def build_benchmark_tex(
     output_path: PathLike,
     force: bool = False,
-    repo_owner: str = "MODFLOW-USGS",
 ):
     """Build LaTeX files for MF6 performance benchmarks to go into the release notes."""
 
     BENCHMARKS_PATH.mkdir(parents=True, exist_ok=True)
     benchmarks_path = BENCHMARKS_PATH / "run-time-comparison.md"
 
-    # download benchmark artifacts if any exist on GitHub
-    if not benchmarks_path.is_file():
-        benchmarks_path = download_benchmarks(BENCHMARKS_PATH, repo_owner=repo_owner)
-
     # run benchmarks again if no benchmarks found on GitHub or overwrite requested
     if force or not benchmarks_path.is_file():
         run_benchmarks(
@@ -162,20 +105,6 @@
     assert (RELEASE_NOTES_PATH / f"{benchmarks_path.stem}.tex").is_file()
 
 
-@flaky
-@no_parallel
-@requires_github
-def test_build_benchmark_tex(tmp_path):
-    benchmarks_path = BENCHMARKS_PATH / "run-time-comparison.md"
-    tex_path = DISTRIBUTION_PATH / f"{benchmarks_path.stem}.tex"
-
-    try:
-        build_benchmark_tex(tmp_path)
-        assert benchmarks_path.is_file()
-    finally:
-        tex_path.unlink(missing_ok=True)
-
-
 def build_deprecations_tex(force: bool = False):
     """Build LaTeX files for the deprecations table to go into the release notes."""
 