Revert "ci(release): don't download benchmark artifacts in build_docs…
Browse files Browse the repository at this point in the history
….py (MODFLOW-USGS#2147)"

This reverts commit 9180c2c.
wpbonelli committed Jan 30, 2025
1 parent 3238386 commit ed1d5fd
Showing 1 changed file with 72 additions and 1 deletion.
73 changes: 72 additions & 1 deletion distribution/build_docs.py
@@ -4,6 +4,7 @@
import shutil
import sys
import textwrap
from datetime import datetime
from os import PathLike, environ
from pathlib import Path
from pprint import pprint
@@ -14,12 +15,15 @@

import pytest
from benchmark import run_benchmarks
from flaky import flaky
from modflow_devtools.build import meson_build
from modflow_devtools.download import (
    download_and_unzip,
    download_artifact,
    get_release,
    list_artifacts,
)
from modflow_devtools.markers import no_parallel, requires_exe
from modflow_devtools.markers import no_parallel, requires_exe, requires_github
from modflow_devtools.misc import run_cmd, run_py_script, set_dir

from utils import assert_match, convert_line_endings, get_project_root_path, glob, match
@@ -67,20 +71,73 @@
]


def download_benchmarks(
    output_path: PathLike,
    verbose: bool = False,
    repo_owner: str = "MODFLOW-USGS",
) -> Optional[Path]:
    """Try to download MF6 benchmarks from GitHub Actions."""

    output_path = Path(output_path).expanduser().absolute()
    name = "run-time-comparison"  # todo make configurable
    repo = f"{repo_owner}/modflow6"  # todo make configurable, add pytest/cli args
    artifacts = list_artifacts(repo, name=name, verbose=verbose)
    artifacts = sorted(
        artifacts,
        key=lambda a: datetime.strptime(a["created_at"], "%Y-%m-%dT%H:%M:%SZ"),
        reverse=True,
    )
    artifacts = [
        a
        for a in artifacts
        if a["workflow_run"]["head_branch"] == "develop"  # todo make configurable
    ]
    most_recent = next(iter(artifacts), None)
    if most_recent:
        print(f"Found most recent benchmarks (artifact {most_recent['id']})")
        print(f"Downloading benchmarks (artifact {most_recent['id']})")
        download_artifact(repo, id=most_recent["id"], path=output_path, verbose=verbose)
        print(f"Downloaded benchmarks to {output_path}")
        path = output_path / f"{name}.md"
        assert path.is_file()
        return path
    else:
        print("No benchmarks found")
        return None
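
# A minimal usage sketch: download_benchmarks() is driven by build_benchmark_tex()
# below, but it can also be called on its own. Using BENCHMARKS_PATH as the output
# directory here is an assumption for illustration only.
#
#     benchmarks_md = download_benchmarks(BENCHMARKS_PATH, verbose=True)
#     if benchmarks_md is not None:
#         print(benchmarks_md.read_text()[:200])  # peek at the comparison table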


@pytest.fixture
def github_user() -> Optional[str]:
    return environ.get("GITHUB_USER", None)


@flaky
@no_parallel
@requires_github
def test_download_benchmarks(tmp_path, github_user):
    path = download_benchmarks(
        tmp_path,
        verbose=True,
        repo_owner=github_user if github_user else "MODFLOW-USGS",
    )
    if path:
        assert path.name == "run-time-comparison.md"
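
# A minimal sketch of how this test might be invoked with pytest; the exact
# command and working directory are assumptions:
#
#     pytest distribution/build_docs.py::test_download_benchmarks -v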


def build_benchmark_tex(
    output_path: PathLike,
    force: bool = False,
    repo_owner: str = "MODFLOW-USGS",
):
    """Build LaTeX files for MF6 performance benchmarks to go into the release notes."""

    BENCHMARKS_PATH.mkdir(parents=True, exist_ok=True)
    benchmarks_path = BENCHMARKS_PATH / "run-time-comparison.md"

    # download benchmark artifacts if any exist on GitHub
    if not benchmarks_path.is_file():
        benchmarks_path = download_benchmarks(BENCHMARKS_PATH, repo_owner=repo_owner)

    # run benchmarks again if no benchmarks found on GitHub or overwrite requested
    if force or benchmarks_path is None or not benchmarks_path.is_file():
        run_benchmarks(
@@ -105,6 +162,20 @@ def build_benchmark_tex(
    assert (RELEASE_NOTES_PATH / f"{benchmarks_path.stem}.tex").is_file()


@flaky
@no_parallel
@requires_github
def test_build_benchmark_tex(tmp_path):
    benchmarks_path = BENCHMARKS_PATH / "run-time-comparison.md"
    tex_path = DISTRIBUTION_PATH / f"{benchmarks_path.stem}.tex"

    try:
        build_benchmark_tex(tmp_path)
        assert benchmarks_path.is_file()
    finally:
        tex_path.unlink(missing_ok=True)


def build_deprecations_tex(force: bool = False):
"""Build LaTeX files for the deprecations table to go into the release notes."""
