From cf6c056e64db275ebadf6346d8c2767774ea1f38 Mon Sep 17 00:00:00 2001
From: Sorin Sbarnea
Date: Mon, 20 May 2024 17:51:50 +0100
Subject: [PATCH] Refactor codecov to perform a single upload and use OIDC authentication (#377)

- Archive coverage report from each job
- Combine coverage reports inside check job
- Make a single upload to codecov.io
- Replace use of CODECOV token with OIDC authentication
- Remove 'test' GHA environment as we no longer need it
---
 .github/workflows/tox.yml | 91 +++++++++++++++++++++++++++++----------
 test/test_runtime.py      | 10 +++--
 tox.ini                   | 31 ++++++-------
 3 files changed, 92 insertions(+), 40 deletions(-)

diff --git a/.github/workflows/tox.yml b/.github/workflows/tox.yml
index 26df89b9..705cba03 100644
--- a/.github/workflows/tox.yml
+++ b/.github/workflows/tox.yml
@@ -12,8 +12,8 @@ on:
   workflow_call:
 
 jobs:
-  pre:
-    name: pre
+  prepare:
+    name: prepare
     runs-on: ubuntu-22.04
     outputs:
       matrix: ${{ steps.generate_matrix.outputs.matrix }}
@@ -42,20 +42,20 @@ jobs:
           macos: minmax
   build:
     name: ${{ matrix.name }}
-    environment: test
+
     runs-on: ${{ matrix.os || 'ubuntu-22.04' }}
-    needs: pre
+    needs: prepare
     strategy:
       fail-fast: false
-      matrix: ${{ fromJson(needs.pre.outputs.matrix) }}
+      matrix: ${{ fromJson(needs.prepare.outputs.matrix) }}
     env:
       FORCE_COLOR: 1
-
+      PYTEST_REQPASS: 94
     steps:
-      - name: Check out src from Git
-        uses: actions/checkout@v4
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0 # needed by setuptools-scm
+          submodules: true
 
       - name: Set up Python ${{ matrix.python_version }}
         uses: actions/setup-python@v5
@@ -80,37 +80,79 @@ jobs:
       - name: Initialize tox envs
         run: python3 -m tox --notest --skip-missing-interpreters false -vv -e ${{ matrix.passed_name }}
-      - name: Test with tox
+      - name: tox -e ${{ matrix.passed_name }}
         run: python3 -m tox -e ${{ matrix.passed_name }}
 
       - name: Archive logs
         uses: actions/upload-artifact@v4
         with:
           name: logs-${{ matrix.name }}.zip
-          path: .tox/**/log/
+          path: |
+            .tox/**/log/
+            .tox/**/.coverage*
+            .tox/**/coverage.xml
 
-      - name: Upload coverage data
-        if: ${{ startsWith(matrix.passed_name, 'py') }}
-        uses: codecov/codecov-action@v4
-        with:
-          name: ${{ matrix.passed_name }}
-          token: ${{ secrets.CODECOV_TOKEN }}
-          fail_ci_if_error: true # see https://github.com/codecov/codecov-action/issues/598
-          verbose: true # optional (default = false)
+      - name: Report failure if git reports dirty status
+        run: |
+          if [[ -n $(git status -s) ]]; then
+            # shellcheck disable=SC2016
+            echo -n '::error file=git-status::'
+            printf '### Failed as git reported modified and/or untracked files\n```\n%s\n```\n' "$(git status -s)" | tee -a "$GITHUB_STEP_SUMMARY"
+            exit 99
+          fi
+        # https://github.com/actions/toolkit/issues/193
 
-  check: # This job does nothing and is only used for the branch protection
+  check:
     if: always()
+    permissions:
+      id-token: write
+      checks: read
 
     needs:
       - build
 
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-latest
+
     steps:
+      # checkout needed for codecov action which needs codecov.yml file
+      - uses: actions/checkout@v4
+
+      - name: Set up Python # likely needed for coverage
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+
+      - run: pip3 install 'coverage>=7.5.1'
+
       - name: Merge logs into a single archive
         uses: actions/upload-artifact/merge@v4
         with:
           name: logs.zip
-          pattern: logs*.zip
-          delete-merged: true
+          pattern: logs-*.zip
+          # artifacts like py312.zip and py312-macos do have overlapping files
+          separate-directories: true
+
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          name: logs.zip
+          path: .
+
+      - name: Check for expected number of coverage.xml reports
+        run: |
+          JOBS_PRODUCING_COVERAGE=14
+          if [ "$(find . -name coverage.xml | wc -l | bc)" -ne "${JOBS_PRODUCING_COVERAGE}" ]; then
+            echo "::error::Number of coverage.xml files was not the expected one (${JOBS_PRODUCING_COVERAGE}): $(find . -name coverage.xml |xargs echo)"
+            exit 1
+          fi
+
+      - name: Upload coverage data
+        uses: codecov/codecov-action@v4
+        with:
+          name: ${{ matrix.passed_name }}
+          # token: ${{ secrets.CODECOV_TOKEN }}
+          # verbose: true # optional (default = false)
+          fail_ci_if_error: true
+          use_oidc: true # cspell:ignore oidc
 
       - name: Check codecov.io status
         if: github.event_name == 'pull_request'
@@ -120,3 +162,8 @@ jobs:
         uses: re-actors/alls-green@release/v1
         with:
           jobs: ${{ toJSON(needs) }}
+
+      - name: Delete Merged Artifacts
+        uses: actions/upload-artifact/merge@v4
+        with:
+          delete-merged: true
diff --git a/test/test_runtime.py b/test/test_runtime.py
index 0823f603..1b7d454c 100644
--- a/test/test_runtime.py
+++ b/test/test_runtime.py
@@ -532,11 +532,14 @@ def test_install_galaxy_role_unlink(
     caplog: pytest.LogCaptureFixture,
 ) -> None:
     """Test ability to unlink incorrect symlinked roles."""
-    runtime_tmp = Runtime(verbosity=1)
+    runtime_tmp = Runtime(verbosity=1, isolated=True)
     runtime_tmp.prepare_environment()
+    assert runtime_tmp.cache_dir is not None
     pathlib.Path(f"{runtime_tmp.cache_dir}/roles").mkdir(parents=True, exist_ok=True)
-    pathlib.Path(f"{runtime_tmp.cache_dir}/roles/acme.get_rich").symlink_to("/dev/null")
-    pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir()
+    roledir = pathlib.Path(f"{runtime_tmp.cache_dir}/roles/acme.get_rich")
+    if not roledir.exists():
+        roledir.symlink_to("/dev/null")
+    pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir(exist_ok=True)
     pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text(
         """galaxy_info:
   role_name: get_rich
@@ -546,6 +549,7 @@ def test_install_galaxy_role_unlink(
     )
     runtime_tmp._install_galaxy_role(runtime_tmp.project_dir)
     assert "symlink to current repository" in caplog.text
+    pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").unlink()
 
 
 def test_install_galaxy_role_bad_namespace(runtime_tmp: Runtime) -> None:
diff --git a/tox.ini b/tox.ini
index 6f6c82bd..4135d030 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-minversion = 4.0.0
+minversion = 4.6.3
 envlist =
   lint
   pkg
@@ -49,45 +49,46 @@ deps =
 extras =
   test
+
+commands_pre =
+  # safety measure to assure we do not accidentally run tests with broken dependencies
+  {envpython} -m pip check
+  # cleaning needed to prevent errors between runs
+  sh -c "rm -f {envdir}/.coverage.* 2>/dev/null || true"
 commands =
   sh -c "ansible --version | head -n 1"
   # We add coverage options but not making them mandatory as we do not want to force
   # pytest users to run coverage when they just want to run a single test with `pytest -k test`
   coverage run -m pytest {posargs:}
-  sh -c "coverage combine -a -q --data-file=.coverage {toxworkdir}/.coverage.*"
   # needed for upload to codecov.io
-  -sh -c "COVERAGE_FILE= coverage xml --ignore-errors -q --fail-under=0"
-  # needed for vscode integration due to https://github.com/ryanluker/vscode-coverage-gutters/issues/403
-  -sh -c "COVERAGE_FILE= coverage lcov --ignore-errors -q --fail-under=0"
-  sh -c "COVERAGE_FILE= coverage report"
+  {py,py39,py310,py311,py312,py313}: sh -c "coverage combine -a -q --data-file={envdir}/.coverage {envdir}/.coverage.* && coverage xml --data-file={envdir}/.coverage -o {envdir}/coverage.xml --ignore-errors --fail-under=0 && COVERAGE_FILE={envdir}/.coverage coverage lcov --fail-under=0 --ignore-errors -q && COVERAGE_FILE={envdir}/.coverage coverage report --fail-under=0 --ignore-errors"
+  # lcov needed for vscode integration due to https://github.com/ryanluker/vscode-coverage-gutters/issues/403
   # We fail if files are modified at the end
   git diff --exit-code
-commands_pre =
-  # safety measure to assure we do not accidentally run tests with broken dependencies
-  {envpython} -m pip check
-  # cleaning needed to prevent errors between runs
-  sh -c "rm -f .coverage {toxworkdir}/.coverage.* 2>/dev/null || true"
 passenv =
   CURL_CA_BUNDLE # https proxies, https://github.com/tox-dev/tox/issues/1437
   FORCE_COLOR
   HOME
   NO_COLOR
   PYTEST_* # allows developer to define their own preferences
+  PYTEST_REQPASS # needed for CI
+  PYTHON* # PYTHONPYCACHEPREFIX, PYTHONIOENCODING, PYTHONBREAKPOINT,...
   PY_COLORS
+  RTD_TOKEN
   REQUESTS_CA_BUNDLE # https proxies
+  SETUPTOOLS_SCM_DEBUG
   SSL_CERT_FILE # https proxies
+  SSH_AUTH_SOCK # may be needed by git
   LANG
-  LC_ALL
-  LC_CTYPE
+  LC_*
 setenv =
   ANSIBLE_DEVEL_WARNING='false'
-  COVERAGE_FILE = {env:COVERAGE_FILE:{toxworkdir}/.coverage.{envname}}
+  COVERAGE_FILE = {env:COVERAGE_FILE:{envdir}/.coverage.{envname}}
   COVERAGE_PROCESS_START={toxinidir}/pyproject.toml
   PIP_DISABLE_PIP_VERSION_CHECK = 1
   PIP_CONSTRAINT = {toxinidir}/.config/constraints.txt
   PRE_COMMIT_COLOR = always
-  PYTEST_REQPASS = 94
   FORCE_COLOR = 1
 allowlist_externals =
   ansible
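
Note on the combined coverage command: the long one-liner added to tox.ini is easier to follow when expanded. Below is a minimal sketch of the same pipeline, assuming a hypothetical .tox/py312 directory standing in for tox's {envdir} (run from the repository root after a test env has produced .coverage.* data files):

    # assumed env dir; the real command runs inside whichever {envdir} tox selects
    ENVDIR=.tox/py312
    # merge the per-process data files left behind by `coverage run -m pytest`
    coverage combine -a -q --data-file="${ENVDIR}/.coverage" "${ENVDIR}"/.coverage.*
    # produce the coverage.xml that the check job later counts and uploads to codecov.io
    coverage xml --data-file="${ENVDIR}/.coverage" -o "${ENVDIR}/coverage.xml" --ignore-errors --fail-under=0
    # lcov output for vscode-coverage-gutters plus a terminal summary
    COVERAGE_FILE="${ENVDIR}/.coverage" coverage lcov --fail-under=0 --ignore-errors -q
    COVERAGE_FILE="${ENVDIR}/.coverage" coverage report --fail-under=0 --ignore-errors

In CI the check job then consumes these per-environment results: it merges the logs-*.zip artifacts, downloads the combined archive, verifies that the expected number of coverage.xml files is present, and performs the single upload to codecov.io using OIDC (use_oidc: true together with the id-token: write permission) instead of CODECOV_TOKEN.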