Update filterFragSpectra.R - update filenames - ensure as character #74

Workflow file for this run

name: Galaxy Tool Linting and Tests for push and PR
on: [push, pull_request]
env:
GALAXY_REPO: https://github.com/galaxyproject/galaxy
GALAXY_RELEASE: release_24.0
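# GALAXY_REPO and GALAXY_RELEASE pin the Galaxy source and branch that
# `planemo test` is run against (via --galaxy_source / --galaxy_branch below),
# so bumping the tested Galaxy version only requires changing GALAXY_RELEASE.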
jobs:
# The setup job does two things:
# 1. caches the pip cache and the .planemo directory
# 2. determines the list of changed repositories
# It produces one artifact which contains
# - a file with the latest SHA of the chosen branch of the Galaxy repo
# - a file listing the changed repositories
# both of which are needed by the subsequent jobs.
setup:
name: Setup cache and determine changed repositories
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.11]
steps:
- name: Print github context properties
run: |
echo 'event: ${{ github.event_name }}'
echo 'sha: ${{ github.sha }}'
echo 'ref: ${{ github.ref }}'
echo 'head_ref: ${{ github.head_ref }}'
echo 'base_ref: ${{ github.base_ref }}'
echo 'event.before: ${{ github.event.before }}'
echo 'event.after: ${{ github.event.after }}'
- uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Determine latest galaxy commit
run: echo "GALAXY_HEAD_SHA=$(git ls-remote ${{ env.GALAXY_REPO }} refs/heads/${{ env.GALAXY_RELEASE }} | cut -f1)" >> $GITHUB_ENV
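# `git ls-remote` prints "<sha>	<ref>" for the release branch head and `cut -f1`
# keeps only the SHA, e.g. (illustrative) "2e3f4a...	refs/heads/release_24.0".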
- name: Save latest galaxy commit to artifact file
run: echo $GALAXY_HEAD_SHA > galaxy.sha
- uses: actions/upload-artifact@v4
with:
name: Workflow artifacts
path: galaxy.sha
- name: Cache .cache/pip
uses: actions/cache@v2
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy1_${{ env.GALAXY_HEAD_SHA }}
- name: Cache .planemo
uses: actions/cache@v2
id: cache-planemo
with:
path: ~/.planemo
key: planemo_cache_py_${{ matrix.python-version }}_gxy1_${{ env.GALAXY_HEAD_SHA }}
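# Both cache keys embed the Python version and the Galaxy head SHA, so the
# caches are rebuilt whenever either the Python version or the pinned Galaxy
# release branch moves forward.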
# Install the `wheel` package so that when installing other packages which
# are not available as wheels, pip will build a wheel for them, which can be cached.
- name: Install wheel
run: pip install wheel
- name: Install Planemo and flake8
run: pip install planemo flake8 flake8-import-order
- name: Fake a Planemo run to update cache
continue-on-error: true
if: steps.cache-pip.outputs.cache-hit != 'true' || steps.cache-planemo.outputs.cache-hit != 'true'
run: |
touch tool.xml
PIP_QUIET=2 planemo test --galaxy_python_version ${{ matrix.python-version }} --no_conda_auto_init --galaxy_source $GALAXY_REPO --galaxy_branch $GALAXY_RELEASE
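# The dummy tool.xml only exists so that planemo performs its Galaxy setup; the
# run itself may fail (hence continue-on-error), but it populates ~/.planemo and
# ~/.cache/pip, which the cache steps above then save.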
- uses: actions/checkout@v2
with:
fetch-depth: 0
# The range of commits to check for changes is:
# - `origin/master...` for all events happening on a feature branch
# - for events on the master branch we compare against the SHA before the event
# (note that this does not work for feature branch events, since we want all
# commits on the feature branch and not just the commits of the last event)
# - for pull requests we compare against the first ancestor (HEAD~), since the
# current HEAD is the merge commit between the PR branch and the base branch
- name: Set commit range (push to the feature branch)
if: github.ref != 'refs/heads/master' && github.event_name == 'push'
run: |
git fetch origin master
echo "COMMIT_RANGE=origin/master..." >> $GITHUB_ENV
- name: Set commit range (push to the master branch, e.g. merge)
if: github.ref == 'refs/heads/master' && github.event_name == 'push'
run: echo "COMMIT_RANGE=${{ github.event.before }}.." >> $GITHUB_ENV
- name: Set commit range (pull request)
if: github.event_name == 'pull_request'
run: echo "COMMIT_RANGE=HEAD~.." >> $GITHUB_ENV
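# Resulting values, for illustration:
# feature-branch push: COMMIT_RANGE=origin/master...
# master push:         COMMIT_RANGE=<event.before SHA>..
# pull request:        COMMIT_RANGE=HEAD~..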
- name: Planemo ci_find_repos
run: planemo ci_find_repos --changed_in_commit_range $COMMIT_RANGE --exclude packages --exclude deprecated --exclude_from .tt_skip --output changed_repositories.list
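# changed_repositories.list contains one repository directory per line; the later
# jobs consume it either line by line (`while read -r DIR`) or all at once (`$(cat ...)`).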
- name: Show repo list
run: cat changed_repositories.list
- uses: actions/upload-artifact@v4
with:
name: Workflow artifacts
path: changed_repositories.list
# Planemo lint the changed repositories
lint:
name: Lint tools
needs: setup
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: [3.11]
steps:
# checkout the repository
# and use it as the current working directory
- uses: actions/checkout@v2
with:
fetch-depth: 1
- uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- uses: actions/download-artifact@v4
with:
name: Workflow artifacts
path: ../workflow_artifacts/
- name: Determine latest galaxy commit
run: echo "GALAXY_HEAD_SHA=$(cat ../workflow_artifacts/galaxy.sha)" >> $GITHUB_ENV
- name: Cache .cache/pip
uses: actions/cache@v2
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy1_${{ env.GALAXY_HEAD_SHA }}
- name: Install Planemo
run: pip install planemo
- name: Planemo lint
run: |
set -e
while read -r DIR; do
planemo shed_lint --tools --ensure_metadata --urls --report_level warn --fail_level error --recursive "$DIR";
done < ../workflow_artifacts/changed_repositories.list
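# With `set -e`, the job fails as soon as shed_lint exits non-zero for any
# repository; warnings are reported but only errors fail the lint
# (--report_level warn --fail_level error).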
# flake8 of Python scripts in the changed repositories
flake8:
name: Lint Python scripts
needs: setup
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: [3.11]
steps:
# checkout the repository
# and use it as the current working directory
- uses: actions/checkout@v2
with:
fetch-depth: 1
- uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- uses: actions/download-artifact@v4
with:
name: Workflow artifacts
path: ../workflow_artifacts/
- name: Determine latest galaxy commit
run: echo "GALAXY_HEAD_SHA=$(cat ../workflow_artifacts/galaxy.sha)" >> $GITHUB_ENV
- name: Cache .cache/pip
uses: actions/cache@v2
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy1_${{ env.GALAXY_HEAD_SHA }}
- name: Install flake8
run: pip install flake8 flake8-import-order
- name: Flake8
run: |
if [ -s ../workflow_artifacts/changed_repositories.list ]; then
flake8 $(cat ../workflow_artifacts/changed_repositories.list)
fi
# Planemo test the changed repositories, each chunk creates an artifact
# containing HTML and JSON reports for the executed tests
test:
name: Test tools
# This job runs on Linux
runs-on: ubuntu-latest
needs: setup
strategy:
fail-fast: false
matrix:
chunk: [0]
python-version: [3.11]
services:
postgres:
image: postgres:11
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
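# This PostgreSQL service backs the Galaxy instances started by planemo test
# below, which connect to it via --database_connection.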
steps:
# checkout the repository
# and use it as the current working directory
- uses: actions/checkout@v2
with:
fetch-depth: 1
- uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- uses: actions/download-artifact@v4
with:
name: Workflow artifacts
path: ../workflow_artifacts/
- name: Determine latest galaxy commit
run: echo "GALAXY_HEAD_SHA=$(cat ../workflow_artifacts/galaxy.sha)" >> $GITHUB_ENV
- name: Cache .cache/pip
uses: actions/cache@v2
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy1_${{ env.GALAXY_HEAD_SHA }}
- name: Cache .planemo
uses: actions/cache@v2
id: cache-planemo
with:
path: ~/.planemo
key: planemo_cache_py_${{ matrix.python-version }}_gxy1_${{ env.GALAXY_HEAD_SHA }}
- name: Install Planemo
run: pip install planemo
- name: Planemo ci_find_tools
run: |
touch changed_repositories_chunk.list changed_tools_chunk.list
if [ -s ../workflow_artifacts/changed_repositories.list ]; then
if [ $(wc -l < ../workflow_artifacts/changed_repositories.list) -eq 1 ]; then
planemo ci_find_tools --chunk_count 1 --chunk ${{ matrix.chunk }} \
--output changed_tools_chunk.list \
$(cat ../workflow_artifacts/changed_repositories.list)
else
planemo ci_find_repos --chunk_count 1 --chunk ${{ matrix.chunk }} \
--output changed_repositories_chunk.list \
$(cat ../workflow_artifacts/changed_repositories.list)
fi
fi
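# If exactly one repository changed, its individual tools are distributed over the
# test chunks; otherwise the changed repositories themselves are distributed.
# With --chunk_count 1 and the single-entry chunk matrix above, everything ends up in chunk 0.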
- name: Show changed tools/repositories chunk list
run: cat changed_tools_chunk.list changed_repositories_chunk.list
- name: Planemo test tools
run: |
if grep -lqf .tt_biocontainer_skip changed_tools_chunk.list changed_repositories_chunk.list; then
PLANEMO_OPTIONS=""
else
PLANEMO_OPTIONS="--biocontainers --no_dependency_resolution --no_conda_auto_init"
fi
if [ -s changed_tools_chunk.list ]; then
PIP_QUIET=1 planemo test --database_connection postgresql://postgres:postgres@localhost:5432/galaxy $PLANEMO_OPTIONS --galaxy_source $GALAXY_REPO --galaxy_branch $GALAXY_RELEASE --galaxy_python_version ${{ matrix.python-version }} --test_output_json tool_test_output.json $(cat changed_tools_chunk.list) || true
docker system prune --all --force --volumes || true
elif [ -s changed_repositories_chunk.list ]; then
while read -r DIR; do
if [[ "$DIR" =~ ^data_managers.* ]]; then
TESTPATH=$(planemo ci_find_tools "$DIR")
else
TESTPATH="$DIR"
fi
PIP_QUIET=1 planemo test --database_connection postgresql://postgres:postgres@localhost:5432/galaxy $PLANEMO_OPTIONS --galaxy_source $GALAXY_REPO --galaxy_branch $GALAXY_RELEASE --galaxy_python_version ${{ matrix.python-version }} --test_output_json "$DIR"/tool_test_output.json "$TESTPATH" || true
docker system prune --all --force --volumes || true
done < changed_repositories_chunk.list
else
echo '{"tests":[]}' > tool_test_output.json
fi
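# `|| true` keeps this step green even when tests fail, so every chunk still
# uploads its JSON report; real failures are detected later in combine_outputs.
# `docker system prune` frees runner disk space between repositories.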
- name: Merge tool_test_output.json files
run: find . -name tool_test_output.json -exec sh -c 'planemo merge_test_reports "$@" tool_test_output.json' sh {} +
- name: Create tool_test_output.html
run: planemo test_reports tool_test_output.json --test_output tool_test_output.html
- name: Copy artifacts into place
run: |
mkdir upload
mv tool_test_output.json tool_test_output.html upload/
- uses: actions/upload-artifact@v4
with:
name: 'Tool test output ${{ matrix.chunk }}'
path: upload
# - combine the results of the test chunks (which never fail, thanks to `|| true`)
# and create a global test report as JSON and HTML, which is provided as an artifact
# - check whether any tool test actually failed (by looking it up in the combined JSON)
# and fail this job if that is the case
combine_outputs:
name: Combine chunked test results
needs: test
strategy:
matrix:
python-version: [3.11]
# This job runs on Linux
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@v4
with:
path: artifacts
- uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Determine latest galaxy commit
run: echo "GALAXY_HEAD_SHA=$(cat "artifacts/Workflow artifacts/galaxy.sha")" >> $GITHUB_ENV
- name: Cache .cache/pip
uses: actions/cache@v2
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy1_${{ env.GALAXY_HEAD_SHA }}
- name: Install Planemo
run: pip install planemo
- name: Install jq
run: sudo apt-get install -y jq
- name: Combine outputs
run: find artifacts/ -name tool_test_output.json -exec sh -c 'planemo merge_test_reports "$@" tool_test_output.json' sh {} +
- name: Create tool_test_output.html
run: planemo test_reports tool_test_output.json --test_output tool_test_output.html
- name: Copy artifacts into place
run: |
mkdir upload
mv tool_test_output.json tool_test_output.html upload/
- uses: actions/upload-artifact@v4
with:
name: 'All tool test results'
path: upload
- name: Check status of combined outputs
run: |
if jq '.["tests"][]["data"]["status"]' upload/tool_test_output.json | grep -v "success"; then
echo "Unsuccessful tests found, inspect the 'All tool test results' artifact for details."
exit 1
fi
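# jq extracts the per-test status values from the merged JSON; grep -v "success"
# exits 0 if any non-success status is present, which triggers the `exit 1`.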
# deploy the tools to the toolsheds (first the test toolshed (TTS), then the main toolshed)
deploy:
name: Deploy
needs: [lint,flake8,combine_outputs]
strategy:
matrix:
python-version: [3.11]
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/master' && github.repository_owner == 'computational-metabolomics'
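# The deploy job only runs for pushes to master within the computational-metabolomics
# organisation, so pull requests and forks never push to the toolsheds.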
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 1
- uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- uses: actions/download-artifact@v4
with:
name: Workflow artifacts
path: ../workflow_artifacts/
- name: Determine latest galaxy commit
run: echo "GALAXY_HEAD_SHA=$(cat ../workflow_artifacts/galaxy.sha)" >> $GITHUB_ENV
- name: Cache .cache/pip
uses: actions/cache@v2
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ env.GALAXY_HEAD_SHA }}
- name: Install Planemo
run: pip install planemo
- name: Deploy on testtoolshed
env:
SHED_KEY: ${{ secrets.tts_api_key }}
run: |
while read -r DIR; do
planemo shed_update --shed_target testtoolshed --shed_key "${{ env.SHED_KEY }}" --force_repository_creation "$DIR" || exit 1;
done < ../workflow_artifacts/changed_repositories.list
continue-on-error: true
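# continue-on-error lets the main toolshed deployment below proceed even if the
# test toolshed update fails.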
- name: Deploy on toolshed
env:
SHED_KEY: ${{ secrets.ts_api_key }}
run: |
while read -r DIR; do
planemo shed_update --shed_target toolshed --shed_key "${{ env.SHED_KEY }}" --force_repository_creation "$DIR" || exit 1;
done < ../workflow_artifacts/changed_repositories.list