
Merge branch 'main' into combine_builders
crecine authored Mar 15, 2024
2 parents 53eec37 + fd4e5dd commit 23f68e9
Showing 5 changed files with 341 additions and 215 deletions.
152 changes: 152 additions & 0 deletions .github/actions/prepare_environment/action.yml
@@ -0,0 +1,152 @@
name: Prepare Environment

inputs:
  NAME:
    required: true
    type: string
  PY:
    required: true
    type: string
  NUMPY:
    required: true
    type: string
  SCIPY:
    required: true
    type: string
  PYOPTSPARSE:
    required: true
    type: string
  SNOPT:
    required: true
    type: string
  OPENMDAO:
    required: true
    type: string
  DYMOS:
    required: true
    type: string
  SSH_PRIVATE_KEY:
    required: true
  SSH_KNOWN_HOSTS:
    required: true
  SNOPT_LOCATION_77:
    required: true

runs:
  using: "composite"
  steps:
    - name: Display run details
      shell: bash
      run: |
        echo "============================================================="
        echo "Run #${GITHUB_RUN_NUMBER}"
        echo "Run ID: ${GITHUB_RUN_ID}"
        echo "Testing: ${GITHUB_REPOSITORY}"
        echo "Triggered by: ${GITHUB_EVENT_NAME}"
        echo "Initiated by: ${GITHUB_ACTOR}"
        echo "============================================================="
    - name: Create SSH key
      shell: bash
      env:
        SSH_PRIVATE_KEY: ${{inputs.SSH_PRIVATE_KEY}}
        SSH_KNOWN_HOSTS: ${{inputs.SSH_KNOWN_HOSTS}}
      run: |
        mkdir -p ~/.ssh/
        echo "$SSH_PRIVATE_KEY" > ~/.ssh/id_rsa
        sudo chmod 600 ~/.ssh/id_rsa
        echo "$SSH_KNOWN_HOSTS" > ~/.ssh/known_hosts
    - name: Setup miniconda
      uses: conda-incubator/setup-miniconda@v2
      with:
        auto-update-conda: true
        python-version: ${{ inputs.PY }}
        channels: conda-forge

    - name: Install dependencies
      shell: bash -l {0}
      run: |
        echo "============================================================="
        echo "Install dependencies"
        echo "============================================================="
        conda install numpy=${{ inputs.NUMPY }} scipy=${{ inputs.SCIPY }} -q -y
        conda install matplotlib pandas panel hvplot -q -y
        pip install testflo -q
    - name: Install pyOptSparse
      if: inputs.PYOPTSPARSE
      shell: bash -l {0}
      run: |
        echo "============================================================="
        echo "Install pyoptsparse"
        echo "============================================================="
        if [[ "${{ inputs.SNOPT }}" == "7.7" && "${{ inputs.SNOPT_LOCATION_77 }}" ]]; then
          mkdir SNOPT
          echo " > Secure copying SNOPT 7.7 over SSH"
          scp -qr ${{ inputs.SNOPT_LOCATION_77 }} SNOPT
          SNOPT="-s SNOPT/src"
        elif [[ "${{ inputs.SNOPT }}" ]]; then
          echo "SNOPT version ${{ inputs.SNOPT }} was requested but source is not available"
        fi
        conda config --add channels conda-forge
        pip install git+https://github.com/OpenMDAO/build_pyoptsparse
        build_pyoptsparse -v -b ${{ inputs.PYOPTSPARSE }} $SNOPT
    - name: Install OpenMDAO
      if: inputs.OPENMDAO
      shell: bash -l {0}
      run: |
        echo "============================================================="
        echo "Install OpenMDAO"
        echo "============================================================="
        if [[ "${{ inputs.OPENMDAO }}" == "dev" ]]; then
          pip install git+https://github.com/OpenMDAO/OpenMDAO
        elif [[ "${{ inputs.OPENMDAO }}" == "latest" ]]; then
          pip install openmdao
        else
          pip install openmdao==${{ inputs.OPENMDAO }}
        fi
    - name: Install Dymos
      if: inputs.DYMOS
      shell: bash -l {0}
      run: |
        echo "============================================================="
        echo "Install Dymos"
        echo "============================================================="
        if [[ "${{ inputs.DYMOS }}" == "dev" ]]; then
          pip install git+https://github.com/OpenMDAO/Dymos
        elif [[ "${{ inputs.DYMOS }}" == "latest" ]]; then
          pip install dymos
        else
          pip install dymos==${{ inputs.DYMOS }}
        fi
    - name: Checkout Aviary
      uses: actions/checkout@v4

    - name: Install Aviary
      shell: bash -l {0}
      run: |
        echo "============================================================="
        echo "Install Aviary"
        echo "============================================================="
        pip install -e .[all]
    - name: Display conda environment info
      shell: bash -l {0}
      run: |
        conda info
        conda list
        conda env export --name ${{ inputs.NAME }}_env --file ${{ inputs.NAME }}_environment.yml
    - name: 'Upload environment artifact'
      uses: actions/upload-artifact@v3
      with:
        name: ${{ inputs.NAME }}_environment
        path: ${{ inputs.NAME }}_environment.yml
        retention-days: 5
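
Taken together, this composite action pins the scientific stack with conda and installs the optimizer and framework chain with pip. A comparable environment can be reproduced locally with roughly the sketch below; the version choices are illustrative placeholders, the conda-forge channel is assumed to be configured, and the SNOPT source copy is omitted because it relies on the private secrets used above.

    # Sketch: local equivalent of the "Install dependencies" through "Install Aviary" steps.
    # Version pins are illustrative; match them to the workflow inputs being tested.
    conda install numpy scipy matplotlib pandas panel hvplot -q -y
    pip install testflo -q
    pip install git+https://github.com/OpenMDAO/build_pyoptsparse
    build_pyoptsparse -v -b v2.9.1   # add "-s SNOPT/src" only if SNOPT source is available locally
    pip install openmdao dymos       # or the pinned/dev versions the workflow inputs select
    pip install -e .[all]            # run from an Aviary checkout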

60 changes: 60 additions & 0 deletions .github/workflows/test_benchmarks.yml
@@ -0,0 +1,60 @@
# Run Tests

name: Test Benchmarks

on:
  # Trigger on push or pull request events for the main branch
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
  merge_group:
    branches: [ main ]

  # Allow running the workflow manually from the Actions tab
  workflow_dispatch:


jobs:

  latest_benchmarks:
    runs-on: ubuntu-latest
    timeout-minutes: 90

    steps:
      - name: Checkout actions
        uses: actions/checkout@v3
        with:
          sparse-checkout: |
            .github/actions
          path: actions

      - name: prepare_benchmark_environment
        uses: ./actions/.github/actions/prepare_environment
        with:
          NAME: 'latest'
          PY: 3
          NUMPY: 1
          SCIPY: 1
          PYOPTSPARSE: 'v2.9.1'
          SNOPT: '7.7'
          OPENMDAO: 'latest'
          DYMOS: 'latest'
          SSH_PRIVATE_KEY: ${{secrets.SSH_PRIVATE_KEY}}
          SSH_KNOWN_HOSTS: ${{secrets.SSH_KNOWN_HOSTS}}
          SNOPT_LOCATION_77: ${{ secrets.SNOPT_LOCATION_77 }}

      - name: Run benchmarks
        shell: bash -l {0}
        run: |
          echo "============================================================="
          echo "Run Benchmarks"
          echo "============================================================="
          testflo . --testmatch=bench_test*
      - name: Checkout actions (again)
        uses: actions/checkout@v3
        with:
          sparse-checkout: |
            .github/actions
          path: actions
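
The benchmark job delegates all environment setup to the composite action and then runs a single selection step. A contributor can mirror that step from an installed Aviary checkout; a minimal sketch, assuming testflo is already installed as in the action above and the working directory is the repository root:

    # Sketch: run the same benchmark selection locally.
    testflo . --testmatch=bench_test*
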
97 changes: 97 additions & 0 deletions .github/workflows/test_docs.yml
@@ -0,0 +1,97 @@
# Run Tests

name: Test Docs

on:
  # Trigger on push or pull request events for the main branch
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
  pull_request_target:
    branches: [ main ]
  merge_group:
    branches: [ main ]

  # Allow running the workflow manually from the Actions tab
  workflow_dispatch:


jobs:

  latest_docs:
    runs-on: ubuntu-latest
    timeout-minutes: 90

    steps:
      - name: Checkout actions
        uses: actions/checkout@v3
        with:
          sparse-checkout: |
            .github/actions
          path: actions

      - name: prepare_docs_environment
        uses: ./actions/.github/actions/prepare_environment
        with:
          NAME: 'latest'
          PY: 3
          NUMPY: 1
          SCIPY: 1
          PYOPTSPARSE: 'v2.9.1'
          SNOPT: '7.7'
          OPENMDAO: 'latest'
          DYMOS: 'latest'
          SSH_PRIVATE_KEY: ${{secrets.SSH_PRIVATE_KEY}}
          SSH_KNOWN_HOSTS: ${{secrets.SSH_KNOWN_HOSTS}}
          SNOPT_LOCATION_77: ${{ secrets.SNOPT_LOCATION_77 }}

      - name: Build docs
        shell: bash -l {0}
        run: |
          pip install -U jupyter-book
          cd aviary/docs
          echo "============================================================="
          echo "Build the docs"
          echo "============================================================="
          bash build_book.sh
      - name: Display doc build reports
        continue-on-error: True
        shell: bash -l {0}
        run: |
          cd $HOME/work/Aviary/Aviary/aviary/docs
          find _build/html/reports/ -type f -name '*.log' \
            -exec echo "#################################################################" \; \
            -exec echo {} \; \
            -exec echo "#################################################################" \; \
            -exec cat {} \;
      - name: Publish docs to github.io
        if: |
          github.event_name == 'push' && github.ref == 'refs/heads/main'
        shell: bash -l {0}
        run: |
          echo "============================================================="
          echo "Publishing Docs to github.io"
          echo "============================================================="
          pip install ghp-import
          cd $HOME/work/Aviary/Aviary/aviary
          ghp-import -n -p -f docs/_build/html
      - name: Scan for security issues
        shell: bash -l {0}
        run: |
          python -m pip install bandit
          echo "============================================================="
          echo "Run bandit scan for medium/high severity issues"
          echo "============================================================="
          cd $GITHUB_WORKSPACE
          python -m bandit -r -ll aviary
      - name: Checkout actions (again)
        uses: actions/checkout@v3
        with:
          sparse-checkout: |
            .github/actions
          path: actions
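
The docs job follows the same pattern: shared environment setup, a Jupyter Book build, a dump of build report logs, a publish step gated to pushes on main, and a bandit security scan. A minimal local sketch of the build and scan steps, assuming an editable Aviary install as above (the gh-pages publish step is omitted since it pushes to the live site):

    # Sketch: build the docs and run the security scan locally from the repository root.
    pip install -U jupyter-book
    (cd aviary/docs && bash build_book.sh)
    python -m pip install bandit
    python -m bandit -r -ll aviary   # report medium/high severity issues only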