Clean up data scraping code #49
Workflow file for this run

# Name of workflow
name: Run Epiworld Forecast
# Controls when the workflow will run
on:
  # Triggers the workflow on push or pull request events
  push:
    branches:
      - "main"
  pull_request:
    branches:
      - "main"
  # Schedule action to run weekly
  # at specific UTC times using POSIX cron syntax
  # Set here to run at 6am UTC every Thursday (since
  # the DHHS dataset updates weekly on Wednesdays)
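  # Cron fields are: minute, hour, day-of-month, month, day-of-week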
  schedule:
    - cron: '0 6 * * 4'
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
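# (pages: write and id-token: write are required by actions/deploy-pages for OIDC verification)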
permissions:
  contents: read
  pages: write
  id-token: write
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # The "build" job renders the site and uploads it as a Pages artifact
  build:
    # The type of runner that the job will run on
    runs-on: ubuntu-latest
    container: ghcr.io/epiforesite/epiworld-forecasts
    permissions:
      contents: write
    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks out the repository under $GITHUB_WORKSPACE so the job can access it
      - uses: actions/checkout@v4
      - name: Render forecast Quarto site
        run: quarto render
      - name: Setup GitHub Pages
        uses: actions/configure-pages@v4
      # Upload website artifact
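      # The upload path below is assumed to match Quarto's output directory (set in the project's Quarto config)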
      - name: Upload artifacts
        uses: actions/upload-pages-artifact@v3
        with:
          name: github-pages
          path: build/html/
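  # The deploy job below publishes the "github-pages" artifact uploaded by the build job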
  # Deploy to the github-pages environment
  deploy:
    # Skip deployment for pull request runs
    if: ${{ github.event_name != 'pull_request' }}
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    needs: build
    # Specify runner + deployment step
    runs-on: ubuntu-latest
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
        with:
          artifact_name: github-pages
          preview: true
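          # preview is an input of actions/deploy-pages that marks the run as a pull request preview deployment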