name: Airflow CI Workflow

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      # Step 1: Checkout code
      - name: Checkout code
        uses: actions/checkout@v4  # v2 runs on a deprecated Node runtime; v4 is current
      # Step 2: Set up Python environment
      - name: Set up Python
        uses: actions/setup-python@v5  # v2 runs on a deprecated Node runtime; v5 is current
        with:
          python-version: '3.8'
      # Step 3: Set up Postgres service (required for Airflow)
      - name: Setup PostgreSQL
        uses: Harmon758/postgresql-action@v1.0.0
        with:
          postgresql_version: '13'
          postgresql_db: 'airflow'
          postgresql_user: 'airflow'
          postgresql_password: 'airflow'
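      # Alternative (a sketch): GitHub-native service containers can host Postgres
      # without a third-party action; this block would sit at the job level,
      # next to runs-on:
      #   services:
      #     postgres:
      #       image: postgres:13
      #       env:
      #         POSTGRES_DB: airflow
      #         POSTGRES_USER: airflow
      #         POSTGRES_PASSWORD: airflow
      #       ports:
      #         - 5432:5432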
      # Step 4: Set AIRFLOW_HOME and DB connection explicitly (needed before db migrate)
      - name: Set environment variables
        run: |
          echo "AIRFLOW_HOME=$GITHUB_WORKSPACE/airflow" >> $GITHUB_ENV
          # Point Airflow at the repo's dags/ folder (it defaults to $AIRFLOW_HOME/dags)
          echo "AIRFLOW__CORE__DAGS_FOLDER=$GITHUB_WORKSPACE/dags" >> $GITHUB_ENV
          echo "AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@localhost/airflow" >> $GITHUB_ENV
          echo "AIRFLOW__CORE__LOAD_EXAMPLES=False" >> $GITHUB_ENV
      # Step 5: Install dependencies (Airflow, Astro SDK)
      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          # Install against the official constraints file so transitive pins match the release
          pip install "apache-airflow[postgres]==2.10.2" \
            --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.8.txt"
          pip install astro-sdk-python pandas beautifulsoup4
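          # Optional sanity check: confirm the pinned Airflow version resolved
          airflow version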
      # Step 6: Wait for PostgreSQL to be ready
      - name: Wait for PostgreSQL to be ready
        run: |
          # Poll with pg_isready (preinstalled on ubuntu-latest) rather than a fixed sleep
          timeout 30 bash -c 'until pg_isready -h localhost -p 5432; do sleep 1; done'
      # Step 7: Run Airflow DB migrations
      - name: Migrate Airflow DB
        run: |
          airflow db migrate
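      # Note: `airflow db migrate` (Airflow 2.7+) supersedes the deprecated `airflow db init`.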
      # Step 8: Check the Airflow DB connection
      - name: Check Airflow DB status
        run: |
          airflow db check
      # Step 9: Test DAG import (ensure the DAGs folder parses without import errors)
      - name: Test DAG Import
        run: |
          airflow dags list
      # Step 10: Validate the DAG (a clean Python import confirms the DAG file's syntax)
      - name: Validate DAGs
        run: |
          python dags/wikiflow.py
          airflow dags list-import-errors
      # Step 11: Trigger DAG Run
      - name: Trigger DAG run
        run: |
          # Serialize DAGs into the metadata DB first; `dags trigger` resolves the DAG ID there
          airflow dags reserialize
          airflow dags trigger wikiflow
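      # Optional follow-up (a sketch): without a scheduler running, the trigger above only
      # queues the run; `airflow dags test` would execute every task in-process instead:
      # - name: Execute DAG in-process
      #   run: |
      #     airflow dags test wikiflow 2024-01-01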