Workflow file for the "update readme" run (#8)

name: Run Scrapy Spiders
on:
  push:
    branches: [master]
  pull_request:
    branches: [master]
  schedule:
    - cron: "0 0 * * *" # Run daily at midnight UTC
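# A single "scrape" job runs the spiders, commits the refreshed CSV files back to
# the branch, and uploads them as a workflow artifact.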
jobs:
  scrape:
    permissions:
      contents: write
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
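      # Check out the branch that triggered the run, using a personal access token
      # stored in the MEONG secret so a later step can push commits back. For
      # pull_request events the repository and ref point at the PR's head branch.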
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.MEONG }}
          repository: ${{ github.event.pull_request.head.repo.full_name }}
          ref: ${{ github.head_ref }}
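      # Install the latest Python 3 release and cache pip downloads between runs.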
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.x"
          cache: "pip"
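      # Install the project's requirements plus Scrapy, then download the browser
      # binaries Playwright needs.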
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt scrapy
          playwright install
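      # Run the spiders; scrape.sh is expected to write its CSV results into output/.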
      - name: Run scraping script
        run: ./scrape.sh
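      # Gather the generated CSVs under public/ so they can be committed and uploaded below.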
      - name: Move CSV files to public directory
        run: |
          mkdir -p public
          mv output/*.csv public/
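      # git-auto-commit-action commits and pushes any changed files directly to the
      # checked-out branch; "[skip ci]" in the message prevents the push from
      # triggering another workflow run.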
      - name: Commit and push updated CSV files
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "[skip ci] - updated"
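      # Also expose the CSVs as a downloadable artifact on the workflow run.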
      - name: Upload CSV files as artifact
        uses: actions/upload-artifact@v3
        with:
          name: scraped-data
          path: public/*.csv