Scrape and publish #12057
Workflow file for this run

name: Scrape and publish
on:
  push:
    branches: [ master ]
  schedule:
    # at minute 1 of every hour, UTC
    - cron: '1 * * * *'
jobs:
  scrape-and-publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.11
        uses: actions/setup-python@v3
        with:
          python-version: "3.11"
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          sudo apt-get install -y jq
      - name: Remember current time
        id: time
        run: echo "DATE=$(date --utc)" >> $GITHUB_OUTPUT
      - name: Remember checksum of the data before update
        id: data-before
        run: echo "MD5=$(md5sum data.json)" >> $GITHUB_OUTPUT
      - name: Scrape data
        run: ./scraper.py > data.raw
      - name: Prettify JSON
        run: jq --sort-keys . data.raw > data.json
      - name: Get checksum of the data after update
        id: data-after
        run: echo "MD5=$(md5sum data.json)" >> $GITHUB_OUTPUT
      - name: Update checking timestamp
        run: sed -i 's/Last Prices Check.*/Last Prices Check <strong>${{ steps.time.outputs.DATE }}/g' index.html
      - uses: EndBug/add-and-commit@v9
        with:
          add: 'index.html data.json'
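
The scraper invoked by the "Scrape data" step is not part of this workflow file. As a rough sketch only, a minimal scraper.py compatible with the steps above would fetch a page, extract prices, and write compact JSON to stdout, so that ./scraper.py > data.raw followed by jq --sort-keys . data.raw > data.json yields the committed data.json. The URL, selectors, and field names below are placeholders, not the repository's actual code, and it assumes requests and beautifulsoup4 are listed in requirements.txt:

#!/usr/bin/env python3
"""Hypothetical sketch of scraper.py; the real script is not shown in this run."""
import json
import sys

import requests
from bs4 import BeautifulSoup

# Placeholder URL -- replace with the site actually being scraped.
URL = "https://example.com/prices"


def scrape():
    response = requests.get(URL, timeout=30)
    response.raise_for_status()
    soup = BeautifulSoup(response.text, "html.parser")
    prices = {}
    # Placeholder markup: one table row per product, with name and price cells.
    for row in soup.select("table.prices tr"):
        cells = row.find_all("td")
        if len(cells) >= 2:
            prices[cells[0].get_text(strip=True)] = cells[1].get_text(strip=True)
    return prices


if __name__ == "__main__":
    # Emit compact JSON on stdout; the workflow redirects it to data.raw and
    # lets jq --sort-keys produce the prettified data.json that gets committed.
    json.dump(scrape(), sys.stdout)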