# Run newsapi_articles pipeline from newsapi_pipeline.py (workflow run #19)
---
# GitHub Actions workflow: runs the newsapi pipeline script daily (and on
# manual dispatch), with a guard job that skips duplicate concurrent runs.
name: Run newsapi_articles pipeline from newsapi_pipeline.py

# 'on' is quoted so generic YAML 1.1 parsers don't read the key as boolean true.
'on':
  schedule:
    # Daily at 00:00 UTC. Quoted: a cron string with '*' is safer as an
    # explicit string scalar.
    - cron: '0 0 * * *'
  # Allow manual triggering from the Actions tab.
  workflow_dispatch: null

# Pipeline configuration, consumed via environment variables.
# NOTE(review): naming convention (DESTINATION__*, SOURCES__*) suggests a dlt
# pipeline reading config from env — confirm against newsapi_pipeline.py.
env:
  DESTINATION__FILESYSTEM__DATASET_NAME: newsapi_everything_ai
  DESTINATION__FILESYSTEM__BUCKET_URL: ${{ secrets.BUCKET_URL }}
  DESTINATION__FILESYSTEM__CREDENTIALS__AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  DESTINATION__FILESYSTEM__CREDENTIALS__AWS_SECRET_ACCESS_KEY: ${{ secrets.DESTINATION__FILESYSTEM__CREDENTIALS__AWS_SECRET_ACCESS_KEY }}
  SOURCES__NEWSAPI_PIPELINE__API_KEY: ${{ secrets.SOURCES__NEWSAPI_PIPELINE__API_KEY }}

jobs:
  # Guard job: publishes should_skip=true when a duplicate run is in flight,
  # so run_pipeline can bail out instead of racing it.
  maybe_skip:
    runs-on: ubuntu-latest
    outputs:
      should_skip: ${{ steps.skip_check.outputs.should_skip }}
    steps:
      - id: skip_check
        uses: fkirc/skip-duplicate-actions@v5
        with:
          concurrent_skipping: always
          skip_after_successful_duplicate: 'false'
          do_not_skip: '[]'

  run_pipeline:
    needs: maybe_skip
    if: needs.maybe_skip.outputs.should_skip != 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Check out
        uses: actions/checkout@v3
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          # Quoted so YAML cannot re-type the version string.
          python-version: '3.11.x'
      # Restore a cached virtualenv keyed on the requirements file.
      - uses: syphar/restore-virtualenv@v1
        id: cache-virtualenv
        with:
          requirement_files: requirements_github_action.txt
      - uses: syphar/restore-pip-download-cache@v1
        if: steps.cache-virtualenv.outputs.cache-hit != 'true'
      # Install dependencies only on a cache miss; a restored virtualenv
      # already contains them.
      - run: pip install -r requirements_github_action.txt
        if: steps.cache-virtualenv.outputs.cache-hit != 'true'
      - name: Run pipeline script
        run: python 'newsapi_pipeline.py'