modified main.py for github actions #8

Workflow file for this run

name: Run Scraper Daily
# Set the schedule to run every 24 hours
# on:
#   schedule:
#     - cron: "0 0 * * *"   # This runs at midnight UTC every day
# Testing: run every 5 minutes instead
on:
  push:
    branches:
      - scraper-tecq
  schedule:
    - cron: "*/5 * * * *"   # This runs every 5 minutes
  workflow_dispatch:        # This allows you to run the workflow manually
jobs:
  run-scraper:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4
        with:
          ref: scraper-tecq   # check out the branch the results are pushed to (see the note at the end of this file)
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.13'   # Use your required Python version
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Install Chrome
        run: |
          # GitHub-hosted Ubuntu runners ship with Google Chrome preinstalled; this keeps it current
          sudo apt-get update
          sudo apt-get install -y google-chrome-stable
      - name: Install ChromeDriver
        run: |
          # Download the driver matching the installed Chrome major version via the
          # Chrome for Testing endpoints (the legacy chromedriver.storage.googleapis.com
          # bucket only serves drivers for Chrome 114 and older), then unzip it into place
          CHROME_MAJOR=$(google-chrome --version | grep -oP '\d+' | head -1)
          DRIVER_VERSION=$(curl -s "https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_${CHROME_MAJOR}")
          curl -Lo /tmp/chromedriver.zip "https://storage.googleapis.com/chrome-for-testing-public/${DRIVER_VERSION}/linux64/chromedriver-linux64.zip"
          unzip -o /tmp/chromedriver.zip -d /tmp
          sudo mv /tmp/chromedriver-linux64/chromedriver /usr/local/bin/chromedriver
          sudo chmod +x /usr/local/bin/chromedriver
      - name: Run Scraper
        env:
          CHROME_BIN: "/usr/bin/google-chrome"               # Optional, in case your script needs the Chrome path (see the sketch after the workflow)
          CHROMEDRIVER_PATH: "/usr/local/bin/chromedriver"
        run: |
          python main.py
      - name: Save Excel Output
        # Create a file name with the date and time
        run: |
          timestamp=$(date +"%Y-%m-%d_%H-%M-%S")
          mkdir -p results
          mv output.xlsx "results/output_$timestamp.xlsx"
          # Make the timestamp available to later steps (plain shell variables do not persist across steps)
          echo "timestamp=$timestamp" >> "$GITHUB_ENV"
      - name: Commit and Push the Excel file
        run: |
          git config --local user.email "[email protected]"
          git config --local user.name "GitHub Actions"
          git add results/output_*.xlsx
          # $timestamp is read back from GITHUB_ENV, set in the previous step
          git commit -m "Add data for $timestamp"
          git push origin scraper-tecq
        env:
          # GitHub Token for permissions to push
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# make sure to push to scraper-tecq branch
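
For reference, the Run Scraper step above exports CHROME_BIN and CHROMEDRIVER_PATH before calling python main.py, and the following step expects the script to leave an output.xlsx in the working directory. main.py itself is not part of this run, so the snippet below is only a minimal sketch of how such a script might wire those pieces together with Selenium and pandas (with openpyxl), both assumed to be listed in requirements.txt; the URL, column name, and helper names are illustrative, not taken from the actual scraper.

# Minimal, hypothetical sketch of a main.py that honours the workflow's
# CHROME_BIN / CHROMEDRIVER_PATH variables and writes output.xlsx.
# The real main.py is not shown here; the target URL, parsing, and column
# names below are placeholders.
import os

import pandas as pd                                   # assumed dependency
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service


def build_driver() -> webdriver.Chrome:
    """Create a headless Chrome driver from the paths set by the workflow."""
    options = Options()
    options.add_argument("--headless=new")            # no display on the CI runner
    options.add_argument("--no-sandbox")
    chrome_bin = os.environ.get("CHROME_BIN")
    if chrome_bin:
        options.binary_location = chrome_bin          # optional, as the workflow notes
    service = Service(executable_path=os.environ.get("CHROMEDRIVER_PATH", "chromedriver"))
    return webdriver.Chrome(service=service, options=options)


def main() -> None:
    driver = build_driver()
    try:
        driver.get("https://example.com")             # placeholder URL
        rows = [{"title": driver.title}]              # placeholder scrape
    finally:
        driver.quit()
    # The "Save Excel Output" step expects this exact file name.
    pd.DataFrame(rows).to_excel("output.xlsx", index=False)


if __name__ == "__main__":
    main()

Reading the Chrome and driver paths from the environment rather than hard-coding them keeps the same script usable both locally and on the runner.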