remove concurrency #63

.github/workflows/weekly-summaries.yml

name: Contributor Updates

on:
  schedule:
    - cron: '30 22 * * *' # Daily at 5:30 PM EST (22:30 UTC)
    - cron: '0 23 * * 5' # Weekly on Friday at 6:00 PM EST (23:00 UTC)
    - cron: '30 23 4 * *' # Monthly on the 4th at 6:30 PM EST (23:30 UTC)
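    # All three schedules trigger this same workflow; the weekly and monthly
    # steps gate themselves via the IS_FRIDAY / IS_MONTH_START checks below.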
  push:
    branches: ["main"]
  workflow_dispatch: # Allows manual triggering

permissions:
  contents: write
  pages: write
  id-token: write
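# contents: write lets the job commit updated data back to the repo;
# pages: write and id-token: write are required by actions/deploy-pages.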

jobs:
  update-contributors:
    timeout-minutes: 30
    runs-on: ubuntu-latest
    env:
      TZ: 'America/New_York' # Set timezone for consistent timing
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
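          # Full-history clone (fetch-depth: 0); assumed necessary so the
          # history/merge scripts below can look back as far as DATE_START.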
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'
      - name: Setup Bun
        uses: oven-sh/setup-bun@v1
      - name: Setup Pages
        uses: actions/configure-pages@v4
      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Install dependencies
        run: bun install
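        # Assumed layout: package.json (and lockfile) live at the repo root,
        # providing the toolchain for `bun run build` further down.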
      - name: Set date variables
        run: |
          echo "TIMESTAMP=$(date +'%Y_%m_%d')" >> $GITHUB_ENV
          echo "IS_FRIDAY=$(date +'%u')" >> $GITHUB_ENV
          echo "IS_MONTH_START=$(date +'%-d')" >> $GITHUB_ENV
          echo "DATE_START=2024-10-15" >> $GITHUB_ENV
          echo "DATE_END=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
      # Daily workflow
      - name: Process daily data
        env:
          GH_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }}
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
        run: |
          # Create directories
          mkdir -p data/daily data/daily/history
          # Fetch current data with timestamp
          bash scripts/fetch_github.sh elizaos eliza --type prs --days 1 | tee data/daily/prs.json data/daily/history/prs_${TIMESTAMP}.json
          bash scripts/fetch_github.sh elizaos eliza --type issues --days 1 | tee data/daily/issues.json data/daily/history/issues_${TIMESTAMP}.json
          bash scripts/fetch_github.sh elizaos eliza --type commits --days 1 | tee data/daily/commits.json data/daily/history/commits_${TIMESTAMP}.json
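          # tee writes each fetch to two places: the "current" files consumed by
          # the steps below, and dated copies in history/ that the weekly and
          # monthly merges read back later.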
          # Process data
          [ -f data/daily/contributors.json ] && mv data/daily/contributors.json data/daily/history/contributors_${TIMESTAMP}.json || true
          [ -f data/daily/summary.json ] && mv data/daily/summary.json data/daily/history/summary_${TIMESTAMP}.json || true
          [ -f data/daily/summary.md ] && mv data/daily/summary.md data/daily/history/summary_${TIMESTAMP}.md || true
          python scripts/combine_raw.py -p data/daily/prs.json -i data/daily/issues.json -c data/daily/commits.json -o data/daily/combined.json
          python scripts/calculate_scores.py data/daily/combined.json data/daily/scored.json
          python scripts/summarize.py summary data/daily/scored.json data/daily/contributors.json --model openai
          python scripts/summarize_daily.py data/daily/contributors.json -t json data/daily/summary.json --model openai
          python scripts/summarize_daily.py data/daily/contributors.json -t md data/daily/summary.md --model openai
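          # Roll daily history into the long-run view: merge ~85 days of files,
          # analyze contributors over the full date range, then fold the XP
          # scores back into data/historical.json.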
          python scripts/merge_contributors.py --days 85 --dir data/daily/history -o data/historical_merge.json
          python scripts/analyze_contributors.py data/historical_merge.json data/analysis.json --after ${DATE_START} --before ${DATE_END} -f
          python scripts/merge_contributors_xp.py data/historical_merge.json data/analysis.json data/historical.json
      # Weekly workflow using daily history
      - name: Process weekly data and generate thread
        if: env.IS_FRIDAY == '5'
        env:
          GH_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }}
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
        run: |
          # Create directories
          mkdir -p data/weekly/history data/weekly/threads/history
          # Combine last 7 days of daily data
          python scripts/merge_contributors.py \
            --days 7 \
            --dir data/daily/history \
            -o data/weekly/scored.json
          # Generate weekly summaries
          [ -f data/weekly/contributors.json ] && mv data/weekly/contributors.json data/weekly/history/contributors_${TIMESTAMP}.json || true
          python scripts/summarize.py summary data/weekly/scored.json data/weekly/contributors.json --model openai
          # Generate and manage weekly thread
          # bash scripts/generate_history_summaries.sh
          bash scripts/manage_thread_history.sh
          # Verify and log thread generation
          if [ -f "data/weekly/threads/latest.txt" ]; then
            echo "Weekly thread generated successfully"
            echo "Thread content:"
            cat "data/weekly/threads/latest.txt"
          else
            echo "Warning: Thread generation may have failed"
          fi
      - name: Archive thread artifacts
        if: env.IS_FRIDAY == '5'
        uses: actions/upload-artifact@v4
        with:
          name: weekly-thread
          path: |
            data/weekly/threads/latest.txt
            data/weekly/threads/history/
      # Monthly workflow using daily history
      - name: Process monthly data
        if: env.IS_MONTH_START == '4'
        env:
          GH_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }}
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
        run: |
          # Create directories
          mkdir -p data/monthly/history
          # Combine last 31 days of daily data
          python scripts/merge_contributors.py \
            --days 31 \
            --dir data/daily/history \
            -o data/monthly/scored.json
          # Generate monthly summaries
          [ -f data/monthly/contributors.json ] && mv data/monthly/contributors.json data/monthly/history/contributors_${TIMESTAMP}.json || true
          python scripts/summarize.py summary data/monthly/scored.json data/monthly/contributors.json --model openai
      - name: Build
        run: bun run build
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: ./out
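          # ./out is assumed to be the static export produced by `bun run build`;
          # upload-pages-artifact packages it for the deploy job below.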
      - name: Commit and push if changed
        run: |
          git config --local user.email "github-actions[bot]@users.noreply.github.com"
          git config --local user.name "github-actions[bot]"
          git add data/ profiles/
          git diff --staged --quiet || (git commit -m "Update contributor data and reports [skip ci]" && git push)
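          # [skip ci] keeps this push from re-triggering the workflow's own
          # push: ["main"] trigger.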
        env:
          GITHUB_TOKEN: ${{ github.token }}

  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    needs: update-contributors
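    # Must match the job id declared above so Pages deploys only after the
    # data update and build have succeeded.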

    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
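# workflow_dispatch is enabled, so the pipeline can also be started manually from
# the Actions tab or with the GitHub CLI, e.g. `gh workflow run weekly-summaries.yml`.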