Benchmarking tests #152

name: Benchmarking tests

on:
  workflow_dispatch:
  schedule:
    - cron: "30 1 1,15 * *" # twice a month, on the 1st and the 15th, at 1:30 AM UTC
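
# HF_HUB_ENABLE_HF_TRANSFER=1 enables the Rust-based hf_transfer download backend;
# OMP_NUM_THREADS/MKL_NUM_THREADS cap OpenMP/MKL CPU threading at 8 threads for the run.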
env:
  DIFFUSERS_IS_CI: yes
  HF_HUB_ENABLE_HF_TRANSFER: 1
  HF_HOME: /mnt/cache
  OMP_NUM_THREADS: 8
  MKL_NUM_THREADS: 8

jobs:
  torch_pipelines_cuda_benchmark_tests:
    env:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL_BENCHMARK }}
    name: Torch Core Pipelines CUDA Benchmarking Tests
    strategy:
      fail-fast: false
      max-parallel: 1
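      # max-parallel: 1 serializes runs so concurrent benchmarks never contend for the same GPU.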
    runs-on:
      group: aws-g6-4xlarge-plus
    container:
      image: diffusers/diffusers-pytorch-compile-cuda
      options: --shm-size "16gb" --ipc host --gpus 0
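      # --shm-size/--ipc host give PyTorch worker processes enough shared memory in the container.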
    steps:
      - name: Checkout diffusers
        uses: actions/checkout@v3
        with:
          fetch-depth: 2
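          # fetch-depth: 2 keeps the checkout shallow while still including the parent commit.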
      - name: NVIDIA-SMI
        run: |
          nvidia-smi
      - name: Install dependencies
        run: |
          python -m venv /opt/venv && export PATH="/opt/venv/bin:$PATH"
          python -m uv pip install -e ".[quality,test]"
          python -m uv pip install pandas peft
      - name: Environment
        run: |
          python utils/print_env.py
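      # HF_TOKEN uses a bot token so push_results.py can publish results (presumably to the Hub).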
      - name: Diffusers Benchmarking
        env:
          HF_TOKEN: ${{ secrets.DIFFUSERS_BOT_TOKEN }}
          BASE_PATH: benchmark_outputs
        run: |
          export TOTAL_GPU_MEMORY=$(python -c "import torch; print(torch.cuda.get_device_properties(0).total_memory / (1024**3))")
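          # total memory of GPU 0 in GiB (bytes / 1024^3); presumably read by the scripts below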
          cd benchmarks && mkdir ${BASE_PATH} && python run_all.py && python push_results.py
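      # always() uploads the raw benchmark outputs as an artifact even when an earlier step failed.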
      - name: Test suite reports artifacts
        if: ${{ always() }}
        uses: actions/upload-artifact@v4
        with:
          name: benchmark_test_reports
          path: benchmarks/benchmark_outputs
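      # notify_benchmarking_status.py presumably posts the final status to Slack via the
      # SLACK_WEBHOOK_URL configured at the job level above.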
      - name: Report success status
        if: ${{ success() }}
        run: |
          pip install requests && python utils/notify_benchmarking_status.py --status=success
      - name: Report failure status
        if: ${{ failure() }}
        run: |
          pip install requests && python utils/notify_benchmarking_status.py --status=failure