WIP: Use HPC for CI #1550

Workflow file for this run

name: CI pipeline for pySDC

on:
  push:
  pull_request:
  schedule:
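    # runs every Monday at 05:01 UTC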
    - cron: '1 5 * * 1'

jobs:

  lint:
    runs-on: ubuntu-latest

    env:
      YML: 'etc/environment-lint.yml'

    defaults:
      run:
        shell: bash -l {0}

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Install Conda environment with Micromamba
        uses: mamba-org/setup-micromamba@v1
        with:
          environment-file: ${{ env.YML }}
          create-args: >-
            python=3.10

      - name: Code reformatting with black
        run: |
          black pySDC --check --diff --color

      - name: Linting with flakeheaven
        run: |
          flakeheaven lint --benchmark pySDC

  user_cpu_tests_linux:
    runs-on: ubuntu-latest

    strategy:
      matrix:
        python: ['3.7', '3.8', '3.9', '3.10']
        env: ['base', 'fenics', 'mpi4py', 'petsc']
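        # the matrix expands to 16 jobs: every Python version with every environment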

    defaults:
      run:
        shell: bash -l {0}

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Install Conda environment with Micromamba
        uses: mamba-org/setup-micromamba@v1
        with:
          environment-file: "etc/environment-${{ matrix.env }}.yml"
          create-args: >-
            python=${{ matrix.python }}
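
      # Write a sitecustomize.py so that coverage measurement also starts in
      # Python subprocesses, not only in the main pytest process (see the
      # coverage.py documentation on measuring subprocesses).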
      - name: Run pytest for CPU stuff
        run: |
          echo "print('Loading sitecustomize.py...')
          import coverage
          coverage.process_startup() " > sitecustomize.py
          coverage run -m pytest --continue-on-collection-errors -v --durations=0 pySDC/tests -m ${{ matrix.env }}

      - name: Make coverage report
        run: |
          mv data data_${{ matrix.python }}
          coverage combine
          mv .coverage coverage_${{ matrix.env }}_${{ matrix.python }}.dat
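
      # upload only one artifact set per environment, taken from the Python 3.10 job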
      - name: Uploading artifacts
        uses: actions/upload-artifact@v3
        if: matrix.python == '3.10'
        with:
          name: cpu-test-artifacts
          path: |
            data_3.10
            coverage_${{ matrix.env }}_3.10.dat

  user_libpressio_tests:
    runs-on: ubuntu-latest

    container:
      image: brownbaerchen/libpressio:amd64_2
      volumes:
        - ${{ github.workspace }}:/pySDC

    defaults:
      run:
        shell: bash -l {0}
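
    # This job runs inside a prebuilt container image that provides libpressio
    # via Spack; the repository checkout is mounted at /pySDC.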
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Install pySDC and pytest
        run: |
          source /pySDC/pySDC/projects/compression/Docker/install_pySDC.sh

      - name: Run pytest
        run: |
          source /opt/spack/share/spack/setup-env.sh
          spack load libpressio
          coverage run -m pytest --continue-on-collection-errors -v --durations=0 pySDC/tests -m libpressio

      - name: Make coverage report
        run: |
          source /opt/spack/share/spack/setup-env.sh
          spack load libpressio
          mv data data_libpressio
          coverage combine
          mv .coverage coverage_libpressio_3.10.dat

      - name: Upload artifacts
        uses: actions/upload-artifact@v3
        with:
          name: cpu-test-artifacts
          path: |
            data_libpressio
            coverage_libpressio_3.10.dat

#  user_cpu_tests_macos:
#    runs-on: macos-12
#
#    strategy:
#      matrix:
#        env: ['base', 'fenics', 'mpi4py', 'petsc']
#
#    defaults:
#      run:
#        shell: bash -l {0}
#
#    steps:
#      - name: Checkout
#        uses: actions/checkout@v3
#
#      - name: Install Conda environment with Micromamba
#        uses: mamba-org/setup-micromamba@v1
#        with:
#          environment-file: "etc/environment-${{ matrix.env }}.yml"
#
#      - name: Run pytest for CPU stuff
#        run: |
#          pytest --continue-on-collection-errors -v --durations=0 pySDC/tests -m ${{ matrix.env }}
#
##      - name: Get and prepare artifacts
##        run: |
##          pipeline_id=$(curl --header "PRIVATE-TOKEN: ${{ secrets.GITLAB_SECRET_H }}" --silent "https://gitlab.hzdr.de/api/v4/projects/3525/repository/commits/${{ github.head_ref || github.ref_name }}" | jq '.last_pipeline.id')
##          job_id=$(curl --header "PRIVATE-TOKEN: ${{ secrets.GITLAB_SECRET_H }}" --silent "https://gitlab.hzdr.de/api/v4/projects/3525/pipelines/$pipeline_id/jobs" | jq '.[] | select( .name == "bundle" ) | select( .status == "success" ) | .id')
##          curl --output artifacts.zip "https://gitlab.hzdr.de/api/v4/projects/3525/jobs/$job_id/artifacts"
##          rm -rf data
##          unzip artifacts.zip
##          ls -ratl

  post-processing:
    runs-on: ubuntu-latest
    if: ${{ github.repository_owner == 'Parallel-in-Time' }}

    needs:
      - lint
      - user_cpu_tests_linux
      - user_libpressio_tests
      # - wait_for_gitlab

    defaults:
      run:
        shell: bash -l {0}
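
    # runs only on the upstream repository, after all test jobs have finished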
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Install Conda environment with Micromamba
        uses: mamba-org/setup-micromamba@v1
        with:
          environment-file: "etc/environment-base.yml"

      - name: Downloading artifacts
        uses: actions/download-artifact@v3
        with:
          path: .
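
      # merge the per-job coverage files into a single report (XML, presumably
      # consumed by the badge and Codecov steps below; HTML for the published docs)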
      - name: Prepare artifacts
        run: |
          ls -artl cpu-test-artifacts
          cp cpu-test-artifacts/data_3.10/* data/.
          python -m coverage combine cpu-test-artifacts/coverage_*.dat
          python -m coverage xml
          python -m coverage html

      - name: Generate Coverage badge
        run: |
          pip install 'genbadge[all]'
          genbadge coverage -i coverage.xml -o htmlcov/coverage-badge.svg

      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v3

      # - name: Generate benchmark report
      #   uses: pancetta/github-action-benchmark@v1
      #   if: ${{ (!contains(github.event.head_commit.message, '[CI-no-benchmarks]')) && (github.event_name == 'push') }}
      #   with:
      #     name: pySDC Benchmark with pytest-benchmark
      #     tool: 'pytest'
      #     output-file-path: benchmarks/output.json
      #     auto-push: false
      #     skip-fetch-gh-pages: true
      #     benchmark-data-dir-path: bench/${{ github.head_ref || github.ref_name }}
      #     github-token: ${{ secrets.GITHUB_TOKEN }}
      #
      # - name: Push benchmark data
      #   uses: dmnemec/copy_file_to_another_repo_action@main
      #   if: "!contains(github.event.head_commit.message, '[CI-no-benchmarks]')"
      #   env:
      #     API_TOKEN_GITHUB: ${{ secrets.BENCHMARK_BOT }}
      #   with:
      #     source_file: 'bench/${{ github.head_ref || github.ref_name }}'
      #     destination_repo: 'Parallel-in-Time/pySDC-benchmarks'
      #     user_email: '[email protected]'
      #     user_name: 'pancetta'

      - name: Build html I
        run: |
          chmod u+x ./docs/update_apidocs.sh
          ./docs/update_apidocs.sh
          sphinx-build -b html docs/source docs/build/html
          mkdir -p docs/build/html/coverage
          mv htmlcov/* docs/build/html/coverage/.

      # - name: Build html II
      #   if: "!contains(github.event.head_commit.message, '[CI-no-benchmarks]')"
      #   run: |
      #     mkdir -p docs/build/html/benchmarks
      #     cp -r bench/${{ github.head_ref || github.ref_name }}/* docs/build/html/benchmarks/.

      - name: Store docs
        uses: actions/upload-artifact@v3
        with:
          name: docs
          path: docs/build/html
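
      # deploy the docs to GitHub Pages, but only for pushes to master on the
      # upstream repository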
      - name: Deploy 🚀
        uses: JamesIves/github-pages-deploy-action@v4
        if: ${{ github.event_name == 'push' && github.repository_owner == 'Parallel-in-Time' && github.ref == 'refs/heads/master' }}
        with:
          branch: gh-pages # The branch the action should deploy to.
          folder: docs/build/html # The folder the action should deploy.

#  spawn_gitlab_and_postprocess:
#    runs-on: ubuntu-latest
#
##    needs:
##      - lint
#
#    defaults:
#      run:
#        shell: bash -l {0}
#
#    env:
#      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
#
#    steps:
#      - name: Checkout
#        uses: actions/checkout@v3
#        with:
#          fetch-depth: 0
#          ref: ${{ github.event.pull_request.head.sha }}
#
#      - name: Checkout benchmark repository
#        uses: actions/checkout@v3
#        if: ${{ github.event_name == 'push' }}
#        with:
#          repository: Parallel-in-Time/pySDC-benchmarks
#          token: ${{ secrets.BENCHMARK_BOT }}
#          path: bench
#          ref: main
#
#      - name: Install Conda environment with Micromamba
#        uses: mamba-org/provision-with-micromamba@main
#        with:
#          environment-file: etc/environment-base.yml
#
#      - name: Mirror + trigger CI
#        uses: SvanBoxel/gitlab-mirror-and-ci-action@master
#        with:
#          args: "https://gitlab.hzdr.de/r.speck/pysdc"
#        env:
#          FORCE_PUSH: "true"
#          GITLAB_HOSTNAME: "gitlab.hzdr.de"
#          GITLAB_USERNAME: ${{ secrets.GITLAB_SECRET_USER }}
#          GITLAB_PASSWORD: ${{ secrets.GITLAB_SECRET_PASS }}
#          GITLAB_PROJECT_ID: "3525"
#          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
#
#      - name: Get and prepare artifacts
#        run: |
#          pipeline_id=$(curl --header "PRIVATE-TOKEN: ${{ secrets.GITLAB_SECRET_H }}" --silent "https://gitlab.hzdr.de/api/v4/projects/3525/repository/commits/${{ github.head_ref || github.ref_name }}" | jq '.last_pipeline.id')
#          job_id=$(curl --header "PRIVATE-TOKEN: ${{ secrets.GITLAB_SECRET_H }}" --silent "https://gitlab.hzdr.de/api/v4/projects/3525/pipelines/$pipeline_id/jobs" | jq '.[] | select( .name == "bundle" ) | select( .status == "success" ) | .id')
#          curl --output artifacts.zip "https://gitlab.hzdr.de/api/v4/projects/3525/jobs/$job_id/artifacts"
#          rm -rf data
#          unzip artifacts.zip