From b216d0dc2cf0f2dc9f981bdeb6633fac8cb7731f Mon Sep 17 00:00:00 2001
From: jakob-fritz <37077134+jakob-fritz@users.noreply.github.com>
Date: Wed, 15 May 2024 12:10:39 +0200
Subject: [PATCH] Housekeeping (#430)

* Updated YAML to comply with linter. This is mainly adding start markers.
* More YAML updates to comply. Removed empty lines for better readability and removed some comments to make files more compact (again to improve readability).
* Updated checkout-action to use newer NodeJS
* Reduce number of python-versions for faster CI during development
* Removed KIT-Tests in Gitlab
* Updated upload-artifact-action
* Updated path to images in libpressio and monodomain
* Reverted to earlier upload-artifact because of a breaking change
* Switched to upload-artifact@v4 and set individual names
* Slightly changed name of artifacts for easier use of wildcards
* Skipping two long-running project tests for faster results
* Updated paths in postprocessing
* Re-added projects to CI and removed printing from CI
* Cupyx seems to be needed for creation of docs
* Corrected import statement
* Changed name of readme to rst
* Updated paths of images
* Create two used images in CI
* Move installation of dependencies into env-file
* Updated names of links to avoid duplicate "here"
* Use all python versions in CI again
* Changed version of codecov and added token
* Removed unused jobs in CI
---
 .github/dependabot.yml | 28 +--
 .github/workflows/ci_pipeline.yml | 200 +++---------------
 .gitlab-ci.yml | 89 +-------
 docs/update_apidocs.sh | 2 -
 etc/environment-base.yml | 2 +
 etc/environment-cupy.yml | 2 +
 etc/environment-fenics.yml | 2 +
 etc/environment-lint.yml | 4 +-
 etc/environment-mpi4py.yml | 2 +
 etc/environment-petsc.yml | 2 +
 etc/environment-postprocess.yml | 8 +
 etc/environment-pytorch.yml | 2 +
 etc/environment-tests.yml | 2 +
 .../environment-tutorial.yml | 2 +
 .../AllenCahn_Bayreuth/environment.yml | 3 +-
 pySDC/projects/AsympConv/environment.yml | 2 +
 pySDC/projects/DAE/environment.yml | 2 +
 .../projects/FastWaveSlowWave/environment.yml | 2 +
 pySDC/projects/Hamiltonian/environment.yml | 2 +
 pySDC/projects/Monodomain/README.rst | 10 +-
 .../Monodomain/etc/environment-monodomain.yml | 2 +
 pySDC/projects/PinTSimE/README.rst | 2 +-
 pySDC/projects/PinTSimE/environment.yml | 2 +
 pySDC/projects/RDC/environment.yml | 2 +
 pySDC/projects/Resilience/README.rst | 2 +-
 pySDC/projects/Resilience/environment.yml | 2 +
 pySDC/projects/Resilience/tests/test_order.py | 8 +
 pySDC/projects/SDC_showdown/environment.yml | 2 +
 .../projects/Second_orderSDC/environment.yml | 2 +
 pySDC/projects/TOMS/environment.yml | 2 +
 pySDC/projects/compression/README.rst | 8 +-
 pySDC/projects/matrixPFASST/environment.yml | 2 +
 pySDC/projects/parallelSDC/environment.yml | 2 +
 .../{README.md => README.rst} | 0
 .../parallelSDC_reloaded/environment.yml | 2 +
 pySDC/projects/soft_failure/environment.yml | 2 +
 pySDC/tests/README.rst | 2 +-
 37 files changed, 130 insertions(+), 282 deletions(-)
 rename pySDC/projects/parallelSDC_reloaded/{README.md => README.rst} (100%)

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index b2ed0c944c..395ade51bf 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -1,15 +1,17 @@
+---
+
 version: 2
 updates:
-- package-ecosystem: pip
-  directory: "/"
-  schedule:
-    interval: daily
-  open-pull-requests-limit: 10
-  ignore:
-  - dependency-name: fenics
-    versions:
-    - ">= 2019.1.a"
-    - "< 2019.2"
-  - dependency-name: sphinx
-    versions:
-    - "> 1.8.5"
+  - package-ecosystem: pip
+    directory: "/"
+    schedule:
interval: daily + open-pull-requests-limit: 10 + ignore: + - dependency-name: fenics + versions: + - ">= 2019.1.a" + - "< 2019.2" + - dependency-name: sphinx + versions: + - "> 1.8.5" diff --git a/.github/workflows/ci_pipeline.yml b/.github/workflows/ci_pipeline.yml index 02f0f7f277..9db925d391 100644 --- a/.github/workflows/ci_pipeline.yml +++ b/.github/workflows/ci_pipeline.yml @@ -1,3 +1,5 @@ +--- + name: CI pipeline for pySDC on: @@ -10,86 +12,71 @@ jobs: lint: runs-on: ubuntu-latest - env: YML: 'etc/environment-lint.yml' - defaults: run: shell: bash -l {0} - steps: - name: Checkout uses: actions/checkout@v1 - - name: Install Conda environment with Micromamba uses: mamba-org/setup-micromamba@v1 with: environment-file: ${{ env.YML }} create-args: >- python=3.10 - - name: Code reformatting with black run: | black pySDC --check --diff --color - - name: Linting with flakeheaven run: | flakeheaven lint --benchmark pySDC user_cpu_tests_linux: runs-on: ubuntu-latest - strategy: fail-fast: false matrix: env: ['base', 'fenics', 'mpi4py', 'petsc', 'pytorch'] python: ['3.8', '3.9', '3.10', '3.11', '3.12'] - defaults: run: shell: bash -l {0} - steps: - name: Checkout - uses: actions/checkout@v3 - + uses: actions/checkout@v4 - name: Install Conda environment with Micromamba uses: mamba-org/setup-micromamba@v1 with: environment-file: "etc/environment-${{ matrix.env }}.yml" create-args: >- python=${{ matrix.python }} - - name: Install additional packages as needed run: | micromamba install -y --file etc/environment-tests.yml --freeze-installed - - name: Run pytest for CPU stuff run: | echo "print('Loading sitecustomize.py...') import coverage coverage.process_startup() " > sitecustomize.py coverage run -m pytest --continue-on-collection-errors -v --durations=0 pySDC/tests -m ${{ matrix.env }} - - name: Make coverage report run: | mv data data_${{ matrix.python }} coverage combine mv .coverage coverage_${{ matrix.env }}_${{ matrix.python }}.dat - - name: Uploading artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: matrix.python == '3.10' with: - name: cpu-test-artifacts + name: test-artifacts-cpu-${{ matrix.env }} path: | data_3.10 coverage_${{ matrix.env }}_3.10.dat project_cpu_tests_linux: runs-on: ubuntu-latest - strategy: fail-fast: false matrix: @@ -98,7 +85,6 @@ jobs: - AsympConv - DAE - FastWaveSlowWave - # - GPU - Hamiltonian - matrixPFASST - parallelSDC @@ -116,76 +102,62 @@ jobs: python: '3.11' - env: parallelSDC python: '3.12' - defaults: run: shell: bash -l {0} - steps: - name: Checkout - uses: actions/checkout@v3 - + uses: actions/checkout@v4 - name: Install Conda environment with Micromamba uses: mamba-org/setup-micromamba@v1 with: environment-file: "pySDC/projects/${{ matrix.env }}/environment.yml" create-args: >- python=${{ matrix.python }} - - name: Install additional packages as needed run: | micromamba install -y --file etc/environment-tests.yml --freeze-installed - - name: Run pytest for CPU stuff run: | echo "print('Loading sitecustomize.py...') import coverage coverage.process_startup() " > sitecustomize.py coverage run -m pytest --continue-on-collection-errors -v --durations=0 pySDC/projects/${{ matrix.env }}/tests - - name: Make coverage report run: | mv data data_${{ matrix.python }} coverage combine mv .coverage coverage_${{ matrix.env }}_${{ matrix.python }}.dat - - name: Uploading artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: matrix.python == '3.10' with: - name: cpu-test-artifacts + name: 
test-artifacts-project-${{ matrix.env }} path: | data_3.10 coverage_${{ matrix.env }}_3.10.dat user_libpressio_tests: runs-on: ubuntu-latest - container: image: brownbaerchen/libpressio:amd64_2 volumes: - ${{ github.workspace }}:/pySDC - defaults: run: shell: bash -l {0} - steps: - - name: Checkout - uses: actions/checkout@v3 - + uses: actions/checkout@v4 - name: Install pySDC and pytest run: | source /pySDC/pySDC/projects/compression/Docker/install_pySDC.sh - - name: Run pytest run: | source /opt/spack/share/spack/setup-env.sh spack load libpressio coverage run -m pytest --continue-on-collection-errors -v --durations=0 pySDC/projects/compression/tests -m libpressio - - name: Make coverage report run: | source /opt/spack/share/spack/setup-env.sh @@ -194,100 +166,58 @@ jobs: mv data data_libpressio coverage combine mv .coverage coverage_libpressio_3.10.dat - - name: Upload artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: cpu-test-artifacts + name: test-artifacts-libpressio path: | data_libpressio coverage_libpressio_3.10.dat user_monodomain_tests_linux: runs-on: ubuntu-latest - defaults: run: shell: bash -l {0} - steps: - name: Checkout - uses: actions/checkout@v3 - + uses: actions/checkout@v4 - name: Install Conda environment with Micromamba uses: mamba-org/setup-micromamba@v1 with: environment-file: "pySDC/projects/Monodomain/etc/environment-monodomain.yml" create-args: >- python=3.10 - - name: Compile C++ ionic models env: IONIC_MODELS_PATH: "pySDC/projects/Monodomain/problem_classes/ionicmodels/cpp" - run: | - c++ -O3 -Wall -shared -std=c++11 -fPIC -fvisibility=hidden $(python3 -m pybind11 --includes) ${IONIC_MODELS_PATH}/bindings_definitions.cpp -o ${IONIC_MODELS_PATH}/ionicmodels$(python3-config --extension-suffix) - + run: > + c++ -O3 -Wall -shared -std=c++11 -fPIC -fvisibility=hidden + $(python3 -m pybind11 --includes) + ${IONIC_MODELS_PATH}/bindings_definitions.cpp + -o ${IONIC_MODELS_PATH}/ionicmodels$(python3-config --extension-suffix) - name: Run pytest for CPU stuff run: | echo "print('Loading sitecustomize.py...') import coverage coverage.process_startup() " > sitecustomize.py coverage run -m pytest --continue-on-collection-errors -v --durations=0 pySDC/projects/Monodomain/tests -m monodomain - - name: Make coverage report run: | mv data data_monodomain coverage combine mv .coverage coverage_monodomain_3.10.dat - - name: Uploading artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: cpu-test-artifacts + name: test-artifacts-monodomain path: | data_monodomain coverage_monodomain_3.10.dat - -# user_cpu_tests_macos: -# runs-on: macos-12 -# -# strategy: -# matrix: -# env: ['base', 'fenics', 'mpi4py', 'petsc'] -# -# defaults: -# run: -# shell: bash -l {0} -# -# steps: -# - name: Checkout -# uses: actions/checkout@v3 -# -# - name: Install Conda environment with Micromamba -# uses: mamba-org/setup-micromamba@v1 -# with: -# environment-file: "etc/environment-${{ matrix.env }}.yml" -# -# - name: Run pytest for CPU stuff -# run: | -# pytest --continue-on-collection-errors -v --durations=0 pySDC/tests -m ${{ matrix.env }} - -# # - name: Get and prepare artifacts -# # run: | -# # pipeline_id=$(curl --header "PRIVATE-TOKEN: ${{ secrets.GITLAB_SECRET_H }}" --silent "https://gitlab.hzdr.de/api/v4/projects/3525/repository/commits/${{ github.head_ref || github.ref_name }}" | jq '.last_pipeline.id') -# # job_id=$(curl --header "PRIVATE-TOKEN: ${{ secrets.GITLAB_SECRET_H }}" --silent 
"https://gitlab.hzdr.de/api/v4/projects/3525/pipelines/$pipeline_id/jobs" | jq '.[] | select( .name == "bundle" ) | select( .status == "success" ) | .id') -# # curl --output artifacts.zip "https://gitlab.hzdr.de/api/v4/projects/3525/jobs/$job_id/artifacts" -# # rm -rf data -# # unzip artifacts.zip -# # ls -ratl - - post-processing: runs-on: ubuntu-latest - if: ${{ github.repository_owner == 'Parallel-in-Time'}} - needs: - lint - user_cpu_tests_linux @@ -295,40 +225,34 @@ jobs: - user_libpressio_tests - user_monodomain_tests_linux # - wait_for_gitlab - defaults: run: shell: bash -l {0} - steps: - name: Checkout - uses: actions/checkout@v3 - + uses: actions/checkout@v4 - name: Install Conda environment with Micromamba uses: mamba-org/setup-micromamba@v1 with: environment-file: "etc/environment-postprocess.yml" - - name: Downloading artifacts - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: . - + merge-multiple: true - name: Prepare artifacts run: | - ls -artl cpu-test-artifacts - cp cpu-test-artifacts/data_3.10/* data/. - python -m coverage combine cpu-test-artifacts/coverage_*_3.10.dat + cp data_3.10/* data/. + python -m coverage combine coverage_*_3.10.dat python -m coverage xml python -m coverage html - - name: Generate Coverage badge run: | - pip install genbadge[all] genbadge coverage -i coverage.xml -o htmlcov/coverage-badge.svg - - name: Upload coverage reports to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV }} # - name: Generate benchmark report # uses: pancetta/github-action-benchmark@v1 @@ -360,77 +284,17 @@ jobs: sphinx-build -b html docs/source docs/build/html mkdir -p docs/build/html/coverage mv htmlcov/* docs/build/html/coverage/. - -# - name: Build html II -# if: "!contains(github.event.head_commit.message, '[CI-no-benchmarks]')" -# run: | -# mkdir -p docs/build/html/benchmarks -# cp -r bench/${{ github.head_ref || github.ref_name }}/* docs/build/html/benchmarks/. - - name: Store docs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: docs path: docs/build/html - - name: Deploy 🚀 uses: JamesIves/github-pages-deploy-action@v4 - if: ${{ github.event_name == 'push' && github.repository_owner == 'Parallel-in-Time' && github.ref == 'refs/heads/master' }} + if: >- + ${{ github.event_name == 'push' + && github.repository_owner == 'Parallel-in-Time' + && github.ref == 'refs/heads/master' }} with: branch: gh-pages # The branch the action should deploy to. folder: docs/build/html # The folder the action should deploy. 
- -# spawn_gitlab_and_postprocess: -# runs-on: ubuntu-latest -# -## needs: -## - lint -# -# defaults: -# run: -# shell: bash -l {0} -# -# env: -# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -# -# steps: -# - name: Checkout -# uses: actions/checkout@v3 -# with: -# fetch-depth: 0 -# ref: ${{ github.event.pull_request.head.sha }} -# -# - name: Checkout benchmark repository -# uses: actions/checkout@v3 -# if: ${{ github.event_name == 'push' }} -# with: -# repository: Parallel-in-Time/pySDC-benchmarks -# token: ${{ secrets.BENCHMARK_BOT }} -# path: bench -# ref: main -# -# - name: Install Conda environment with Micromamba -# uses: mamba-org/provision-with-micromamba@main -# with: -# environment-file: etc/environment-base.yml -# -# - name: Mirror + trigger CI -# uses: SvanBoxel/gitlab-mirror-and-ci-action@master -# with: -# args: "https://gitlab.hzdr.de/r.speck/pysdc" -# env: -# FORCE_PUSH: "true" -# GITLAB_HOSTNAME: "gitlab.hzdr.de" -# GITLAB_USERNAME: ${{ secrets.GITLAB_SECRET_USER }} -# GITLAB_PASSWORD: ${{ secrets.GITLAB_SECRET_PASS }} -# GITLAB_PROJECT_ID: "3525" -# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -# -# - name: Get and prepare artifacts -# run: | -# pipeline_id=$(curl --header "PRIVATE-TOKEN: ${{ secrets.GITLAB_SECRET_H }}" --silent "https://gitlab.hzdr.de/api/v4/projects/3525/repository/commits/${{ github.head_ref || github.ref_name }}" | jq '.last_pipeline.id') -# job_id=$(curl --header "PRIVATE-TOKEN: ${{ secrets.GITLAB_SECRET_H }}" --silent "https://gitlab.hzdr.de/api/v4/projects/3525/pipelines/$pipeline_id/jobs" | jq '.[] | select( .name == "bundle" ) | select( .status == "success" ) | .id') -# curl --output artifacts.zip "https://gitlab.hzdr.de/api/v4/projects/3525/jobs/$job_id/artifacts" -# rm -rf data -# unzip artifacts.zip -# diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a166d70de7..a757fb1155 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -82,64 +82,6 @@ test_JUWELS: - cat sbatch.out -#test_kit: -# image: rcaspart/micromamba-cuda -# stage: benchmark -# variables: -# USE_NAME: "pySDC-test" -# SLURM_PARTITION: "dev_accelerated" -# SLURM_TIME: "00:11:00" -# SLURM_GRES: "gpu:1" -# rules: -# - if: $CI_COMMIT_MESSAGE !~ /.*\[CI-no-benchmarks\]/ -# tags: -# - kit -# parallel: -# matrix: -# - PYTHON: [ '3.9', '3.10' ] -# artifacts: -# name: "gpu_$PYTHON" -# paths: -# - coverage_cupy_3.10.dat -# - data_3.10 -# before_script: -# - cat /etc/environment -# - micromamba create --yes python=$PYTHON -f etc/environment-cupy.yml -# - eval "$(micromamba shell hook --shell=bash)" -# - micromamba activate pySDC -# - micromamba install --yes -c conda-forge openssh -# script: -# - coverage run --data-file=coverage_cupy_${PYTHON}.dat -m pytest --continue-on-collection-errors -v --durations=0 pySDC/tests -m cupy -# - mv data data_${PYTHON} - - -#test_kit_bare: -# stage: test -# rules: -# - if: $CI_COMMIT_MESSAGE !~ /.*\[CI-no-GPU\].*/ -# tags: -# - kit -# parallel: -# matrix: -# - PYTHON: [ '3.7', '3.9', '3.10' ] -# artifacts: -# name: "gpu_$PYTHON" -# paths: -# - coverage_cupy_3.10.dat -# - data_3.10 -# before_script: -# - module load devel/cuda -# - curl micro.mamba.pm/install.sh | bash -# - micromamba create --yes python=$PYTHON -f etc/environment-cupy.yml -# - micromamba activate pySDC -# script: -# - srun -p dev_accelerated -t 00:20:00 -N1 --gres gpu:1 coverage run -m pytest --continue-on-collection-errors -v --durations=0 pySDC/tests -m cupy -# - coverage combine -# - mv .coverage coverage_${{ matrix.env }}_${{ matrix.python }}.dat -# - mv data data_${PYTHON} -# - chmod +rwx 
data_${PYTHON} -# - cat coverage_cupy_${PYTHON}.dat - benchmark: image: mambaorg/micromamba stage: benchmark @@ -159,34 +101,11 @@ benchmark: - git config --global --add safe.directory '*' script: - mkdir -p benchmarks - - pytest --continue-on-collection-errors -v pySDC/tests -m "benchmark" --benchmark-json=benchmarks/output.json - -#benchmark_kit: -# image: rcaspart/micromamba -# stage: benchmark -# variables: -# USE_NAME: "pySDC-benchmark" -# SLURM_PARTITION: "dev_cpuonly" -# SLURM_TIME: "00:11:00" -# rules: -# - if: $CI_COMMIT_MESSAGE !~ /.*\[CI-no-benchmarks\]/ -# tags: -# - kit -# artifacts: -# paths: -# - benchmarks -# before_script: -# - micromamba create --yes -f etc/environment-base.yml -# - eval "$(micromamba shell hook --shell=bash)" -# - micromamba activate pySDC -# - micromamba install -c conda-forge git openssh -# - git config --global --add safe.directory '*' -# script: -# - mkdir -p benchmarks -# - pytest --continue-on-collection-errors -v pySDC/tests -m "benchmark" --benchmark-json=benchmarks/output.json - + - >- + pytest --continue-on-collection-errors -v pySDC/tests -m "benchmark" + --benchmark-json=benchmarks/output.json -#bundle: +# bundle: # image: mambaorg/micromamba # stage: upload # artifacts: diff --git a/docs/update_apidocs.sh b/docs/update_apidocs.sh index a605b41ff4..63dcba9cb8 100755 --- a/docs/update_apidocs.sh +++ b/docs/update_apidocs.sh @@ -26,6 +26,4 @@ ${SPHINX_APIDOC} -o docs/source/pySDC pySDC/implementations --force -T -d 2 -e ${SPHINX_APIDOC} -o docs/source/pySDC pySDC/helpers --force -T -d 2 -e #rm docs/source/pySDC/pySDC.rst -echo "Running : pip install sphinxemoji m2r2" -pip install sphinxemoji m2r2 --quiet ./docs/convert_markdown.py diff --git a/etc/environment-base.yml b/etc/environment-base.yml index 6ee1d85923..3f326fbf87 100644 --- a/etc/environment-base.yml +++ b/etc/environment-base.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/etc/environment-cupy.yml b/etc/environment-cupy.yml index dab002f58e..a1de245346 100644 --- a/etc/environment-cupy.yml +++ b/etc/environment-cupy.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/etc/environment-fenics.yml b/etc/environment-fenics.yml index 8604b24a97..13129e61a8 100644 --- a/etc/environment-fenics.yml +++ b/etc/environment-fenics.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/etc/environment-lint.yml b/etc/environment-lint.yml index d4f84751b8..502e1734f6 100644 --- a/etc/environment-lint.yml +++ b/etc/environment-lint.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge @@ -6,4 +8,4 @@ dependencies: - black - flakeheaven - flake8-comprehensions - - flake8-bugbear \ No newline at end of file + - flake8-bugbear diff --git a/etc/environment-mpi4py.yml b/etc/environment-mpi4py.yml index c33d3f6b6d..e1d82af566 100644 --- a/etc/environment-mpi4py.yml +++ b/etc/environment-mpi4py.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/etc/environment-petsc.yml b/etc/environment-petsc.yml index cc22fd7145..d7c1500c7a 100644 --- a/etc/environment-petsc.yml +++ b/etc/environment-petsc.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/etc/environment-postprocess.yml b/etc/environment-postprocess.yml index 88926a7cbe..8069e154d0 100644 --- a/etc/environment-postprocess.yml +++ b/etc/environment-postprocess.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge @@ -10,3 +12,9 @@ dependencies: - matplotlib - dill - numba + - cupy + - pip + - pip: + - sphinxemoji + - 
m2r2 + - genbadge[all] diff --git a/etc/environment-pytorch.yml b/etc/environment-pytorch.yml index 5c5658c807..c690be37ff 100644 --- a/etc/environment-pytorch.yml +++ b/etc/environment-pytorch.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/etc/environment-tests.yml b/etc/environment-tests.yml index e96ec1050c..73dec81588 100644 --- a/etc/environment-tests.yml +++ b/etc/environment-tests.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/pySDC/playgrounds/12th_PinT_Workshop/environment-tutorial.yml b/pySDC/playgrounds/12th_PinT_Workshop/environment-tutorial.yml index 7384dd9512..f8333ea049 100644 --- a/pySDC/playgrounds/12th_PinT_Workshop/environment-tutorial.yml +++ b/pySDC/playgrounds/12th_PinT_Workshop/environment-tutorial.yml @@ -1,3 +1,5 @@ +--- + name: pySDC_tutorial channels: - conda-forge diff --git a/pySDC/projects/AllenCahn_Bayreuth/environment.yml b/pySDC/projects/AllenCahn_Bayreuth/environment.yml index 91f3ae538f..0e5aae0eb1 100644 --- a/pySDC/projects/AllenCahn_Bayreuth/environment.yml +++ b/pySDC/projects/AllenCahn_Bayreuth/environment.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge @@ -8,4 +10,3 @@ dependencies: - matplotlib>=3.0 - dill>=0.2.6 - scipy>=0.17.1 - diff --git a/pySDC/projects/AsympConv/environment.yml b/pySDC/projects/AsympConv/environment.yml index 998ab0efa2..2cc7e8544a 100644 --- a/pySDC/projects/AsympConv/environment.yml +++ b/pySDC/projects/AsympConv/environment.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/pySDC/projects/DAE/environment.yml b/pySDC/projects/DAE/environment.yml index 3d332962d4..e5da507653 100644 --- a/pySDC/projects/DAE/environment.yml +++ b/pySDC/projects/DAE/environment.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/pySDC/projects/FastWaveSlowWave/environment.yml b/pySDC/projects/FastWaveSlowWave/environment.yml index d2b8d7e218..8d2324247c 100644 --- a/pySDC/projects/FastWaveSlowWave/environment.yml +++ b/pySDC/projects/FastWaveSlowWave/environment.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/pySDC/projects/Hamiltonian/environment.yml b/pySDC/projects/Hamiltonian/environment.yml index 59d683a4c9..ad705cea1a 100644 --- a/pySDC/projects/Hamiltonian/environment.yml +++ b/pySDC/projects/Hamiltonian/environment.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/pySDC/projects/Monodomain/README.rst b/pySDC/projects/Monodomain/README.rst index fd0e4d344d..7cf6ee8127 100644 --- a/pySDC/projects/Monodomain/README.rst +++ b/pySDC/projects/Monodomain/README.rst @@ -61,9 +61,9 @@ We display here the stability domain of the ESDC and SDC methods, both with IMEX with :math:`\lambda_I,\lambda_E,\lambda_e` representing :math:`f_I,f_E,f_e`, respectively. We fix :math:`\lambda_E=-1` and vary the stiff terms :math:`\lambda_I,\lambda_e` only. We see that the ESDC method is stable for all tested values of :math:`\lambda_I,\lambda_e`, while SDC is not. -.. image:: ../../../data/stability_domain_IMEXEXP_EXPRK.png +.. image:: ../../../data_monodomain/stability_domain_IMEXEXP_EXPRK.png :scale: 60 % -.. image:: ../../../data/stability_domain_IMEXEXP.png +.. image:: ../../../data_monodomain/stability_domain_IMEXEXP.png :scale: 60 % Convergence @@ -73,7 +73,7 @@ We fix the number of collocation nodes to :math:`m=6` and perform a convergence We use the ten Tusscher-Panfilov ionic model, which is employed in practical applications. 
We see that we gain one order of accuracy per sweep, as expected.
-.. image:: ../../../data/convergence_ESDC_fixed_iter.png
+.. image:: ../../../data_monodomain/convergence_ESDC_fixed_iter.png
 :scale: 100 %
@@ -88,7 +88,7 @@ Here we consider three methods:
 We display the number of iterations required by each method to reach a given tolerance and the residual at convergence.
 As ionic model we use again the ten Tusscher-Panfilov model.
 We see that PFASST requires a reasonably small number of iterations, comparable to the serial counterparts ESDC and MLESDC.
-.. image:: ../../../data/niter_VS_time.png
+.. image:: ../../../data_monodomain/niter_VS_time.png
 :scale: 100 %
-.. image:: ../../../data/res_VS_time.png
+.. image:: ../../../data_monodomain/res_VS_time.png
 :scale: 100 %
\ No newline at end of file
diff --git a/pySDC/projects/Monodomain/etc/environment-monodomain.yml b/pySDC/projects/Monodomain/etc/environment-monodomain.yml
index 3fe8c09c3a..4151479679 100644
--- a/pySDC/projects/Monodomain/etc/environment-monodomain.yml
+++ b/pySDC/projects/Monodomain/etc/environment-monodomain.yml
@@ -1,3 +1,5 @@
+---
+
 name: pySDC_monodomain
 channels:
 - conda-forge
diff --git a/pySDC/projects/PinTSimE/README.rst b/pySDC/projects/PinTSimE/README.rst
index a417218a6f..3f157b5fb7 100644
--- a/pySDC/projects/PinTSimE/README.rst
+++ b/pySDC/projects/PinTSimE/README.rst
@@ -36,7 +36,7 @@ Buck converter
 --------------
 In the test case of the buck converter there are multiple switches in the considered time domain. In the so-called open-loop control, a controller monitors the actual output voltage. It compares the output with a target output voltage. Regularly, after a fixed number of time steps, the duty cycle to control the switching is recalculated based on the error. The simulation illustrates the switching behavior: the voltage values settle down around the reference.
-.. image:: ../../../data/buck_model_solution.png
+.. image:: ../../../data/buck_converter_model_solution.png
 :width: 35%
 :align: center
diff --git a/pySDC/projects/PinTSimE/environment.yml b/pySDC/projects/PinTSimE/environment.yml
index bcc3f1bc76..63f08b5344 100644
--- a/pySDC/projects/PinTSimE/environment.yml
+++ b/pySDC/projects/PinTSimE/environment.yml
@@ -1,3 +1,5 @@
+---
+
 name: pySDC
 channels:
 - conda-forge
diff --git a/pySDC/projects/RDC/environment.yml b/pySDC/projects/RDC/environment.yml
index 7b7d8f3827..142bd3dcd8 100644
--- a/pySDC/projects/RDC/environment.yml
+++ b/pySDC/projects/RDC/environment.yml
@@ -1,3 +1,5 @@
+---
+
 name: pySDC
 channels:
 - conda-forge
diff --git a/pySDC/projects/Resilience/README.rst b/pySDC/projects/Resilience/README.rst
index f89e8db843..becbd2742f 100644
--- a/pySDC/projects/Resilience/README.rst
+++ b/pySDC/projects/Resilience/README.rst
@@ -9,7 +9,7 @@
 The first strategy we try is `Adaptivity `_, which is designed purely as a detector for soft faults.
 We have also simulated faults in the van der Pol problem and tried recovering them with the strategies `here `_.
-We also experimented with faults in the Lorenz attractor problem `here `_.
+We also experimented with faults in the Lorenz attractor problem. See `Resilience in the Lorenz Attractor `_.
Tests
-----
diff --git a/pySDC/projects/Resilience/environment.yml b/pySDC/projects/Resilience/environment.yml
index a085015a2d..70ddb575a5 100644
--- a/pySDC/projects/Resilience/environment.yml
+++ b/pySDC/projects/Resilience/environment.yml
@@ -1,3 +1,5 @@
+---
+
 name: pySDC
 channels:
 - conda-forge
diff --git a/pySDC/projects/Resilience/tests/test_order.py b/pySDC/projects/Resilience/tests/test_order.py
index 52d5f4b5d4..d8a27ebb67 100644
--- a/pySDC/projects/Resilience/tests/test_order.py
+++ b/pySDC/projects/Resilience/tests/test_order.py
@@ -20,3 +20,11 @@ def test_order_adaptive_step_size(ks, serial):
 fig, ax = plt.subplots()
 plot_all_errors(ax, ks, serial, Tend_fixed=5e-1, var='e_tol', dt_list=[1e-5, 5e-6], avoid_restarts=False)
+
+
+@pytest.mark.base
+def test_accuracy_check():
+    print(locals())
+    from pySDC.projects.Resilience.accuracy_check import check_order_against_step_size
+
+    check_order_against_step_size()
diff --git a/pySDC/projects/SDC_showdown/environment.yml b/pySDC/projects/SDC_showdown/environment.yml
index 2917d1ad6f..54a7112dff 100644
--- a/pySDC/projects/SDC_showdown/environment.yml
+++ b/pySDC/projects/SDC_showdown/environment.yml
@@ -1,3 +1,5 @@
+---
+
 name: pySDC
 channels:
 - conda-forge
diff --git a/pySDC/projects/Second_orderSDC/environment.yml b/pySDC/projects/Second_orderSDC/environment.yml
index 95ab6a5fff..d36deda800 100644
--- a/pySDC/projects/Second_orderSDC/environment.yml
+++ b/pySDC/projects/Second_orderSDC/environment.yml
@@ -1,3 +1,5 @@
+---
+
 name: pySDC
 channels:
 - conda-forge
diff --git a/pySDC/projects/TOMS/environment.yml b/pySDC/projects/TOMS/environment.yml
index 1343111e30..8c434afb9d 100644
--- a/pySDC/projects/TOMS/environment.yml
+++ b/pySDC/projects/TOMS/environment.yml
@@ -1,3 +1,5 @@
+---
+
 name: pySDC
 channels:
 - conda-forge
diff --git a/pySDC/projects/compression/README.rst b/pySDC/projects/compression/README.rst
index 393d594c63..e8ccaec5be 100644
--- a/pySDC/projects/compression/README.rst
+++ b/pySDC/projects/compression/README.rst
@@ -52,18 +52,18 @@ We measure the local order of accuracy in time and verify that it increases by o
 While the order is typically only maintained up to machine precision or the discretization error, we find that the accuracy now stalls at the error bound that we set for the compressor.
 See below for corresponding figures, where the difference between the colored lines is the number of SDC iterations and the dashed line marks the error bound for SZ3.
-.. image:: ../../../data/compression_order_time_advection_d=1.00e-06_n=1_MPI=False.png
+.. image:: ../../../data_libpressio/compression_order_time_advection_d=1.00e-06_n=1_MPI=False.png
 :width: 45%
-.. image:: ../../../data/compression_order_time_advection_d=1.00e-06_n=1_MPI=True.png
+.. image:: ../../../data_libpressio/compression_order_time_advection_d=1.00e-06_n=1_MPI=True.png
 :width: 45%
 It has recently been `demonstrated `_ that the small-scale PinT algorithm Block Gauss-Seidel SDC maintains the order of single-step SDC, so we can repeat the same test but with multiple processors, each with their own time step to solve:
-.. image:: ../../../data/compression_order_time_advection_d=1.00e-06_n=4_MPI=False.png
+.. image:: ../../../data_libpressio/compression_order_time_advection_d=1.00e-06_n=4_MPI=False.png
 :width: 45%
-.. image:: ../../../data/compression_order_time_advection_d=1.00e-06_n=4_MPI=True.png
+..
image:: ../../../data_libpressio/compression_order_time_advection_d=1.00e-06_n=4_MPI=True.png :width: 45% The above plots showcase that both time-serial SDC as well as time-parallel Block Gauss-Seidel SDC do not suffer from compression when the compression error bound is below other numerical errors and that both the MPI and simulated parallelism versions work. diff --git a/pySDC/projects/matrixPFASST/environment.yml b/pySDC/projects/matrixPFASST/environment.yml index 3d332962d4..e5da507653 100644 --- a/pySDC/projects/matrixPFASST/environment.yml +++ b/pySDC/projects/matrixPFASST/environment.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/pySDC/projects/parallelSDC/environment.yml b/pySDC/projects/parallelSDC/environment.yml index 45f8cb089f..1961bb5c3b 100644 --- a/pySDC/projects/parallelSDC/environment.yml +++ b/pySDC/projects/parallelSDC/environment.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/pySDC/projects/parallelSDC_reloaded/README.md b/pySDC/projects/parallelSDC_reloaded/README.rst similarity index 100% rename from pySDC/projects/parallelSDC_reloaded/README.md rename to pySDC/projects/parallelSDC_reloaded/README.rst diff --git a/pySDC/projects/parallelSDC_reloaded/environment.yml b/pySDC/projects/parallelSDC_reloaded/environment.yml index b0c4bd2123..540fdb25f2 100644 --- a/pySDC/projects/parallelSDC_reloaded/environment.yml +++ b/pySDC/projects/parallelSDC_reloaded/environment.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/pySDC/projects/soft_failure/environment.yml b/pySDC/projects/soft_failure/environment.yml index d9f0172d84..9f2b84243a 100644 --- a/pySDC/projects/soft_failure/environment.yml +++ b/pySDC/projects/soft_failure/environment.yml @@ -1,3 +1,5 @@ +--- + name: pySDC channels: - conda-forge diff --git a/pySDC/tests/README.rst b/pySDC/tests/README.rst index f471d61674..6678560816 100644 --- a/pySDC/tests/README.rst +++ b/pySDC/tests/README.rst @@ -7,5 +7,5 @@ Code coverage: |badge-cc| .. |badge-ga| image:: https://github.com/Parallel-in-Time/pySDC/actions/workflows/ci_pipeline.yml/badge.svg :target: https://github.com/Parallel-in-Time/pySDC/actions/workflows/ci_pipeline.yml -.. |badge-cc| image:: ./coverage/coverage-badge.svg +.. |badge-cc| image:: ./htmlcov/coverage-badge.svg :target: ./coverage/index.html \ No newline at end of file