
Commit

Merge remote-tracking branch 'upstream/master' into test_hackathon
lisawim committed Apr 11, 2024
2 parents bc751fd + b02181b commit e00236c
Showing 49 changed files with 6,125 additions and 585 deletions.
49 changes: 48 additions & 1 deletion .github/workflows/ci_pipeline.yml
@@ -144,7 +144,53 @@ jobs:
           path: |
             data_libpressio
             coverage_libpressio_3.10.dat
+  user_monodomain_tests_linux:
+    runs-on: ubuntu-latest
+
+    defaults:
+      run:
+        shell: bash -l {0}
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Install Conda environment with Micromamba
+        uses: mamba-org/setup-micromamba@v1
+        with:
+          environment-file: "pySDC/projects/Monodomain/etc/environment-monodomain.yml"
+          create-args: >-
+            python=3.10
+      - name: Compile C++ ionic models
+        env:
+          IONIC_MODELS_PATH: "pySDC/projects/Monodomain/problem_classes/ionicmodels/cpp"
+        run: |
+          c++ -O3 -Wall -shared -std=c++11 -fPIC -fvisibility=hidden $(python3 -m pybind11 --includes) ${IONIC_MODELS_PATH}/bindings_definitions.cpp -o ${IONIC_MODELS_PATH}/ionicmodels$(python3-config --extension-suffix)
+      - name: Run pytest for CPU stuff
+        run: |
+          echo "print('Loading sitecustomize.py...')
+          import coverage
+          coverage.process_startup() " > sitecustomize.py
+          coverage run -m pytest --continue-on-collection-errors -v --durations=0 pySDC/tests -m monodomain
+      - name: Make coverage report
+        run: |
+          mv data data_monodomain
+          coverage combine
+          mv .coverage coverage_monodomain_3.10.dat
+      - name: Uploading artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: cpu-test-artifacts
+          path: |
+            data_monodomain
+            coverage_monodomain_3.10.dat
 #  user_cpu_tests_macos:
 #    runs-on: macos-12
 #
@@ -206,6 +252,7 @@ jobs:
       - lint
       - user_cpu_tests_linux
       - user_libpressio_tests
+      - user_monodomain_tests_linux
 #      - wait_for_gitlab

     defaults:
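The `echo` in the new job's "Run pytest for CPU stuff" step drops a `sitecustomize.py` into the working directory so that subprocesses spawned by the test run also record coverage. This is what the generated file amounts to, reconstructed from the step above; note that `coverage.process_startup()` only starts measuring when the `COVERAGE_PROCESS_START` environment variable names a coverage config file, which this excerpt does not show being set:

```python
# sitecustomize.py -- Python imports this module automatically at interpreter
# startup, so subprocesses spawned by the test suite run this code too.
print('Loading sitecustomize.py...')

import coverage

# Begins coverage measurement in this process when the COVERAGE_PROCESS_START
# environment variable points to a coverage config file; otherwise a no-op.
coverage.process_startup()
```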
4 changes: 2 additions & 2 deletions CITATION.cff
@@ -21,9 +21,9 @@ authors:
     orcid: https://orcid.org/0000-0002-8869-0784
     affiliation: "Hamburg University of Technology, Institute of Mathematics, 21073 Hamburg, Germany"

-version: 5.4.2
+version: 5.4.3
 doi: 10.5281/zenodo.594191
-date-released: 2024-02-08
+date-released: 2024-03-27
 keywords:
   - "parallel-in-time"
   - "spectral deferred corrections"
2 changes: 1 addition & 1 deletion README.md
@@ -105,7 +105,7 @@ The JU receives support from the European Union's Horizon 2020 research
 and innovation programme and Belgium, France, Germany, and Switzerland.
 This project also received funding from the [German Federal Ministry of
 Education and Research](https://www.bmbf.de/bmbf/en/home/home_node.html)
-(BMBF) grant 16HPC047. The project also received help from the
+(BMBF) grants 16HPC047 and 16ME0679K. Supported by the European Union - NextGenerationEU. The project also received help from the
 [Helmholtz Platform for Research Software Engineering - Preparatory
 Study (HiRSE_PS)](https://www.helmholtz-hirse.de/).

2 changes: 1 addition & 1 deletion docs/source/conf.py
@@ -72,7 +72,7 @@
 # The short X.Y version.
 version = '5.4'
 # The full version, including alpha/beta/rc tags.
-release = '5.4.2'
+release = '5.4.3'

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
1 change: 1 addition & 0 deletions docs/source/index.rst
@@ -52,6 +52,7 @@ Projects
    projects/DAE.rst
    projects/compression.rst
    projects/second_order.rst
+   projects/monodomain.rst


 API documentation
1 change: 1 addition & 0 deletions docs/source/projects/monodomain.rst
@@ -0,0 +1 @@
+.. include:: /../../pySDC/projects/Monodomain/README.rst
16 changes: 15 additions & 1 deletion pySDC/core/Lagrange.py
@@ -88,7 +88,7 @@ class LagrangeApproximation(object):
         The associated barycentric weights
     """

-    def __init__(self, points):
+    def __init__(self, points, fValues=None):
         points = np.asarray(points).ravel()

         diffs = points[:, None] - points[None, :]
@@ -110,6 +110,20 @@ def analytic(diffs):
         self.points = points
         self.weights = weights

+        # Store function values if provided
+        if fValues is not None:
+            fValues = np.asarray(fValues)
+            if fValues.shape != points.shape:
+                raise ValueError(f'fValues {fValues.shape} has not the correct shape: {points.shape}')
+        self.fValues = fValues
+
+    def __call__(self, t):
+        assert self.fValues is not None, "cannot evaluate polynomial without fValues"
+        t = np.asarray(t)
+        values = self.getInterpolationMatrix(t.ravel()).dot(self.fValues)
+        values.shape = t.shape
+        return values
+
     @property
     def n(self):
         return self.points.size
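The new `fValues` argument and `__call__` method turn the barycentric helper into a directly evaluable interpolation polynomial. A minimal usage sketch (the node choice and test function are illustrative, not part of the commit):

```python
import numpy as np

from pySDC.core.Lagrange import LagrangeApproximation

# Interpolate exp(x) from 5 Chebyshev nodes on [-1, 1] (illustrative choice)
nodes = np.cos(np.pi * (2 * np.arange(5) + 1) / 10)
interp = LagrangeApproximation(nodes, fValues=np.exp(nodes))

# With fValues stored, the approximation is callable like a function
t = np.linspace(-1, 1, 7)
print(np.max(np.abs(interp(t) - np.exp(t))))  # small interpolation error
```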
@@ -208,6 +208,7 @@ def setup(self, controller, params, description, **kwargs):
             'residual_max_tol': 1e9,
             'maxiter': description['sweeper_params'].get('maxiter', 99),
             'interpolate_between_restarts': True,
+            'abort_at_growing_residual': True,
             **super().setup(controller, params, description, **kwargs),
         }
         if defaults['restol_rel']:
@@ -232,7 +233,12 @@ def determine_restart(self, controller, S, **kwargs):
             self.trigger_restart_upon_nonconvergence(S)
         elif self.get_local_error_estimate(controller, S, **kwargs) > self.params.e_tol:
             S.status.restart = True
-        elif S.status.time_size == 1 and self.res_last_iter < S.levels[0].status.residual and S.status.iter > 0:
+        elif (
+            S.status.time_size == 1
+            and self.res_last_iter < S.levels[0].status.residual
+            and S.status.iter > 0
+            and self.params.abort_at_growing_residual
+        ):
             self.trigger_restart_upon_nonconvergence(S)
         elif S.levels[0].status.residual > self.params.residual_max_tol:
             self.trigger_restart_upon_nonconvergence(S)
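The behavioural change is confined to that one branch: on a single-step (serial) run, a residual that grew between iterations now only aborts the step while `abort_at_growing_residual` is left at its default `True`. Restated as a standalone predicate for clarity (a sketch; the argument names mirror the status attributes used above):

```python
def restarts_on_growing_residual(time_size, n_iter, res_last_iter, residual, abort_at_growing_residual=True):
    """Mirror of the patched elif branch above."""
    return (
        time_size == 1
        and res_last_iter < residual
        and n_iter > 0
        and abort_at_growing_residual
    )

# The new flag gates the abort: same growing residual, different outcome.
print(restarts_on_growing_residual(1, 3, 1e-6, 1e-5))                                   # True
print(restarts_on_growing_residual(1, 3, 1e-6, 1e-5, abort_at_growing_residual=False))  # False
```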
79 changes: 79 additions & 0 deletions pySDC/implementations/hooks/log_solution.py
@@ -1,4 +1,7 @@
 from pySDC.core.Hooks import hooks
+import pickle
+import os
+import numpy as np


 class LogSolution(hooks):
@@ -63,3 +66,79 @@ def post_iteration(self, step, level_number):
             type='u',
             value=L.uend,
         )
+
+
+class LogToFile(hooks):
+    r"""
+    Hook for logging the solution to file after the step using pickle.
+
+    Please configure the hook to your liking by manipulating class attributes.
+    You must set a custom path to a directory like so:
+
+    ```
+    LogToFile.path = '/my/directory/'
+    ```
+
+    Keep in mind that the hook will overwrite files without warning!
+    You can give a custom file name by setting the ``file_name`` class attribute and give a custom way of rendering the
+    index associated with individual files by giving a different lambda function ``format_index`` class attribute. This
+    lambda should accept one index and return one string.
+
+    You can also give a custom ``logging_condition`` lambda, accepting the current level if you want to log selectively.
+
+    Importantly, you may need to change ``process_solution``. By default, this will return a numpy view of the solution.
+    Of course, if you are not using numpy, you need to change this. Again, this is a lambda accepting the level.
+
+    After the fact, you can use the classmethod `get_path` to get the path to a certain data or the `load` function to
+    directly load the solution at a given index. Just configure the hook like you did when you recorded the data
+    beforehand.
+
+    Finally, be aware that using this hook with MPI parallel runs may lead to different tasks overwriting files. Make
+    sure to give a different `file_name` for each task that writes files.
+    """
+
+    path = None
+    file_name = 'solution'
+    logging_condition = lambda L: True
+    process_solution = lambda L: {'t': L.time + L.dt, 'u': L.uend.view(np.ndarray)}
+    format_index = lambda index: f'{index:06d}'
+
+    def __init__(self):
+        super().__init__()
+        self.counter = 0
+
+        if self.path is None:
+            raise ValueError('Please set a path for logging as the class attribute `LogToFile.path`!')
+
+        if os.path.isfile(self.path):
+            raise ValueError(
+                f'{self.path!r} is not a valid path to log to because a file of the same name exists. Please supply a directory'
+            )
+
+        if not os.path.isdir(self.path):
+            os.mkdir(self.path)
+
+    def post_step(self, step, level_number):
+        if level_number > 0:
+            return None
+
+        L = step.levels[level_number]
+
+        if type(self).logging_condition(L):
+            path = self.get_path(self.counter)
+            data = type(self).process_solution(L)
+
+            with open(path, 'wb') as file:
+                pickle.dump(data, file)
+
+        self.counter += 1
+
+    @classmethod
+    def get_path(cls, index):
+        return f'{cls.path}/{cls.file_name}_{cls.format_index(index)}.pickle'
+
+    @classmethod
+    def load(cls, index):
+        path = cls.get_path(index)
+        with open(path, 'rb') as file:
+            return pickle.load(file)
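A minimal usage sketch of the new hook. The directory, file name, and the manual round trip through `pickle` are illustrative; in an actual run the hook would be handed to the controller (e.g. via the `hook_class` controller parameter) and `post_step` would write the files:

```python
import pickle

import numpy as np

from pySDC.implementations.hooks.log_solution import LogToFile

# Configure the class *before* it is instantiated, as the docstring asks.
LogToFile.path = './solution_logs'  # illustrative directory, created if missing
LogToFile.file_name = 'heat_task0'  # one file_name per MPI task avoids overwrites

LogToFile()  # instantiation validates the path and creates the directory

# Emulate what post_step would write for the first step, then read it back.
with open(LogToFile.get_path(0), 'wb') as file:
    pickle.dump({'t': 0.1, 'u': np.zeros(4)}, file)

print(LogToFile.load(0))  # -> {'t': 0.1, 'u': array([0., 0., 0., 0.])}
```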
