diff --git a/modepy/matrices.py b/modepy/matrices.py
index 307697c..57a4cc1 100644
--- a/modepy/matrices.py
+++ b/modepy/matrices.py
@@ -131,6 +131,12 @@ def multi_vandermonde(
 
     A sequence of matrices is returned--i.e. this function works directly on
     :func:`modepy.Basis.gradients` and returns a tuple of matrices.
+
+    .. note::
+
+        Depending on the use case, it may be convenient to instead call
+        :func:`vandermonde` with the result of :meth:`Basis.derivatives`,
+        which is mathematically equivalent.
     """
 
     nnodes = nodes.shape[-1]
diff --git a/modepy/modes.py b/modepy/modes.py
index 0d55728..05a3134 100644
--- a/modepy/modes.py
+++ b/modepy/modes.py
@@ -791,6 +791,28 @@ def gradients(self) -> tuple[BasisGradientType, ...]:
         Each function returns a tuple of derivatives, one per reference axis.
         """
 
+    def derivatives(self, axis: int) -> tuple[BasisFunctionType, ...]:
+        """
+        Returns a tuple of callable functions in the same order as
+        :meth:`functions` representing the same basis functions with a
+        derivative with respect to *axis* applied.
+
+        .. note::
+
+            :meth:`gradients` and :meth:`derivatives` provide equivalent
+            functionality. Usage ergonomics are often 'nicer' for
+            :meth:`derivatives`. If *all* gradient data is desired
+            for simplices, calling :meth:`gradients` can be somewhat more
+            efficient because subexpressions can be reused across
+            components of the gradient.
+
+        .. versionadded:: 2024.1.1
+        """
+        return tuple(
+                partial(lambda fi_inner, nodes: fi_inner(nodes)[axis], fi)
+                for fi in self.gradients
+                )
+
     # }}}

@@ -885,13 +907,18 @@ def orthonormality_weight(self) -> float:
         raise BasisNotOrthonormal
 
     @property
-    def functions(self):
+    def functions(self) -> tuple[BasisFunctionType, ...]:
         return tuple(partial(monomial, mid) for mid in self.mode_ids)
 
     @property
-    def gradients(self):
+    def gradients(self) -> tuple[BasisGradientType, ...]:
         return tuple(partial(grad_monomial, mid) for mid in self.mode_ids)
 
+    def derivatives(self, axis: int) -> tuple[BasisFunctionType, ...]:
+        return tuple(
+                partial(diff_monomial, mid, axis)
+                for mid in self.mode_ids)
+
 
 @basis_for_space.register(PN)
 def _basis_for_pn(space: PN, shape: Simplex):
@@ -971,6 +998,15 @@ def _mode_index_tuples(self) -> tuple[tuple[int, ...], ...]:
                 for mid in gnitb([len(b.functions) for b in self._bases[::-1]]))
 
+    @property
+    def _dim_ranges(self) -> tuple[tuple[int, int], ...]:
+        idim = 0
+        result = []
+        for dims in self._dims_per_basis:
+            result.append((idim, idim + dims))
+            idim += dims
+        return tuple(result)
+
     @property
     def mode_ids(self) -> tuple[Hashable, ...]:
         underlying_mode_id_lists = [basis.mode_ids for basis in self._bases]
@@ -1028,6 +1064,21 @@ def gradients(self) -> tuple[BasisGradientType, ...]:
                 dims_per_function=self._dims_per_basis)
             for mid in self._mode_index_tuples)
 
+    def derivatives(self, axis: int) -> tuple[BasisFunctionType, ...]:
+        bases = [b.functions for b in self._bases]
+        return tuple(
+            _TensorProductBasisFunction(
+                mid,
+                tuple(
+                    (
+                        self._bases[ibasis].derivatives(axis-start_axis)[mid_i]
+                        if start_axis <= axis < end_axis
+                        else bases[ibasis][mid_i])
+                    for ibasis, (mid_i, (start_axis, end_axis))
+                    in enumerate(zip(mid, self._dim_ranges, strict=True))),
+                dims_per_function=self._dims_per_basis)
+            for mid in self._mode_index_tuples)
+
 
 @orthonormal_basis_for_space.register(TensorProductSpace)
 def _orthonormal_basis_for_tp(
diff --git a/modepy/test/test_modes.py b/modepy/test/test_modes.py
index 04f8d8f..fb92365 100644
--- a/modepy/test/test_modes.py
+++ b/modepy/test/test_modes.py
@@ -178,7 +178,8 @@ def test_basis_grad(dim, shape_cls, order, basis_getter):
     from pytools import wandering_element
     from pytools.convergence import EOCRecorder
 
-    for bf, gradbf in zip(basis.functions, basis.gradients, strict=True):
+    for ifunc, (bf, gradbf) in enumerate(
+            zip(basis.functions, basis.gradients, strict=True)):
         eoc_rec = EOCRecorder()
         for h in [1e-2, 1e-3]:
             r = mp.random_nodes_for_shape(shape, nnodes=1000, rng=rng)
@@ -198,6 +199,15 @@ def test_basis_grad(dim, shape_cls, order, basis_getter):
             logger.info("error: %.5e", err)
             eoc_rec.add_data_point(h, err)
 
+            for idiff_axis in range(shape.dim):
+                dr_bf = basis.derivatives(idiff_axis)[ifunc](r)
+                ref = gradbf_v[idiff_axis]
+                err = la.norm(dr_bf - ref, 2)
+                if la.norm(ref, 2) > 1e-13:
+                    err = err/la.norm(ref, 2)
+
+                assert err < 1e-13
+
         tol = 1e-8
         if eoc_rec.max_error() >= tol:
             logger.info("\n%s", str(eoc_rec))
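For reference (not part of the patch): a minimal usage sketch of the equivalence stated in the note added to multi_vandermonde. It assumes a modepy installation that includes this change and uses existing modepy helpers (Simplex, PN, orthonormal_basis_for_space, edge_clustered_nodes_for_space, vandermonde, multi_vandermonde); the particular shape, space, node set, and tolerance are illustrative only.

import numpy.linalg as la

import modepy as mp

shape = mp.Simplex(2)
space = mp.PN(shape.dim, 4)

basis = mp.orthonormal_basis_for_space(space, shape)
nodes = mp.edge_clustered_nodes_for_space(space, shape)

# Per-axis Vandermonde matrices of the basis derivatives via the existing
# gradient-based interface ...
grad_vdms = mp.multi_vandermonde(basis.gradients, nodes)

# ... and the same matrices via the new per-axis derivatives() method.
for axis, grad_vdm in enumerate(grad_vdms):
    diff_vdm = mp.vandermonde(basis.derivatives(axis), nodes)
    assert la.norm(diff_vdm - grad_vdm) <= 1e-12 * la.norm(grad_vdm)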