From 17ab101d4dc62382bfd83f449d3ca602e20c23ca Mon Sep 17 00:00:00 2001 From: Patrick Ersing <114223904+patrickersing@users.noreply.github.com> Date: Thu, 21 Mar 2024 16:46:03 +0100 Subject: [PATCH 1/7] Extend TimeSeriesCallback for TreeMesh1D/3D (#1873) * add TimeSeriesCallback support for TreeMesh1D * add TimeSeriesCallback support for TreeMesh3D * add testing * update tests * reorganize files structure for TimeSeriesCallback * update test values for julia 1.9 * update news.md * rename time_series_dg2d.jl, add comments from code review * apply formatter --- NEWS.md | 3 +- .../elixir_euler_source_terms.jl | 3 + .../elixir_euler_source_terms.jl | 5 + src/callbacks_step/time_series.jl | 6 +- src/callbacks_step/time_series_dg.jl | 37 ++++ src/callbacks_step/time_series_dg_tree.jl | 185 ++++++++++++++++++ ...dg2d.jl => time_series_dg_unstructured.jl} | 119 +---------- test/test_tree_1d_euler.jl | 17 +- test/test_tree_3d_euler.jl | 37 +++- 9 files changed, 288 insertions(+), 124 deletions(-) create mode 100644 src/callbacks_step/time_series_dg_tree.jl rename src/callbacks_step/{time_series_dg2d.jl => time_series_dg_unstructured.jl} (76%) diff --git a/NEWS.md b/NEWS.md index 5b08d51ab89..022252e61a9 100644 --- a/NEWS.md +++ b/NEWS.md @@ -7,7 +7,8 @@ for human readability. ## Changes in the v0.7 lifecycle #### Added -- Implementation of `TimeSeriesCallback` for curvilinear meshes on `UnstructuredMesh2D`. +- Implementation of `TimeSeriesCallback` for curvilinear meshes on `UnstructuredMesh2D` and extension + to 1D and 3D on `TreeMesh`. ## Changes when updating to v0.7 from v0.6.x diff --git a/examples/tree_1d_dgsem/elixir_euler_source_terms.jl b/examples/tree_1d_dgsem/elixir_euler_source_terms.jl index 555910f69f0..cb8a09057d9 100644 --- a/examples/tree_1d_dgsem/elixir_euler_source_terms.jl +++ b/examples/tree_1d_dgsem/elixir_euler_source_terms.jl @@ -44,9 +44,12 @@ save_solution = SaveSolutionCallback(interval = 100, stepsize_callback = StepsizeCallback(cfl = 0.8) +time_series = TimeSeriesCallback(semi, [0.0, 0.33, 1.0], interval = 10) + callbacks = CallbackSet(summary_callback, analysis_callback, alive_callback, save_solution, + time_series, stepsize_callback) ############################################################################### diff --git a/examples/tree_3d_dgsem/elixir_euler_source_terms.jl b/examples/tree_3d_dgsem/elixir_euler_source_terms.jl index f0246c30490..021fd09f316 100644 --- a/examples/tree_3d_dgsem/elixir_euler_source_terms.jl +++ b/examples/tree_3d_dgsem/elixir_euler_source_terms.jl @@ -41,9 +41,14 @@ save_solution = SaveSolutionCallback(interval = 100, stepsize_callback = StepsizeCallback(cfl = 0.6) +time_series = TimeSeriesCallback(semi, + [(0.0, 0.0, 0.0), (0.33, 0.33, 0.33), (1.0, 1.0, 1.0)], + interval = 10) + callbacks = CallbackSet(summary_callback, analysis_callback, alive_callback, save_solution, + time_series, stepsize_callback) ############################################################################### diff --git a/src/callbacks_step/time_series.jl b/src/callbacks_step/time_series.jl index f6d76f0fb15..ae18c85700d 100644 --- a/src/callbacks_step/time_series.jl +++ b/src/callbacks_step/time_series.jl @@ -23,8 +23,7 @@ After the last time step, the results are stored in an HDF5 file `filename` in d The real data type `RealT` and data type for solution variables `uEltype` default to the respective types used in the solver and the cache. -Currently this callback is only implemented for [`TreeMesh`](@ref) in 2D -and [`UnstructuredMesh2D`](@ref). 
+Currently this callback is only implemented for [`TreeMesh`](@ref) and [`UnstructuredMesh2D`](@ref). """ mutable struct TimeSeriesCallback{RealT <: Real, uEltype <: Real, SolutionVariables, VariableNames, Cache} @@ -218,5 +217,6 @@ function (time_series_callback::TimeSeriesCallback)(integrator) end include("time_series_dg.jl") -include("time_series_dg2d.jl") +include("time_series_dg_tree.jl") +include("time_series_dg_unstructured.jl") end # @muladd diff --git a/src/callbacks_step/time_series_dg.jl b/src/callbacks_step/time_series_dg.jl index ae394afbbfd..3781a10662d 100644 --- a/src/callbacks_step/time_series_dg.jl +++ b/src/callbacks_step/time_series_dg.jl @@ -34,4 +34,41 @@ function save_time_series_file(time_series_callback, end end end + +# Creates cache for time series callback +function create_cache_time_series(point_coordinates, + mesh::Union{TreeMesh, UnstructuredMesh2D}, + dg, cache) + # Determine element ids for point coordinates + element_ids = get_elements_by_coordinates(point_coordinates, mesh, dg, cache) + + # Calculate & store Lagrange interpolation polynomials + interpolating_polynomials = calc_interpolating_polynomials(point_coordinates, + element_ids, mesh, + dg, cache) + + time_series_cache = (; element_ids, interpolating_polynomials) + + return time_series_cache +end + +function get_elements_by_coordinates(coordinates, mesh, dg, cache) + element_ids = Vector{Int}(undef, size(coordinates, 2)) + get_elements_by_coordinates!(element_ids, coordinates, mesh, dg, cache) + + return element_ids +end + +function calc_interpolating_polynomials(coordinates, element_ids, + mesh::Union{TreeMesh, UnstructuredMesh2D}, + dg, cache) + interpolating_polynomials = Array{real(dg), 3}(undef, + nnodes(dg), ndims(mesh), + length(element_ids)) + calc_interpolating_polynomials!(interpolating_polynomials, coordinates, element_ids, + mesh, dg, + cache) + + return interpolating_polynomials +end end # @muladd diff --git a/src/callbacks_step/time_series_dg_tree.jl b/src/callbacks_step/time_series_dg_tree.jl new file mode 100644 index 00000000000..37d4e6ea705 --- /dev/null +++ b/src/callbacks_step/time_series_dg_tree.jl @@ -0,0 +1,185 @@ +# By default, Julia/LLVM does not use fused multiply-add operations (FMAs). +# Since these FMAs can increase the performance of many numerical algorithms, +# we need to opt-in explicitly. +# See https://ranocha.de/blog/Optimizing_EC_Trixi for further details. +@muladd begin +#! 
format: noindent + +# Find element ids containing coordinates given as a matrix [ndims, npoints] +function get_elements_by_coordinates!(element_ids, coordinates, mesh::TreeMesh, dg, + cache) + if length(element_ids) != size(coordinates, 2) + throw(DimensionMismatch("storage length for element ids does not match the number of coordinates")) + end + + @unpack cell_ids = cache.elements + @unpack tree = mesh + + # Reset element ids - 0 indicates "not (yet) found" + element_ids .= 0 + found_elements = 0 + + # Iterate over all elements + for element in eachelement(dg, cache) + # Get cell id + cell_id = cell_ids[element] + + # Iterate over coordinates + for index in 1:length(element_ids) + # Skip coordinates for which an element has already been found + if element_ids[index] > 0 + continue + end + + # Construct point + x = SVector(ntuple(i -> coordinates[i, index], ndims(mesh))) + + # Skip if point is not in cell + if !is_point_in_cell(tree, x, cell_id) + continue + end + + # Otherwise point is in cell and thus in element + element_ids[index] = element + found_elements += 1 + end + + # Exit loop if all elements have already been found + if found_elements == length(element_ids) + break + end + end + + return element_ids +end + +# Calculate the interpolating polynomials to extract data at the given coordinates +# The coordinates are known to be located in the respective element in `element_ids` +function calc_interpolating_polynomials!(interpolating_polynomials, coordinates, + element_ids, + mesh::TreeMesh, dg::DGSEM, cache) + @unpack tree = mesh + @unpack nodes = dg.basis + + wbary = barycentric_weights(nodes) + + for index in 1:length(element_ids) + # Construct point + x = SVector(ntuple(i -> coordinates[i, index], ndims(mesh))) + + # Convert to unit coordinates + cell_id = cache.elements.cell_ids[element_ids[index]] + cell_coordinates_ = cell_coordinates(tree, cell_id) + cell_length = length_at_cell(tree, cell_id) + unit_coordinates = (x .- cell_coordinates_) * 2 / cell_length + + # Calculate interpolating polynomial for each dimension, making use of tensor product structure + for d in 1:ndims(mesh) + interpolating_polynomials[:, d, index] .= lagrange_interpolating_polynomials(unit_coordinates[d], + nodes, + wbary) + end + end + + return interpolating_polynomials +end + +# Record the solution variables at each given point for the 1D case +function record_state_at_points!(point_data, u, solution_variables, + n_solution_variables, + mesh::TreeMesh{1}, equations, dg::DG, + time_series_cache) + @unpack element_ids, interpolating_polynomials = time_series_cache + old_length = length(first(point_data)) + new_length = old_length + n_solution_variables + + # Loop over all points/elements that should be recorded + for index in 1:length(element_ids) + # Extract data array and element id + data = point_data[index] + element_id = element_ids[index] + + # Make room for new data to be recorded + resize!(data, new_length) + data[(old_length + 1):new_length] .= zero(eltype(data)) + + # Loop over all nodes to compute their contribution to the interpolated values + for i in eachnode(dg) + u_node = solution_variables(get_node_vars(u, equations, dg, i, + element_id), equations) + + for v in 1:length(u_node) + data[old_length + v] += (u_node[v] * + interpolating_polynomials[i, 1, index]) + end + end + end +end + +# Record the solution variables at each given point for the 2D case +function record_state_at_points!(point_data, u, solution_variables, + n_solution_variables, + mesh::TreeMesh{2}, + equations, dg::DG, 
time_series_cache) + @unpack element_ids, interpolating_polynomials = time_series_cache + old_length = length(first(point_data)) + new_length = old_length + n_solution_variables + + # Loop over all points/elements that should be recorded + for index in 1:length(element_ids) + # Extract data array and element id + data = point_data[index] + element_id = element_ids[index] + + # Make room for new data to be recorded + resize!(data, new_length) + data[(old_length + 1):new_length] .= zero(eltype(data)) + + # Loop over all nodes to compute their contribution to the interpolated values + for j in eachnode(dg), i in eachnode(dg) + u_node = solution_variables(get_node_vars(u, equations, dg, i, j, + element_id), equations) + + for v in 1:length(u_node) + data[old_length + v] += (u_node[v] + * interpolating_polynomials[i, 1, index] + * interpolating_polynomials[j, 2, index]) + end + end + end +end + +# Record the solution variables at each given point for the 3D case +function record_state_at_points!(point_data, u, solution_variables, + n_solution_variables, + mesh::TreeMesh{3}, equations, dg::DG, + time_series_cache) + @unpack element_ids, interpolating_polynomials = time_series_cache + old_length = length(first(point_data)) + new_length = old_length + n_solution_variables + + # Loop over all points/elements that should be recorded + for index in 1:length(element_ids) + # Extract data array and element id + data = point_data[index] + element_id = element_ids[index] + + # Make room for new data to be recorded + resize!(data, new_length) + data[(old_length + 1):new_length] .= zero(eltype(data)) + + # Loop over all nodes to compute their contribution to the interpolated values + for k in eachnode(dg), j in eachnode(dg), i in eachnode(dg) + u_node = solution_variables(get_node_vars(u, equations, dg, i, j, k, + element_id), equations) + + for v in 1:length(u_node) + data[old_length + v] += (u_node[v] + * interpolating_polynomials[i, 1, index] + * interpolating_polynomials[j, 2, index] + * interpolating_polynomials[k, 3, index]) + end + end + end +end +end # @muladd diff --git a/src/callbacks_step/time_series_dg2d.jl b/src/callbacks_step/time_series_dg_unstructured.jl similarity index 76% rename from src/callbacks_step/time_series_dg2d.jl rename to src/callbacks_step/time_series_dg_unstructured.jl index ad7c6851c80..f6d1bb48f24 100644 --- a/src/callbacks_step/time_series_dg2d.jl +++ b/src/callbacks_step/time_series_dg_unstructured.jl @@ -5,71 +5,6 @@ @muladd begin #! 
format: noindent -# Creates cache for time series callback -function create_cache_time_series(point_coordinates, - mesh::Union{TreeMesh{2}, UnstructuredMesh2D}, - dg, cache) - # Determine element ids for point coordinates - element_ids = get_elements_by_coordinates(point_coordinates, mesh, dg, cache) - - # Calculate & store Lagrange interpolation polynomials - interpolating_polynomials = calc_interpolating_polynomials(point_coordinates, - element_ids, mesh, - dg, cache) - - time_series_cache = (; element_ids, interpolating_polynomials) - - return time_series_cache -end - -# Find element ids containing coordinates given as a matrix [ndims, npoints] -function get_elements_by_coordinates!(element_ids, coordinates, mesh::TreeMesh, dg, - cache) - if length(element_ids) != size(coordinates, 2) - throw(DimensionMismatch("storage length for element ids does not match the number of coordinates")) - end - - @unpack cell_ids = cache.elements - @unpack tree = mesh - - # Reset element ids - 0 indicates "not (yet) found" - element_ids .= 0 - found_elements = 0 - - # Iterate over all elements - for element in eachelement(dg, cache) - # Get cell id - cell_id = cell_ids[element] - - # Iterate over coordinates - for index in 1:length(element_ids) - # Skip coordinates for which an element has already been found - if element_ids[index] > 0 - continue - end - - # Construct point - x = SVector(ntuple(i -> coordinates[i, index], ndims(mesh))) - - # Skip if point is not in cell - if !is_point_in_cell(tree, x, cell_id) - continue - end - - # Otherwise point is in cell and thus in element - element_ids[index] = element - found_elements += 1 - end - - # Exit loop if all elements have already been found - if found_elements == length(element_ids) - break - end - end - - return element_ids -end - # Elements on an `UnstructuredMesh2D` are possibly curved. Assume that each # element is convex, i.e., all interior angles are less than 180 degrees. 
# This routine computes the shortest distance from a given point to each element @@ -208,44 +143,6 @@ function calc_minimum_surface_distance(point, node_coordinates, return min_distance2, indices end -function get_elements_by_coordinates(coordinates, mesh, dg, cache) - element_ids = Vector{Int}(undef, size(coordinates, 2)) - get_elements_by_coordinates!(element_ids, coordinates, mesh, dg, cache) - - return element_ids -end - -# Calculate the interpolating polynomials to extract data at the given coordinates -# The coordinates are known to be located in the respective element in `element_ids` -function calc_interpolating_polynomials!(interpolating_polynomials, coordinates, - element_ids, - mesh::TreeMesh, dg::DGSEM, cache) - @unpack tree = mesh - @unpack nodes = dg.basis - - wbary = barycentric_weights(nodes) - - for index in 1:length(element_ids) - # Construct point - x = SVector(ntuple(i -> coordinates[i, index], ndims(mesh))) - - # Convert to unit coordinates - cell_id = cache.elements.cell_ids[element_ids[index]] - cell_coordinates_ = cell_coordinates(tree, cell_id) - cell_length = length_at_cell(tree, cell_id) - unit_coordinates = (x .- cell_coordinates_) * 2 / cell_length - - # Calculate interpolating polynomial for each dimension, making use of tensor product structure - for d in 1:ndims(mesh) - interpolating_polynomials[:, d, index] .= lagrange_interpolating_polynomials(unit_coordinates[d], - nodes, - wbary) - end - end - - return interpolating_polynomials -end - function calc_interpolating_polynomials!(interpolating_polynomials, coordinates, element_ids, mesh::UnstructuredMesh2D, dg::DGSEM, cache) @@ -374,23 +271,9 @@ end return SVector(xi, eta) end -function calc_interpolating_polynomials(coordinates, element_ids, - mesh::Union{TreeMesh, UnstructuredMesh2D}, - dg, cache) - interpolating_polynomials = Array{real(dg), 3}(undef, - nnodes(dg), ndims(mesh), - length(element_ids)) - calc_interpolating_polynomials!(interpolating_polynomials, coordinates, element_ids, - mesh, dg, - cache) - - return interpolating_polynomials -end - -# Record the solution variables at each given point function record_state_at_points!(point_data, u, solution_variables, n_solution_variables, - mesh::Union{TreeMesh{2}, UnstructuredMesh2D}, + mesh::UnstructuredMesh2D, equations, dg::DG, time_series_cache) @unpack element_ids, interpolating_polynomials = time_series_cache old_length = length(first(point_data)) diff --git a/test/test_tree_1d_euler.jl b/test/test_tree_1d_euler.jl index f26500b411c..784d123128e 100644 --- a/test/test_tree_1d_euler.jl +++ b/test/test_tree_1d_euler.jl @@ -21,7 +21,10 @@ EXAMPLES_DIR = pkgdir(Trixi, "examples", "tree_1d_dgsem") 1.6205433861493646e-7, 1.465427772462391e-7, 5.372255111879554e-7, - ]) + ], + # With the default `maxiters = 1` in coverage tests, + # there would be no time series to check against. + coverage_override=(maxiters = 20,)) # Ensure that we do not have excessive memory allocations # (e.g., from type instabilities) let @@ -30,6 +33,18 @@ EXAMPLES_DIR = pkgdir(Trixi, "examples", "tree_1d_dgsem") du_ode = similar(u_ode) @test (@allocated Trixi.rhs!(du_ode, u_ode, semi, t)) < 1000 end + # Extra test to make sure the "TimeSeriesCallback" made correct data. 
+ # Extracts data at all points from the first step of the time series and compares it to the + # exact solution and an interpolated reference solution + point_data = [getindex(time_series.affect!.point_data[i], 1:3) for i in 1:3] + exact_data = [initial_condition_convergence_test(time_series.affect!.point_coordinates[i], + time_series.affect!.time[1], + equations) for i in 1:3] + ref_data = [[1.968279088772251, 1.9682791565395945, 3.874122958278797], + [2.0654816955822017, 2.0654817326611883, 4.26621471136323], + [2.0317209235018936, 2.0317209516429506, 4.127889808862571]] + @test point_data≈exact_data atol=1e-6 + @test point_data ≈ ref_data end @trixi_testset "elixir_euler_convergence_pure_fv.jl" begin diff --git a/test/test_tree_3d_euler.jl b/test/test_tree_3d_euler.jl index e9e2b82fec5..47669dce2fb 100644 --- a/test/test_tree_3d_euler.jl +++ b/test/test_tree_3d_euler.jl @@ -25,7 +25,10 @@ EXAMPLES_DIR = pkgdir(Trixi, "examples", "tree_3d_dgsem") 0.032179231640894645, 0.032179231640895534, 0.0655408023333299, - ]) + ], + # With the default `maxiters = 1` in coverage tests, + # there would be no time series to check against. + coverage_override=(maxiters = 20,)) # Ensure that we do not have excessive memory allocations # (e.g., from type instabilities) let @@ -34,6 +37,38 @@ EXAMPLES_DIR = pkgdir(Trixi, "examples", "tree_3d_dgsem") du_ode = similar(u_ode) @test (@allocated Trixi.rhs!(du_ode, u_ode, semi, t)) < 1000 end + # Extra test to make sure the "TimeSeriesCallback" made correct data. + # Extracts data at all points from the first step of the time series and compares it to the + # exact solution and an interpolated reference solution + point_data = [getindex(time_series.affect!.point_data[i], 1:5) for i in 1:3] + exact_data = [initial_condition_convergence_test(time_series.affect!.point_coordinates[:, + i], + time_series.affect!.time[1], + equations) for i in 1:3] + ref_data = [ + [ + 1.951156832316166, + 1.952073047561595, + 1.9520730475615966, + 1.9520730475615953, + 3.814390510967551, + ], + [ + 2.0506452262144363, + 2.050727319703708, + 2.0507273197037073, + 2.0507273197037077, + 4.203653999433724, + ], + [ + 2.046982357537558, + 2.0463728824399654, + 2.0463728824399654, + 2.0463728824399645, + 4.190033459318115, + ]] + @test point_data≈exact_data atol=1e-1 + @test point_data ≈ ref_data end @trixi_testset "elixir_euler_convergence_pure_fv.jl" begin From 18aaae96035a995e840e4e262964e7b49fdd9325 Mon Sep 17 00:00:00 2001 From: Joshua Lampert <51029046+JoshuaLampert@users.noreply.github.com> Date: Thu, 21 Mar 2024 19:56:02 +0100 Subject: [PATCH 2/7] Add ExplicitImports.jl test (#1875) * add ExplicitImports.jl tests * format * use Trixi.at-batch in at-threaded and skipt at-batch in stale explicit imports test --------- Co-authored-by: Michael Schlottke-Lakemper --- Project.toml | 2 -- src/Trixi.jl | 13 ++++++------- src/auxiliary/auxiliary.jl | 2 ++ test/Project.toml | 2 ++ test/test_aqua.jl | 9 +++++++++ 5 files changed, 19 insertions(+), 9 deletions(-) diff --git a/Project.toml b/Project.toml index 97da4aec51b..0e960a06a38 100644 --- a/Project.toml +++ b/Project.toml @@ -31,7 +31,6 @@ RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01" Reexport = "189a3867-3050-52da-a836-e630ba90ab69" Requires = "ae029012-a4dd-5104-9daa-d747884805df" SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462" -Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46" SimpleUnPack = "ce78b400-467f-4804-87d8-8f486da07d0a" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" StartUpDG = 
"472ebc20-7c99-4d4b-9470-8fde4e9faa0f" @@ -84,7 +83,6 @@ RecipesBase = "1.1" Reexport = "1.0" Requires = "1.1" SciMLBase = "1.90, 2" -Setfield = "1" SimpleUnPack = "1.1" SparseArrays = "1" StartUpDG = "0.17.7" diff --git a/src/Trixi.jl b/src/Trixi.jl index da7359999c5..9375c80d77e 100644 --- a/src/Trixi.jl +++ b/src/Trixi.jl @@ -22,7 +22,7 @@ using LinearAlgebra: LinearAlgebra, Diagonal, diag, dot, mul!, norm, cross, norm UniformScaling, det using Printf: @printf, @sprintf, println using SparseArrays: AbstractSparseMatrix, AbstractSparseMatrixCSC, sparse, droptol!, - rowvals, nzrange, nonzeros, spzeros + rowvals, nzrange, nonzeros # import @reexport now to make it available for further imports/exports using Reexport: @reexport @@ -32,10 +32,10 @@ using Reexport: @reexport using MPI: MPI using SciMLBase: CallbackSet, DiscreteCallback, - ODEProblem, ODESolution, ODEFunction, + ODEProblem, ODESolution, SplitODEProblem import SciMLBase: get_du, get_tmp_cache, u_modified!, - AbstractODEIntegrator, init, step!, check_error, + init, step!, check_error, get_proposed_dt, set_proposed_dt!, terminate!, remake, add_tstop!, has_tstop, first_tstop @@ -57,7 +57,6 @@ using Polyester: Polyester, @batch # You know, the cheapest threads you can find using OffsetArrays: OffsetArray, OffsetVector using P4est using T8code -using Setfield: @set using RecipesBase: RecipesBase using Requires: @require using Static: Static, One, True, False @@ -66,7 +65,7 @@ using StaticArrays: StaticArrays, MVector, MArray, SMatrix, @SMatrix using StrideArrays: PtrArray, StrideArray, StaticInt @reexport using StructArrays: StructArrays, StructArray using TimerOutputs: TimerOutputs, @notimeit, TimerOutput, print_timer, reset_timer! -using Triangulate: Triangulate, TriangulateIO, triangulate +using Triangulate: Triangulate, TriangulateIO export TriangulateIO # for type parameter in DGMultiMesh using TriplotBase: TriplotBase using TriplotRecipes: DGTriPseudocolor @@ -84,9 +83,9 @@ const _PREFERENCE_LOG = @load_preference("log", "log_Trixi_NaN") # finite difference SBP operators using SummationByPartsOperators: AbstractDerivativeOperator, - AbstractNonperiodicDerivativeOperator, DerivativeOperator, + AbstractNonperiodicDerivativeOperator, AbstractPeriodicDerivativeOperator, - PeriodicDerivativeOperator, grid + grid import SummationByPartsOperators: integrate, semidiscretize, compute_coefficients, compute_coefficients!, left_boundary_weight, right_boundary_weight diff --git a/src/auxiliary/auxiliary.jl b/src/auxiliary/auxiliary.jl index 92da9a5ba8b..972a748c56b 100644 --- a/src/auxiliary/auxiliary.jl +++ b/src/auxiliary/auxiliary.jl @@ -242,6 +242,8 @@ macro threaded(expr) # !!! danger "Heisenbug" # Look at the comments for `wrap_array` when considering to change this macro. + # By using `Trixi.@batch` we allow users of Trixi.jl to use `@threaded` without having + # Polyester.jl in their namespace. 
return esc(quote Trixi.@batch $(expr) end) diff --git a/test/Project.toml b/test/Project.toml index 1a042dab44f..1491d7a5c5f 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -2,6 +2,7 @@ Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595" CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0" Downloads = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +ExplicitImports = "7d51a73a-1435-4ff3-83d9-f097790105c7" FFMPEG = "c87230d0-a227-11e9-1b43-d7ebe4e7570a" ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" @@ -16,6 +17,7 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" Aqua = "0.8" CairoMakie = "0.10" Downloads = "1" +ExplicitImports = "1.0.1" FFMPEG = "0.4" ForwardDiff = "0.10.24" LinearAlgebra = "1" diff --git a/test/test_aqua.jl b/test/test_aqua.jl index 93457caba28..04c4a533d26 100644 --- a/test/test_aqua.jl +++ b/test/test_aqua.jl @@ -1,6 +1,7 @@ module TestAqua using Aqua +using ExplicitImports: check_no_implicit_imports, check_no_stale_explicit_imports using Test using Trixi @@ -13,6 +14,14 @@ include("test_trixi.jl") # in src/solvers/dgmulti/sbp.jl piracies = (treat_as_own = [Trixi.StartUpDG.RefElemData, Trixi.StartUpDG.MeshData],)) + @test isnothing(check_no_implicit_imports(Trixi, + skip = (Core, Base, Trixi.P4est, Trixi.T8code, + Trixi.EllipsisNotation))) + @test isnothing(check_no_stale_explicit_imports(Trixi, + ignore = (:derivative_operator, + :periodic_derivative_operator, + :upwind_operators, + Symbol("@batch")))) end end #module From f10969548615547e520623a9fb351a41bd952065 Mon Sep 17 00:00:00 2001 From: Hendrik Ranocha Date: Fri, 22 Mar 2024 12:40:51 +0100 Subject: [PATCH 3/7] Create SECURITY.md (#1884) * Create SECURITY.md * link to Julia SemVer --- SECURITY.md | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000000..faa84a770bc --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,30 @@ +# Security Policy + +The Trixi.jl development team takes security issues seriously. We appreciate +all efforts to responsibly disclose any security issues and will make every +effort to acknowledge contributions. + + +## Supported Versions + +The current stable release following the interpretation of +[semantic versioning (SemVer)](https://julialang.github.io/Pkg.jl/dev/compatibility/#Version-specifier-format-1) +used in the Julia ecosystem is supported with security updates. + + +## Reporting a Vulnerability + +To report a security issue, please use the GitHub Security Advisory +["Report a Vulnerability"](https://github.com/trixi-framework/Trixi.jl/security/advisories/new) +tab. + +We will send a response indicating the next steps in handling your report. +After the initial reply to your report, we will keep you informed of the +progress towards a fix and full announcement, and may ask for additional +information or guidance. + +Please report security bugs in third-party modules directly to the person +or team maintaining the module. + +Public notifications of vulnerabilities will be shared in community channels +such as Slack. 
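
A note on the import checks introduced in PATCH 2/7 above: the two ExplicitImports.jl calls added to test/test_aqua.jl can also be run interactively, outside the Aqua test set. The sketch below reuses exactly the calls and keyword arguments from that test; both functions return `nothing` when the corresponding check passes, which is what the `@test isnothing(...)` assertions rely on.

    using Trixi
    using ExplicitImports: check_no_implicit_imports, check_no_stale_explicit_imports

    # Passes (returns `nothing`) if Trixi accesses no names via implicit `using`,
    # apart from the modules deliberately skipped below (as in test/test_aqua.jl).
    check_no_implicit_imports(Trixi,
                              skip = (Core, Base, Trixi.P4est, Trixi.T8code,
                                      Trixi.EllipsisNotation))

    # Passes (returns `nothing`) if no explicit import is unused; the ignored names
    # are kept intentionally, e.g. `@batch`, which is only used qualified as
    # `Trixi.@batch` inside the `@threaded` macro (see PATCH 2/7).
    check_no_stale_explicit_imports(Trixi,
                                    ignore = (:derivative_operator,
                                              :periodic_derivative_operator,
                                              :upwind_operators,
                                              Symbol("@batch")))
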
From f4cb1e0e88fe6fac456d55a619d79e4a4d8d265e Mon Sep 17 00:00:00 2001 From: Hendrik Ranocha Date: Fri, 22 Mar 2024 12:56:20 +0100 Subject: [PATCH 4/7] add OpenSSF best practices badge (#1885) * add OpenSSF best practices badge * remove broken Genie badge --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 71370d3478e..86a8514a5ba 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,8 @@ [![Aqua QA](https://raw.githubusercontent.com/JuliaTesting/Aqua.jl/master/badge.svg)](https://github.com/JuliaTesting/Aqua.jl) [![License: MIT](https://img.shields.io/badge/License-MIT-success.svg)](https://opensource.org/licenses/MIT) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3996439.svg)](https://doi.org/10.5281/zenodo.3996439) -[![Downloads](https://shields.io/endpoint?url=https://pkgs.genieframework.com/api/v1/badge/Trixi)](https://pkgs.genieframework.com?packages=Trixi) +[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/8695/badge)](https://www.bestpractices.dev/projects/8695) + From e31e25928d6e51f1e84d1437840346edf4e944a6 Mon Sep 17 00:00:00 2001 From: Hendrik Ranocha Date: Fri, 22 Mar 2024 17:56:06 +0100 Subject: [PATCH 5/7] Update contributing.md (#1883) * Update contributing.md * copy contributing from root directory to docs --- CONTRIBUTING.md | 9 ++++--- docs/.gitignore | 2 ++ docs/make.jl | 43 ++++++++++++++++++++++---------- docs/src/contributing.md | 54 ---------------------------------------- 4 files changed, 38 insertions(+), 70 deletions(-) delete mode 100644 docs/src/contributing.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c3ad581062e..c40c40bb18e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,9 +1,12 @@ # Contributing Trixi.jl is an open-source project and we are very happy to accept contributions -from the community. Please feel free to open issues or submit patches (preferably -as pull requests) any time. For planned larger contributions, it is often -beneficial to get in contact with one of the principal developers first (see +from the community. Please feel free to +[open issues](https://github.com/trixi-framework/Trixi.jl/issues/new/choose) +or submit patches (preferably as +[pull requests](https://github.com/trixi-framework/Trixi.jl/pulls)) +any time. For planned larger contributions, it is often beneficial to get +in contact with one of the principal developers first (see [AUTHORS.md](AUTHORS.md)). 
Trixi.jl and its contributions are licensed under the MIT license (see diff --git a/docs/.gitignore b/docs/.gitignore index 01f3ac8d79a..c8a9e842246 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -1 +1,3 @@ src/code_of_conduct.md +src/contributing.md + diff --git a/docs/make.jl b/docs/make.jl index 8427c4049bf..f752a7b0ee6 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -28,19 +28,36 @@ DocMeta.setdocmeta!(Trixi2Vtk, :DocTestSetup, :(using Trixi2Vtk); recursive=true # as necessary # Based on: https://github.com/ranocha/SummationByPartsOperators.jl/blob/0206a74140d5c6eb9921ca5021cb7bf2da1a306d/docs/make.jl#L27-L41 open(joinpath(@__DIR__, "src", "code_of_conduct.md"), "w") do io - # Point to source license file - println(io, """ - ```@meta - EditURL = "https://github.com/trixi-framework/Trixi.jl/blob/main/CODE_OF_CONDUCT.md" - ``` - """) - # Write the modified contents - println(io, "# [Code of Conduct](@id code-of-conduct)") - println(io, "") - for line in eachline(joinpath(dirname(@__DIR__), "CODE_OF_CONDUCT.md")) - line = replace(line, "[AUTHORS.md](AUTHORS.md)" => "[Authors](@ref)") - println(io, "> ", line) - end + # Point to source license file + println(io, + """ + ```@meta + EditURL = "https://github.com/trixi-framework/Trixi.jl/blob/main/CODE_OF_CONDUCT.md" + ``` + """) + # Write the modified contents + println(io, "# [Code of Conduct](@id code-of-conduct)") + println(io, "") + for line in eachline(joinpath(dirname(@__DIR__), "CODE_OF_CONDUCT.md")) + line = replace(line, "[AUTHORS.md](AUTHORS.md)" => "[Authors](@ref)") + println(io, "> ", line) + end +end + +open(joinpath(@__DIR__, "src", "contributing.md"), "w") do io + # Point to source license file + println(io, + """ + ```@meta + EditURL = "https://github.com/trixi-framework/Trixi.jl/blob/main/CONTRIBUTING.md" + ``` + """) + # Write the modified contents + for line in eachline(joinpath(dirname(@__DIR__), "CONTRIBUTING.md")) + line = replace(line, "[LICENSE.md](LICENSE.md)" => "[License](@ref)") + line = replace(line, "[AUTHORS.md](AUTHORS.md)" => "[Authors](@ref)") + println(io, line) + end end # Create tutorials for the following files: diff --git a/docs/src/contributing.md b/docs/src/contributing.md deleted file mode 100644 index 5f996477215..00000000000 --- a/docs/src/contributing.md +++ /dev/null @@ -1,54 +0,0 @@ -# Contributing - -Trixi.jl is an open-source project and we are very happy to accept contributions -from the community. Please feel free to open issues or submit patches (preferably -as merge requests) any time. For planned larger contributions, it is often -beneficial to get in contact with one of the principal developers first (see -[Authors](@ref)). - -Trixi.jl and its contributions are licensed under the MIT license (see -[License](@ref)). As a contributor, you certify that all your -contributions are in conformance with the *Developer Certificate of Origin -(Version 1.1)*, which is reproduced below. - -## Developer Certificate of Origin (Version 1.1) -The following text was taken from -[https://developercertificate.org](https://developercertificate.org): - - Developer Certificate of Origin - Version 1.1 - - Copyright (C) 2004, 2006 The Linux Foundation and its contributors. - 1 Letterman Drive - Suite D4700 - San Francisco, CA, 94129 - - Everyone is permitted to copy and distribute verbatim copies of this - license document, but changing it is not allowed. 
- - - Developer's Certificate of Origin 1.1 - - By making a contribution to this project, I certify that: - - (a) The contribution was created in whole or in part by me and I - have the right to submit it under the open source license - indicated in the file; or - - (b) The contribution is based upon previous work that, to the best - of my knowledge, is covered under an appropriate open source - license and I have the right under that license to submit that - work with modifications, whether created in whole or in part - by me, under the same open source license (unless I am - permitted to submit under a different license), as indicated - in the file; or - - (c) The contribution was provided directly to me by some other - person who certified (a), (b) or (c) and I have not modified - it. - - (d) I understand and agree that this project and the contribution - are public and that a record of the contribution (including all - personal information I submit with it, including my sign-off) is - maintained indefinitely and may be redistributed consistent with - this project or the open source license(s) involved. From 9f330400a9b62473a5c695f1faedabb487c6c8b1 Mon Sep 17 00:00:00 2001 From: Hendrik Ranocha Date: Fri, 22 Mar 2024 17:57:31 +0100 Subject: [PATCH 6/7] set version to v0.7.4 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 0e960a06a38..ada2b40bf0a 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "Trixi" uuid = "a7f1ee26-1774-49b1-8366-f1abc58fbfcb" authors = ["Michael Schlottke-Lakemper ", "Gregor Gassner ", "Hendrik Ranocha ", "Andrew R. Winters ", "Jesse Chan "] -version = "0.7.4-pre" +version = "0.7.4" [deps] CodeTracking = "da1fd8a2-8d9e-5ec2-8556-3022fb5608a2" From 01032aaa78f6140d7200f50d1011b3c7cd0c9c9f Mon Sep 17 00:00:00 2001 From: Hendrik Ranocha Date: Fri, 22 Mar 2024 17:57:46 +0100 Subject: [PATCH 7/7] set development version to v0.7.5-pre --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index ada2b40bf0a..27df49ed4fa 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "Trixi" uuid = "a7f1ee26-1774-49b1-8366-f1abc58fbfcb" authors = ["Michael Schlottke-Lakemper ", "Gregor Gassner ", "Hendrik Ranocha ", "Andrew R. Winters ", "Jesse Chan "] -version = "0.7.4" +version = "0.7.5-pre" [deps] CodeTracking = "da1fd8a2-8d9e-5ec2-8556-3022fb5608a2"
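
A closing illustration for PATCH 1/7: the new 1D and 3D tests read the recorded values straight out of the callback once the simulation has finished. The minimal sketch below shows that access pattern for the modified examples/tree_1d_dgsem/elixir_euler_source_terms.jl; it assumes the elixir is run via `trixi_include` (so that `time_series` and `equations` are in scope afterwards) and uses only the fields already accessed in test/test_tree_1d_euler.jl, not a dedicated post-processing API.

    using Trixi

    # Run the 1D elixir from PATCH 1/7, which registers the TimeSeriesCallback
    # at the points [0.0, 0.33, 1.0] with interval = 10.
    trixi_include(joinpath(examples_dir(), "tree_1d_dgsem",
                           "elixir_euler_source_terms.jl"))

    # The DiscreteCallback wraps the actual TimeSeriesCallback in its `affect!` field.
    ts = time_series.affect!

    # Three points were requested and the 1D compressible Euler equations carry three
    # solution variables, so every recorded step appends three values per point.
    for p in 1:3
        recorded = ts.point_data[p][1:3]   # values from the first recorded step
        exact = initial_condition_convergence_test(ts.point_coordinates[p],
                                                   ts.time[1], equations)
        println("point ", p, ": recorded = ", recorded, ", exact = ", exact)
    end

The 3D variant works the same way, with five variables per point and coordinates accessed as `ts.point_coordinates[:, p]`, as in test/test_tree_3d_euler.jl.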