From 3d14e53a1fc125843f76de4a6d2e0e2cb80556b0 Mon Sep 17 00:00:00 2001
From: Tor Erlend Fjelde
Date: Thu, 17 Nov 2022 16:06:28 +0000
Subject: [PATCH 1/5] use similar in creation of diffresults buffer

---
 src/DiffResults_helpers.jl | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/DiffResults_helpers.jl b/src/DiffResults_helpers.jl
index 9ee4e1a..322fa28 100644
--- a/src/DiffResults_helpers.jl
+++ b/src/DiffResults_helpers.jl
@@ -14,7 +14,7 @@ Allocate a DiffResults buffer for a gradient, taking the element type of `x` int
 function _diffresults_buffer(ℓ, x)
     T = eltype(x)
     S = T <: Real ? float(Real) : Float64 # heuristic
-    DiffResults.MutableDiffResult(zero(S), (Vector{S}(undef, dimension(ℓ)), ))
+    DiffResults.MutableDiffResult(zero(S), (similar(x, S, dimension(ℓ)), ))
 end
 
 """
@@ -25,5 +25,6 @@ constructed with [`diffresults_buffer`](@ref). Gradient is not copied as caller
 vector.
 """
 function _diffresults_extract(diffresult::DiffResults.DiffResult)
+    # NOTE: Is this still needed?
     DiffResults.value(diffresult)::Real, DiffResults.gradient(diffresult)
 end

From 01d27450dbf3f77d89f3734c9551e7eff10e0b85 Mon Sep 17 00:00:00 2001
From: Tor Erlend Fjelde
Date: Thu, 17 Nov 2022 17:15:34 +0000
Subject: [PATCH 2/5] use the input to make the DiffResults buffer

---
 src/DiffResults_helpers.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/DiffResults_helpers.jl b/src/DiffResults_helpers.jl
index 322fa28..d593c26 100644
--- a/src/DiffResults_helpers.jl
+++ b/src/DiffResults_helpers.jl
@@ -14,7 +14,7 @@ Allocate a DiffResults buffer for a gradient, taking the element type of `x` int
 function _diffresults_buffer(ℓ, x)
     T = eltype(x)
     S = T <: Real ? float(Real) : Float64 # heuristic
-    DiffResults.MutableDiffResult(zero(S), (similar(x, S, dimension(ℓ)), ))
+    DiffResults.MutableDiffResult(zero(S), (similar(x, S), ))
 end
 
 """

From 00af7139e22168cfe14954b662e4bc54ed7bf091 Mon Sep 17 00:00:00 2001
From: Tor Erlend Fjelde
Date: Sat, 19 Nov 2022 04:07:28 +0000
Subject: [PATCH 3/5] remove the logdensity argument from _diffresults_buffer

---
 src/AD_ForwardDiff.jl      | 2 +-
 src/AD_ReverseDiff.jl      | 2 +-
 src/DiffResults_helpers.jl | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/AD_ForwardDiff.jl b/src/AD_ForwardDiff.jl
index 3b20070..aab4522 100644
--- a/src/AD_ForwardDiff.jl
+++ b/src/AD_ForwardDiff.jl
@@ -45,7 +45,7 @@ end
 
 function logdensity_and_gradient(fℓ::ForwardDiffLogDensity, x::AbstractVector)
     @unpack ℓ, gradientconfig = fℓ
-    buffer = _diffresults_buffer(ℓ, x)
+    buffer = _diffresults_buffer(x)
     result = ForwardDiff.gradient!(buffer, Base.Fix1(logdensity, ℓ), x, gradientconfig)
     _diffresults_extract(result)
 end
diff --git a/src/AD_ReverseDiff.jl b/src/AD_ReverseDiff.jl
index 07e0b10..91f07ef 100644
--- a/src/AD_ReverseDiff.jl
+++ b/src/AD_ReverseDiff.jl
@@ -50,7 +50,7 @@ end
 
 function logdensity_and_gradient(∇ℓ::ReverseDiffLogDensity, x::AbstractVector)
     @unpack ℓ, compiledtape = ∇ℓ
-    buffer = _diffresults_buffer(ℓ, x)
+    buffer = _diffresults_buffer(x)
     if compiledtape === nothing
         result = ReverseDiff.gradient!(buffer, Base.Fix1(logdensity, ℓ), x)
     else
diff --git a/src/DiffResults_helpers.jl b/src/DiffResults_helpers.jl
index d593c26..419282b 100644
--- a/src/DiffResults_helpers.jl
+++ b/src/DiffResults_helpers.jl
@@ -11,7 +11,7 @@ $(SIGNATURES)
 Allocate a DiffResults buffer for a gradient, taking the element type of `x` into account
 (heuristically).
 """
-function _diffresults_buffer(ℓ, x)
+function _diffresults_buffer(x)
     T = eltype(x)
     S = T <: Real ? float(Real) : Float64 # heuristic
     DiffResults.MutableDiffResult(zero(S), (similar(x, S), ))

From 963430803b0e3d768218771c38b7429c37f4275a Mon Sep 17 00:00:00 2001
From: Tor Erlend Fjelde
Date: Tue, 22 Nov 2022 09:51:44 +0000
Subject: [PATCH 4/5] removed type-annotation

---
 src/DiffResults_helpers.jl | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/DiffResults_helpers.jl b/src/DiffResults_helpers.jl
index 419282b..3624ef4 100644
--- a/src/DiffResults_helpers.jl
+++ b/src/DiffResults_helpers.jl
@@ -25,6 +25,5 @@ constructed with [`diffresults_buffer`](@ref). Gradient is not copied as caller
 vector.
 """
 function _diffresults_extract(diffresult::DiffResults.DiffResult)
-    # NOTE: Is this still needed?
-    DiffResults.value(diffresult)::Real, DiffResults.gradient(diffresult)
+    DiffResults.value(diffresult), DiffResults.gradient(diffresult)
 end

From 39dd7c130f8c3da9f6978aae5542d7f316cbfed1 Mon Sep 17 00:00:00 2001
From: Tor Erlend Fjelde
Date: Tue, 22 Nov 2022 09:52:28 +0000
Subject: [PATCH 5/5] patch version bump

---
 Project.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Project.toml b/Project.toml
index 4de0239..ff33a90 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "LogDensityProblems"
 uuid = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
 authors = ["Tamas K. Papp "]
-version = "1.0.2"
+version = "1.0.3"
 
 [deps]
 ArgCheck = "dce04be8-c92d-5529-be00-80e4d2c0e197"
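
Note (not part of the patch series above): a minimal sketch of what the `similar(x, S)` construction buys over the old `Vector{S}(undef, dimension(ℓ))` one — the buffer's container type now follows the input `x`, and the log density's `dimension` is no longer needed, which is why patch 3 can drop the `ℓ` argument. The helper name `diffresults_buffer_sketch` and the toy log density `f` below are illustrative assumptions, not code from the package; the package itself routes this through `_diffresults_buffer` and `logdensity_and_gradient`.

using DiffResults
import ForwardDiff

# Same construction as the patched _diffresults_buffer: `similar(x, S)` gives a
# buffer with the container type of `x`, with element type S chosen by the same
# heuristic as before.
function diffresults_buffer_sketch(x)
    T = eltype(x)
    S = T <: Real ? float(Real) : Float64 # heuristic, as in the patch
    DiffResults.MutableDiffResult(zero(S), (similar(x, S), ))
end

# Toy usage mirroring logdensity_and_gradient: fill the buffer in place, then
# read the value and gradient back out of it.
f(x) = -sum(abs2, x) / 2
x = [1.0, 2.0, 3.0]
buffer = diffresults_buffer_sketch(x)
result = ForwardDiff.gradient!(buffer, f, x)
DiffResults.value(result), DiffResults.gradient(result)  # (-7.0, [-1.0, -2.0, -3.0])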