diff --git a/Project.toml b/Project.toml
index e0bee298..60e4843e 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "SpectralFitting"
 uuid = "f2c56810-742e-4b72-8bf4-27af3bb81a12"
 authors = ["Fergus Baker "]
-version = "0.5.8"
+version = "0.5.9"
 
 [deps]
 ConstructionBase = "187b0558-2788-49d3-abe0-74a17ed4e7c9"
diff --git a/src/abstract-models.jl b/src/abstract-models.jl
index 9f5ce043..5f02712d 100644
--- a/src/abstract-models.jl
+++ b/src/abstract-models.jl
@@ -378,9 +378,5 @@ function make_diff_parameter_cache(
     N = isnothing(param_diff_cache_size) ? length(vals) : param_diff_cache_size
     diffcache = DiffCache(vals, ForwardDiff.pickchunksize(N))
 
-    # embed current parameter values inside of the dual cache
-    # else all frozens will be zero
-    get_tmp(diffcache, ForwardDiff.Dual(one(eltype(vals)))) .= vals
-
-    ParameterCache(free_mask, diffcache)
+    ParameterCache(free_mask, diffcache, vals[.!free_mask])
 end
diff --git a/src/param-cache.jl b/src/param-cache.jl
index 09b640ac..3b01951c 100644
--- a/src/param-cache.jl
+++ b/src/param-cache.jl
@@ -1,9 +1,10 @@
 # these should work for both single models and composite models
 # so that the parameters can all be allocated in one go
 # for multi model: give a view to each cache
-struct ParameterCache{M<:AbstractArray,V}
+struct ParameterCache{M<:AbstractArray,V,T<:Number}
     free_mask::M # bit vector or a view into one
     parameters::V
+    frozen_values::Vector{T}
 end
 
 function _make_free_mask(params::AbstractArray{<:FitParam})
@@ -16,15 +17,20 @@ end
 
 function ParameterCache(params::AbstractArray{<:FitParam})
     free_mask = _make_free_mask(params)
-    ParameterCache(free_mask, map(get_value, params))
+    frozen = params[.!free_mask]
+    ParameterCache(free_mask, map(get_value, params), map(get_value, frozen))
 end
 
-function _update_conditional!(parameters, mask, new_parameters, condition)
+function _update_conditional!(parameters, mask, new_parameters, frozen)
     j::Int = 1
+    k::Int = 1
     for (i, free) in enumerate(mask)
-        if condition(free)
+        if free
             parameters[i] = new_parameters[j]
             j += 1
+        else
+            parameters[i] = frozen[k]
+            k += 1
         end
     end
 end
@@ -35,15 +41,13 @@ _get_parameters(cache::ParameterCache{M,V}, params) where {M<:AbstractArray,V<:D
 
 function update_free_parameters!(cache::ParameterCache, params)
     @assert count(cache.free_mask) == length(params)
-    _update_conditional!(_get_parameters(cache, params), cache.free_mask, params, ==(true))
+    _update_conditional!(
+        _get_parameters(cache, params),
+        cache.free_mask,
+        params,
+        cache.frozen_values,
+    )
     cache
 end
 
-function update_frozen_parameters!(cache::ParameterCache, params)
-    parameters = _get_parameters(cache, params)
-    @assert length(parameters) - count(cache.free_mask) == length(params)
-    _update_conditional!(parameters, cache.free_mask, params, ==(false))
-    cache
-end
-
-export ParameterCache, update_free_parameters!, update_frozen_parameters!
+export ParameterCache, update_free_parameters!
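For context: the param-cache.jl hunk above replaces the predicate-driven `_update_conditional!` (previously called once with `==(true)` for free parameters and once with `==(false)` for frozen ones) with a single pass that interleaves the optimiser's free values with the stored `frozen_values`. A minimal standalone sketch of that interleave, with illustrative names only (not the package API):

# Sketch of the new update logic: free slots are filled from `new_parameters`
# in order, frozen slots are refilled from the cached `frozen` values, so a
# single call restores the full parameter vector.
function interleave!(parameters, mask, new_parameters, frozen)
    j, k = 1, 1
    for (i, free) in enumerate(mask)
        if free
            parameters[i] = new_parameters[j]
            j += 1
        else
            parameters[i] = frozen[k]
            k += 1
        end
    end
    parameters
end

mask = Bool[1, 0, 1, 0]   # free, frozen, free, frozen
interleave!(zeros(4), mask, [2.0, 0.5], [4.0, 5.0])
# returns [2.0, 4.0, 0.5, 5.0], matching the expectation in the test below

Because every update rewrites the frozen slots from `frozen_values`, the dual cache no longer needs its frozen entries pre-seeded, which is why the `get_tmp` embedding work-around in `make_diff_parameter_cache` could be deleted along with `update_frozen_parameters!`.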
diff --git a/test/parameters/test-free-frozen.jl b/test/parameters/test-free-frozen.jl
index 9226894a..f8695bea 100644
--- a/test/parameters/test-free-frozen.jl
+++ b/test/parameters/test-free-frozen.jl
@@ -7,5 +7,6 @@ model = DummyMultiplicative() * PowerLaw(a = FitParam(4.0, frozen = true))
 cache = make_parameter_cache(model)
 SpectralFitting.update_free_parameters!(cache, [2.0, 0.5])
 @test cache.parameters == [2.0, 4.0, 0.5, 5.0]
-SpectralFitting.update_frozen_parameters!(cache, [50.0, 0.0])
-@test cache.parameters == [2.0, 50.0, 0.5, 0.0]
+
+
+# TODO: test to check that auto diff gradients with various sizes can be propagated correctly
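One possible sketch for the TODO above, reusing the model from the test file. It assumes ForwardDiff is available in the test environment, that `make_diff_parameter_cache` accepts the model directly, and that `_get_parameters` returns the dual-aware parameter buffer; the `objective` closure and the gradient assertion are illustrative, not part of the package API. A fuller version would also vary the number of parameters to exercise different chunk sizes.

# Hypothetical test: duals must travel through the DiffCache-backed parameter
# buffer, and the stored frozen values must behave as constants.
using Test, ForwardDiff
using SpectralFitting

model = DummyMultiplicative() * PowerLaw(a = FitParam(4.0, frozen = true))
cache = SpectralFitting.make_diff_parameter_cache(model)

function objective(free)
    SpectralFitting.update_free_parameters!(cache, free)
    # reduce over the full (free + frozen) vector so the gradient has to pass
    # through the cache's interleaving
    sum(abs2, SpectralFitting._get_parameters(cache, free))
end

for x0 in ([2.0, 0.5], [3.0, 1.5])
    g = ForwardDiff.gradient(objective, x0)
    # frozen parameters contribute constants to sum(abs2, ...), so the
    # gradient should reduce to the free-parameter terms
    @test g ≈ 2 .* x0
end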