Skip to content

Commit

Permalink
Replace Turing.Model -> DynamicPPL.Model
Browse files Browse the repository at this point in the history
  • Loading branch information
penelopeysm committed Jan 21, 2025
1 parent c3e11af commit 2409aa1
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 3 deletions.
6 changes: 4 additions & 2 deletions src/mcmc/Inference.jl
Original file line number Diff line number Diff line change
Expand Up @@ -767,13 +767,15 @@ julia> [first(t.θ.x) for t in transitions] # extract samples for `x`
[-1.704630494695469]
```
"""
-function transitions_from_chain(model::Turing.Model, chain::MCMCChains.Chains; kwargs...)
+function transitions_from_chain(
+    model::DynamicPPL.Model, chain::MCMCChains.Chains; kwargs...
+)

[Codecov / codecov/patch — check warning on line 770 in src/mcmc/Inference.jl (src/mcmc/Inference.jl#L770): Added line #L770 was not covered by tests]
return transitions_from_chain(Random.default_rng(), model, chain; kwargs...)
end

function transitions_from_chain(
rng::Random.AbstractRNG,
-    model::Turing.Model,
+    model::DynamicPPL.Model,
chain::MCMCChains.Chains;
sampler=DynamicPPL.SampleFromPrior(),
)
Expand Down
2 changes: 1 addition & 1 deletion test/mcmc/abstractmcmc.jl
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ using Test: @test, @test_throws, @testset
using Turing
using Turing.Inference: AdvancedHMC

-function initialize_nuts(model::Turing.Model)
+function initialize_nuts(model::DynamicPPL.Model)
# Create a log-density function with an implementation of the
# gradient so we ensure that we're using the same AD backend as in Turing.
f = LogDensityProblemsAD.ADgradient(DynamicPPL.LogDensityFunction(model))
Expand Down

0 comments on commit 2409aa1

Please sign in to comment.