Skip to content

Commit

Permalink
remove the n_montecarlo option in the inference tests and just fix it
Browse files (browse the repository at this point in the history)
  • Loading branch information
Red-Portal committed Dec 4, 2024
1 parent eda4ea0 commit b6083ed
Show file tree
Hide file tree
Showing 6 changed files with 6 additions and 6 deletions.
2 changes: 1 addition & 1 deletion test/inference/repgradelbo_distributionsad.jl
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ end
@testset "$(modelname) $(objname) $(realtype) $(adbackname)" for realtype in
[Float64, Float32],
(modelname, modelconstr) in Dict(:Normal => normal_meanfield),
n_montecarlo in [10],
(objname, objective) in Dict(
:RepGradELBOClosedFormEntropy => RepGradELBO(n_montecarlo),
:RepGradELBOStickingTheLanding =>
Expand All @@ -31,6 +30,7 @@ end
T = 1000
η = 1e-3
opt = Optimisers.Descent(realtype(η))
n_montecarlo = 10

# For small enough η, the error of SGD, Δλ, is bounded as
# Δλ ≤ ρ^T Δλ0 + O(η),
Expand Down
2 changes: 1 addition & 1 deletion test/inference/repgradelbo_locationscale.jl
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ end
[Float64, Float32],
(modelname, modelconstr) in
Dict(:Normal => normal_meanfield, :Normal => normal_fullrank),
n_montecarlo in [10],
(objname, objective) in Dict(
:RepGradELBOClosedFormEntropy => RepGradELBO(n_montecarlo),
:RepGradELBOStickingTheLanding =>
Expand All @@ -32,6 +31,7 @@ end
T = 1000
η = 1e-3
opt = Optimisers.Descent(realtype(η))
n_montecarlo = 10

# For small enough η, the error of SGD, Δλ, is bounded as
# Δλ ≤ ρ^T Δλ0 + O(η),
Expand Down
2 changes: 1 addition & 1 deletion test/inference/repgradelbo_locationscale_bijectors.jl
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ end
[Float64, Float32],
(modelname, modelconstr) in
Dict(:NormalLogNormalMeanField => normallognormal_meanfield),
n_montecarlo in [10],
(objname, objective) in Dict(
:RepGradELBOClosedFormEntropy => RepGradELBO(n_montecarlo),
:RepGradELBOStickingTheLanding =>
Expand All @@ -32,6 +31,7 @@ end
T = 1000
η = 1e-3
opt = Optimisers.Descent(realtype(η))
n_montecarlo = 10

b = Bijectors.bijector(model)
b⁻¹ = inverse(b)
Expand Down
2 changes: 1 addition & 1 deletion test/inference/scoregradelbo_distributionsad.jl
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ end
@testset "$(modelname) $(objname) $(realtype) $(adbackname)" for realtype in
[Float64, Float32],
(modelname, modelconstr) in Dict(:Normal => normal_meanfield),
n_montecarlo in [10],
(objname, objective) in Dict(:ScoreGradELBO => ScoreGradELBO(n_montecarlo)),
(adbackname, adtype) in AD_scoregradelbo_distributionsad

Expand All @@ -27,6 +26,7 @@ end
T = 1000
η = 1e-4
opt = Optimisers.Descent(realtype(η))
n_montecarlo = 10

# For small enough η, the error of SGD, Δλ, is bounded as
# Δλ ≤ ρ^T Δλ0 + O(η),
Expand Down
2 changes: 1 addition & 1 deletion test/inference/scoregradelbo_locationscale.jl
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ end
[Float64, Float32],
(modelname, modelconstr) in
Dict(:Normal => normal_meanfield, :Normal => normal_fullrank),
n_montecarlo in [10],
(objname, objective) in Dict(:ScoreGradELBO => ScoreGradELBO(n_montecarlo)),
(adbackname, adtype) in AD_scoregradelbo_locationscale

Expand All @@ -28,6 +27,7 @@ end
T = 1000
η = 1e-4
opt = Optimisers.Descent(realtype(η))
n_montecarlo = 10

# For small enough η, the error of SGD, Δλ, is bounded as
# Δλ ≤ ρ^T Δλ0 + O(η),
Expand Down
2 changes: 1 addition & 1 deletion test/inference/scoregradelbo_locationscale_bijectors.jl
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ end
[Float64, Float32],
(modelname, modelconstr) in
Dict(:NormalLogNormalMeanField => normallognormal_meanfield),
n_montecarlo in [10],
(objname, objective) in Dict(:ScoreGradELBO => ScoreGradELBO(n_montecarlo)),
(adbackname, adtype) in AD_scoregradelbo_locationscale_bijectors

Expand All @@ -28,6 +27,7 @@ end
T = 1000
η = 1e-4
opt = Optimisers.Descent(realtype(η))
n_montecarlo = 10

b = Bijectors.bijector(model)
b⁻¹ = inverse(b)
Expand Down

0 comments on commit b6083ed

Please sign in to comment.