Commit b6083ed

remove n_montecarlo option in the inference tests and just fix it

1 parent eda4ea0

6 files changed (+6, -6 lines)
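The change is identical across the six test files: `n_montecarlo` is dropped from the `@testset ... for` iteration, where it only ever took the single value 10, and is instead fixed as a local inside the test body. A minimal sketch of the pattern is shown below; the testset name, the `samples` variable, and the `randn` call are placeholders standing in for the actual ELBO estimator setup, not code from the repository.

using Test

# Before: n_montecarlo was a loop axis of the testset, with a single value:
#     @testset "..." for realtype in [Float64, Float32], n_montecarlo in [10]
#
# After: n_montecarlo is fixed inside the loop body instead.
@testset "fixed n_montecarlo (sketch)" for realtype in [Float64, Float32]
    n_montecarlo = 10                        # fixed sample count, no longer a test axis
    samples = randn(realtype, n_montecarlo)  # stand-in for the Monte Carlo draws
    @test length(samples) == n_montecarlo
    @test eltype(samples) == realtype
end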

test/inference/repgradelbo_distributionsad.jl (+1 -1)

@@ -14,7 +14,6 @@ end
 @testset "$(modelname) $(objname) $(realtype) $(adbackname)" for realtype in
     [Float64, Float32],
     (modelname, modelconstr) in Dict(:Normal => normal_meanfield),
-    n_montecarlo in [10],
     (objname, objective) in Dict(
         :RepGradELBOClosedFormEntropy => RepGradELBO(n_montecarlo),
         :RepGradELBOStickingTheLanding =>
@@ -31,6 +30,7 @@ end
     T = 1000
     η = 1e-3
     opt = Optimisers.Descent(realtype(η))
+    n_montecarlo = 10

     # For small enough η, the error of SGD, Δλ, is bounded as
     #     Δλ ≤ ρ^T Δλ0 + O(η),

test/inference/repgradelbo_locationscale.jl (+1 -1)

@@ -15,7 +15,6 @@ end
     [Float64, Float32],
     (modelname, modelconstr) in
     Dict(:Normal => normal_meanfield, :Normal => normal_fullrank),
-    n_montecarlo in [10],
     (objname, objective) in Dict(
         :RepGradELBOClosedFormEntropy => RepGradELBO(n_montecarlo),
         :RepGradELBOStickingTheLanding =>
@@ -32,6 +31,7 @@ end
     T = 1000
     η = 1e-3
     opt = Optimisers.Descent(realtype(η))
+    n_montecarlo = 10

     # For small enough η, the error of SGD, Δλ, is bounded as
     #     Δλ ≤ ρ^T Δλ0 + O(η),

test/inference/repgradelbo_locationscale_bijectors.jl (+1 -1)

@@ -15,7 +15,6 @@ end
     [Float64, Float32],
     (modelname, modelconstr) in
     Dict(:NormalLogNormalMeanField => normallognormal_meanfield),
-    n_montecarlo in [10],
     (objname, objective) in Dict(
         :RepGradELBOClosedFormEntropy => RepGradELBO(n_montecarlo),
         :RepGradELBOStickingTheLanding =>
@@ -32,6 +31,7 @@ end
     T = 1000
     η = 1e-3
     opt = Optimisers.Descent(realtype(η))
+    n_montecarlo = 10

     b = Bijectors.bijector(model)
     b⁻¹ = inverse(b)

test/inference/scoregradelbo_distributionsad.jl (+1 -1)

@@ -14,7 +14,6 @@ end
 @testset "$(modelname) $(objname) $(realtype) $(adbackname)" for realtype in
     [Float64, Float32],
     (modelname, modelconstr) in Dict(:Normal => normal_meanfield),
-    n_montecarlo in [10],
     (objname, objective) in Dict(:ScoreGradELBO => ScoreGradELBO(n_montecarlo)),
     (adbackname, adtype) in AD_scoregradelbo_distributionsad

@@ -27,6 +26,7 @@ end
     T = 1000
     η = 1e-4
     opt = Optimisers.Descent(realtype(η))
+    n_montecarlo = 10

     # For small enough η, the error of SGD, Δλ, is bounded as
     #     Δλ ≤ ρ^T Δλ0 + O(η),

test/inference/scoregradelbo_locationscale.jl (+1 -1)

@@ -15,7 +15,6 @@ end
     [Float64, Float32],
     (modelname, modelconstr) in
     Dict(:Normal => normal_meanfield, :Normal => normal_fullrank),
-    n_montecarlo in [10],
     (objname, objective) in Dict(:ScoreGradELBO => ScoreGradELBO(n_montecarlo)),
     (adbackname, adtype) in AD_scoregradelbo_locationscale

@@ -28,6 +27,7 @@ end
     T = 1000
     η = 1e-4
     opt = Optimisers.Descent(realtype(η))
+    n_montecarlo = 10

     # For small enough η, the error of SGD, Δλ, is bounded as
     #     Δλ ≤ ρ^T Δλ0 + O(η),

test/inference/scoregradelbo_locationscale_bijectors.jl (+1 -1)

@@ -15,7 +15,6 @@ end
     [Float64, Float32],
     (modelname, modelconstr) in
     Dict(:NormalLogNormalMeanField => normallognormal_meanfield),
-    n_montecarlo in [10],
     (objname, objective) in Dict(:ScoreGradELBO => ScoreGradELBO(n_montecarlo)),
     (adbackname, adtype) in AD_scoregradelbo_locationscale_bijectors

@@ -28,6 +27,7 @@ end
     T = 1000
     η = 1e-4
     opt = Optimisers.Descent(realtype(η))
+    n_montecarlo = 10

     b = Bijectors.bijector(model)
     b⁻¹ = inverse(b)
