494 changes: 273 additions & 221 deletions docs/Manifest.toml

Large diffs are not rendered by default.

88 changes: 62 additions & 26 deletions examples/Cloudy/Cloudy_calibrate.jl
@@ -94,8 +94,8 @@ prior_N0 = constrained_gaussian(param_names[1], 400, 300, 0.4 * N0_true, Inf)
prior_θ = constrained_gaussian(param_names[2], 1.0, 5.0, 1e-1, Inf)
prior_k = constrained_gaussian(param_names[3], 0.2, 1.0, 1e-4, Inf)
priors = combine_distributions([prior_N0, prior_θ, prior_k])
# Plot the priors
p = plot(priors, constrained = false)
# Plot the priors
p = Plots.plot(priors, constrained = false)
savefig(p, output_directory * "cloudy_priors.png")
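The plotting calls in this example are now qualified with the Plots. prefix, presumably to avoid method ambiguity when another loaded package also exports plot. A minimal, self-contained sketch of the same pattern (the data and output path below are illustrative only):

using Plots

xs = range(0, 2π; length = 100)
p = Plots.plot(xs, sin.(xs), label = "sin")     # fully qualified call avoids clashing with other `plot` exports
Plots.plot!(p, xs, cos.(xs), label = "cos")     # mutating variant, also qualified
Plots.savefig(p, "qualified_plot_example.png")  # illustrative output path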

###
@@ -160,7 +160,7 @@ dummy = ones(n_params)
dist_type = ParticleDistributions.GammaPrimitiveParticleDistribution(dummy...)
model_settings = DynamicalModel.ModelSettings(kernel, dist_type, moments, tspan)
# EKI iterations
n_iter = N_iter
n_iter = [N_iter]
for n in 1:N_iter
# Return transformed parameters in physical/constrained space
ϕ_n = get_ϕ_final(priors, ekiobj)
@@ -169,12 +169,12 @@ for n in 1:N_iter
G_ens = hcat(G_n...) # reformat
terminate = EnsembleKalmanProcesses.update_ensemble!(ekiobj, G_ens)
if !isnothing(terminate)
n_iter = n - 1
n_iter[1] = n - 1
break
end

end

n_iter = n_iter[1]
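The iteration counter is now stored in a one-element vector so the loop body can record an early termination by mutation (n_iter[1] = n - 1) rather than by rebinding a top-level variable, which Julia's soft-scope rules flag as ambiguous inside a script-level for loop. A minimal sketch of the same pattern, with illustrative names:

last_iter = [10]              # one-element vector acts as a mutable box
for n in 1:10
    if n == 4                 # stand-in for a termination condition
        last_iter[1] = n - 1  # mutation, not a new binding, so it survives the loop scope
        break
    end
end
last_iter = last_iter[1]      # unwrap to a plain integer once the loop is done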

# EKI results: Has the ensemble collapsed toward the truth?
θ_true = transform_constrained_to_unconstrained(priors, ϕ_true)
@@ -211,8 +211,8 @@ u_init = get_u_prior(ekiobj)
anim_eki_unconst_cloudy = @animate for i in 1:(n_iter - 1)
u_i = get_u(ekiobj, i)

p1 = plot(u_i[1, :], u_i[2, :], seriestype = :scatter, xlims = extrema(u_init[1, :]), ylims = extrema(u_init[2, :]))
plot!(
p1 = Plots.plot(
u_i[1, :],
u_i[2, :],
seriestype = :scatter,
xlims = extrema(u_init[1, :]),
ylims = extrema(u_init[2, :]),
)
Plots.plot!(
p1,
[θ_true[1]],
xaxis = "u1",
@@ -224,10 +230,16 @@ anim_eki_unconst_cloudy = @animate for i in 1:(n_iter - 1)
margin = 5mm,
title = "EKI iteration = " * string(i),
)
plot!(p1, [θ_true[2]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")

p2 = plot(u_i[2, :], u_i[3, :], seriestype = :scatter, xlims = extrema(u_init[2, :]), ylims = extrema(u_init[3, :]))
plot!(
Plots.plot!(p1, [θ_true[2]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")

p2 = Plots.plot(
u_i[2, :],
u_i[3, :],
seriestype = :scatter,
xlims = extrema(u_init[2, :]),
ylims = extrema(u_init[3, :]),
)
Plots.plot!(
p2,
[θ_true[2]],
xaxis = "u2",
@@ -240,10 +252,16 @@ anim_eki_unconst_cloudy = @animate for i in 1:(n_iter - 1)
title = "EKI iteration = " * string(i),
)

plot!(p2, [θ_true[3]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")
Plots.plot!(p2, [θ_true[3]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")

p3 = plot(u_i[3, :], u_i[1, :], seriestype = :scatter, xlims = extrema(u_init[3, :]), ylims = extrema(u_init[1, :]))
plot!(
p3 = Plots.plot(
u_i[3, :],
u_i[1, :],
seriestype = :scatter,
xlims = extrema(u_init[3, :]),
ylims = extrema(u_init[1, :]),
)
Plots.plot!(
p3,
[θ_true[3]],
xaxis = "u3",
@@ -256,9 +274,9 @@ anim_eki_unconst_cloudy = @animate for i in 1:(n_iter - 1)
title = "EKI iteration = " * string(i),
)

plot!(p3, [θ_true[1]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")
Plots.plot!(p3, [θ_true[1]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")

p = plot(p1, p2, p3, layout = (1, 3))
p = Plots.plot(p1, p2, p3, layout = (1, 3))
end

gif(anim_eki_unconst_cloudy, joinpath(output_directory, "cloudy_eki_unconstr.gif"), fps = 1) # hide
@@ -268,8 +286,14 @@ gif(anim_eki_unconst_cloudy, joinpath(output_directory, "cloudy_eki_unconstr.gif
anim_eki_cloudy = @animate for i in 1:(n_iter - 1)
ϕ_i = get_ϕ(priors, ekiobj, i)

p1 = plot(ϕ_i[1, :], ϕ_i[2, :], seriestype = :scatter, xlims = extrema(ϕ_init[1, :]), ylims = extrema(ϕ_init[2, :]))
plot!(
p1 = Plots.plot(
ϕ_i[1, :],
ϕ_i[2, :],
seriestype = :scatter,
xlims = extrema(ϕ_init[1, :]),
ylims = extrema(ϕ_init[2, :]),
)
Plots.plot!(
p1,
[ϕ_true[1]],
xaxis = "ϕ1",
@@ -282,11 +306,17 @@ anim_eki_cloudy = @animate for i in 1:(n_iter - 1)
title = "EKI iteration = " * string(i),
)

plot!(p1, [ϕ_true[2]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")
Plots.plot!(p1, [ϕ_true[2]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")

p2 = plot(ϕ_i[2, :], ϕ_i[3, :], seriestype = :scatter, xlims = extrema(ϕ_init[2, :]), ylims = extrema(ϕ_init[3, :]))
p2 = Plots.plot(
ϕ_i[2, :],
ϕ_i[3, :],
seriestype = :scatter,
xlims = extrema(ϕ_init[2, :]),
ylims = extrema(ϕ_init[3, :]),
)

plot!(
Plots.plot!(
p2,
[ϕ_true[2]],
xaxis = "ϕ2",
@@ -299,10 +329,16 @@ anim_eki_cloudy = @animate for i in 1:(n_iter - 1)
title = "EKI iteration = " * string(i),
)

plot!(p2, [ϕ_true[3]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")
Plots.plot!(p2, [ϕ_true[3]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")

p3 = plot(ϕ_i[3, :], ϕ_i[1, :], seriestype = :scatter, xlims = extrema(ϕ_init[3, :]), ylims = extrema(ϕ_init[1, :]))
plot!(
p3 = Plots.plot(
ϕ_i[3, :],
ϕ_i[1, :],
seriestype = :scatter,
xlims = extrema(ϕ_init[3, :]),
ylims = extrema(ϕ_init[1, :]),
)
Plots.plot!(
p3,
[ϕ_true[3]],
xaxis = "ϕ3",
@@ -314,9 +350,9 @@ anim_eki_cloudy = @animate for i in 1:(n_iter - 1)
label = false,
title = "EKI iteration = " * string(i),
)
plot!(p3, [ϕ_true[1]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")
Plots.plot!(p3, [ϕ_true[1]], seriestype = "hline", linestyle = :dash, linecolor = :red, label = "optimum")

p = plot(p1, p2, p3, layout = (1, 3))
p = Plots.plot(p1, p2, p3, layout = (1, 3))

end

4 changes: 3 additions & 1 deletion examples/Cloudy/Cloudy_emulate_sample.jl
@@ -17,6 +17,8 @@ using CalibrateEmulateSample.Emulators
using CalibrateEmulateSample.MarkovChainMonteCarlo
using CalibrateEmulateSample.Utilities
using CalibrateEmulateSample.EnsembleKalmanProcesses
using CalibrateEmulateSample.ParameterDistributions
using CalibrateEmulateSample.DataContainers

################################################################################
# #
@@ -90,8 +92,8 @@ function main()

cases = [
"rf-scalar",
"gp-gpjl", # Veeeery slow predictions
"rf-nonsep",
"gp-gpjl", # Veeeery slow predictions
]

# Specify cases to run (e.g., case_mask = [2] only runs the second case)
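A hypothetical sketch of how such a mask might be used to select cases (the loop and names below are illustrative, not the script's actual control flow):

cases = ["rf-scalar", "rf-nonsep", "gp-gpjl"]
case_mask = [1, 2]                  # run only the first two cases
for case in cases[case_mask]
    @info "Running case: $case"
    # ... build the emulator and sample for this case here ...
end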
17 changes: 9 additions & 8 deletions examples/Emulator/G-function/emulate.jl
@@ -40,8 +40,8 @@ function main()

rng = MersenneTwister(seed)

n_repeats = 5# repeat exp with same data.
n_dimensions = 10
n_repeats = 10 # repeat exp with same data.
n_dimensions = 20
# To create the sampling
n_data_gen = 800

@@ -81,7 +81,8 @@ function main()
input[:, i] = samples[ind[i], :]
output[i] = y[ind[i]] + noise[i]
end
encoder_schedule = (decorrelate_structure_mat(), "out")
# encoder_schedule = (decorrelate_structure_mat(), "out")
encoder_schedule = (minmax_scale(), "in_and_out")
iopairs = PairedDataContainer(input, output)
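Both encoder schedules appear in this example: the original decorrelates the outputs against a structure matrix, while the replacement min-max scales inputs and outputs together. A hedged side-by-side, using only the helpers referenced above:

# encode the outputs only, by decorrelating against a structure matrix
schedule_decorrelate = (decorrelate_structure_mat(), "out")
# rescale both inputs and outputs before training the emulator
schedule_minmax = (minmax_scale(), "in_and_out")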

# analytic sobol indices taken from
@@ -99,12 +100,11 @@ function main()
overrides = Dict(
"verbose" => true,
"scheduler" => DataMisfitController(terminate_at = 1e2),
"n_features_opt" => 120,
"n_features_opt" => 100,#120,
"n_iteration" => 10,
"cov_sample_multiplier" => 3.0,
#"localization" => SEC(0.1),#,Doesnt help much tbh
"cov_sample_multiplier" => 0.1,
"n_ensemble" => 100, #40*n_dimensions,
"n_cross_val_sets" => 4,
"n_cross_val_sets" => 2,
)
if case == "Prior"
# don't do anything
@@ -126,7 +126,8 @@ function main()

elseif case ∈ ["RF-scalar", "Prior"]
rank = n_dimensions #<= 10 ? n_dimensions : 10
kernel_structure = SeparableKernel(LowRankFactor(rank, nugget), OneDimFactor())
# kernel_structure = SeparableKernel(LowRankFactor(rank, nugget), OneDimFactor())
kernel_structure = SeparableKernel(DiagonalFactor(nugget), OneDimFactor())
n_features = n_dimensions <= 10 ? n_dimensions * 100 : 1000
if (n_features / n_train_pts > 0.9) && (n_features / n_train_pts < 1.1)
@warn "The number of features similar to the number of training points, poor performance expected, change one or other of these"
4 changes: 2 additions & 2 deletions examples/Emulator/G-function/plot_result.jl
@@ -7,8 +7,8 @@ function main()
output_directory = "output"
cases = ["Prior", "GP", "RF-scalar"]
case = cases[3]
n_dimensions = 3
filename = joinpath(output_directory, "Gfunction_$(case)_$(n_dimensions).jld2")
n_dimensions = 20
filename = joinpath(output_directory, "GFunction_$(case)_$(n_dimensions).jld2")
legend = true

(
Expand Down
11 changes: 6 additions & 5 deletions examples/Emulator/Ishigami/emulate.jl
@@ -37,7 +37,7 @@ function main()

rng = MersenneTwister(seed)

n_repeats = 30 # repeat exp with same data.
n_repeats = 10 # repeat exp with same data.

# To create the sampling
n_data_gen = 2000
@@ -82,14 +82,15 @@ function main()
end
iopairs = PairedDataContainer(input, output)
cases = ["Prior", "GP", "RF-scalar"]
case = cases[2]
case = cases[3]
encoder_schedule = (decorrelate_structure_mat(), "out")
nugget = Float64(1e-4)
nugget = Float64(1e-8)
overrides = Dict(
"verbose" => true,
"scheduler" => DataMisfitController(terminate_at = 1e4),
"n_features_opt" => 150,
"n_features_opt" => 100,
"n_ensemble" => 50,
"n_iteration" => 20,
"n_iteration" => 10,
)

if case == "Prior"
2 changes: 1 addition & 1 deletion examples/Emulator/Ishigami/plot_result.jl
@@ -9,7 +9,7 @@ function main()

output_directory = "output"
cases = ["Prior", "GP", "RF-scalar"]
case = cases[2]
case = cases[3]
filename = joinpath(output_directory, "results_$case.jld2")

(sobol_pts, train_idx, mlt_pred_y, mlt_sobol, analytic_sobol, true_y, noise_sample, noise_cov, estimated_sobol) =
44 changes: 12 additions & 32 deletions examples/Emulator/L63/emulate.jl
@@ -26,7 +26,7 @@ function main()
# rng
rng = MersenneTwister(1232435)

n_repeats = 1 # repeat exp with same data.
n_repeats = 5 # repeat exp with same data.
println("run experiment $n_repeats times")

#for later plots
@@ -96,20 +96,11 @@ function main()


# Emulate
cases = [
"GP",
"RF-prior",
"RF-scalar",
"RF-scalar-diagin",
"RF-svd-nonsep",
"RF-nosvd-nonsep",
"RF-nosvd-sep",
"RF-svd-sep",
]

case = cases[3] # 7

nugget = Float64(1e-8)
cases = ["GP", "RF-prior", "RF-lr-scalar", "rf-diag-scalar", "RF-lr-lr", "RF-nonsep"]

case = cases[4]

nugget = Float64(1e-4)
u_test = []
u_hist = []
train_err = []
@@ -148,10 +139,10 @@
optimizer_options = rf_optimizer_overrides,
)

elseif case ∈ ["RF-scalar", "RF-scalar-diagin"]
elseif case ∈ ["RF-lr-scalar", "rf-diag-scalar"]
n_features = 10 * Int(floor(sqrt(3 * n_tp)))
kernel_structure =
case == "RF-scalar-diagin" ? SeparableKernel(DiagonalFactor(nugget), OneDimFactor()) :
case == "rf-diag-scalar" ? SeparableKernel(DiagonalFactor(nugget), OneDimFactor()) :
SeparableKernel(LowRankFactor(3, nugget), OneDimFactor())
mlt = ScalarRandomFeatureInterface(
n_features,
Expand All @@ -160,7 +151,7 @@ function main()
kernel_structure = kernel_structure,
optimizer_options = rf_optimizer_overrides,
)
elseif case ∈ ["RF-svd-nonsep"]
elseif case ∈ ["RF-nonsep"]
kernel_structure = NonseparableKernel(LowRankFactor(4, nugget))
n_features = 500

@@ -172,18 +163,7 @@
kernel_structure = kernel_structure,
optimizer_options = rf_optimizer_overrides,
)
elseif case ∈ ["RF-nosvd-nonsep"]
kernel_structure = NonseparableKernel(LowRankFactor(4, nugget))
n_features = 500
mlt = VectorRandomFeatureInterface(
n_features,
3,
3,
rng = rng,
kernel_structure = kernel_structure,
optimizer_options = rf_optimizer_overrides,
)
elseif case ∈ ["RF-nosvd-sep", "RF-svd-sep"]
elseif case ∈ ["RF-lr-lr"]
kernel_structure = SeparableKernel(LowRankFactor(3, nugget), LowRankFactor(1, nugget))
n_features = 500
mlt = VectorRandomFeatureInterface(
@@ -210,7 +190,7 @@
end

# Emulate
if case ∈ ["RF-nosvd-nonsep", "RF-nosvd-sep"]
if case ∈ ["RF-nonsep"]
encoder_schedule = [] # no encoding
else
encoder_schedule = (decorrelate_structure_mat(), "out")
Expand All @@ -224,7 +204,7 @@ function main()
optimize_hyperparameters!(emulator)

# diagnostics
if case ∈ ["RF-svd-nonsep", "RF-nosvd-nonsep", "RF-svd-sep"]
if case ∈ ["RF-nonsep" "RF-lr-lr"]
push!(opt_diagnostics, get_optimizer(mlt)[1]) #length-1 vec of vec -> vec
end

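The renamed cases in this file encode the kernel structure in the name (lr = low-rank factor, diag = diagonal factor, nonsep = nonseparable). A hedged summary of the constructors as they are used above, assuming they come from CalibrateEmulateSample's emulator interface:

nugget = Float64(1e-4)
# "RF-lr-scalar": scalar-output random features, low-rank factor over the inputs
ks_lr_scalar = SeparableKernel(LowRankFactor(3, nugget), OneDimFactor())
# "rf-diag-scalar": scalar-output random features, diagonal factor over the inputs
ks_diag_scalar = SeparableKernel(DiagonalFactor(nugget), OneDimFactor())
# "RF-lr-lr": vector-output random features, low-rank factors over inputs and outputs
ks_lr_lr = SeparableKernel(LowRankFactor(3, nugget), LowRankFactor(1, nugget))
# "RF-nonsep": vector-output random features with a nonseparable low-rank kernel
ks_nonsep = NonseparableKernel(LowRankFactor(4, nugget))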
2 changes: 2 additions & 0 deletions examples/Emulator/Regression_2d_2d/Project.toml
@@ -1,8 +1,10 @@
[deps]
CalibrateEmulateSample = "95e48a1f-0bec-4818-9538-3db4340308e3"
ColorSchemes = "35d6a980-a343-548e-a6ea-1d62b119f2f4"
Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41"
JLD2 = "033835bb-8acc-5ee8-8aae-3f567f8a3819"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
PyPlot = "d330b81b-6aea-500a-939a-2ce795aea3ee"
Expand Down