Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
35 commits
Select commit Hold shift + click to select a range
9cb30f6
EKP-based approximate dim-reduction
odunbar May 3, 2025
0472a5f
save diagnostic matrices
odunbar May 7, 2025
308257b
estimate_posteriors.jl
odunbar May 8, 2025
e542a07
small changes
odunbar May 8, 2025
5924c7f
working with full G
odunbar May 8, 2025
acd8269
scale norms
odunbar May 8, 2025
877c352
reorganize
odunbar May 8, 2025
3a9435a
return diagnostic mats lines
odunbar May 10, 2025
aab7c22
nice formatting
odunbar May 10, 2025
a44d5bd
Add H_u^y
ArneBouillon May 12, 2025
144553e
Add MCMC sampling; add first version of Hgy; add Lorenz; refactor code
ArneBouillon Jun 5, 2025
c526551
Rename data folder to escape .gitignore
ArneBouillon Jun 5, 2025
cba2e0b
Remove debugging line
ArneBouillon Jun 5, 2025
f1e21f1
Also build diagnostic matrices with MCMC final samples
ArneBouillon Jun 5, 2025
557bafa
add StatsPlots
odunbar Jun 6, 2025
61f2f26
more pkg additions
odunbar Jun 6, 2025
715a7da
Add temperature parameter to step1 MCMC
ArneBouillon Jun 6, 2025
a9df3b0
Add finite-difference Jacobian for Lorenz
ArneBouillon Jun 6, 2025
1e6de70
Use prior for entire space (as I think we should)
ArneBouillon Jun 7, 2025
1126784
Fix bug
ArneBouillon Jun 7, 2025
674ea13
Add linear model, forward-model marginalization, and Huy diagnostic m…
ArneBouillon Jun 9, 2025
2c59ae5
Fix silly performance bug and make step3_num_marginalization_samples …
ArneBouillon Jun 10, 2025
acb3f31
Re-add EKS sampling in step 3 (only with :forward_model marginalization)
ArneBouillon Jun 10, 2025
964171f
Add true parameter as initial guess to MCMC
ArneBouillon Jun 10, 2025
70a9283
Refactor step2 to reduce code duplication
ArneBouillon Jun 10, 2025
2b910e1
Set up larger experiment
ArneBouillon Jun 11, 2025
1079f16
Add LinLinExp problem and fix bug in Huy_ekp_final
ArneBouillon Jun 11, 2025
51e05c5
Fix observation-informed output diagnostics
ArneBouillon Jun 11, 2025
ab58616
Format
ArneBouillon Jun 11, 2025
2c657b9
Fix initial noise to fix Lorenz Jacobian bug
ArneBouillon Jun 12, 2025
0adcd6d
Fix Huy bug
ArneBouillon Jun 12, 2025
48a5321
Add linear-regression diagnostics
ArneBouillon Jun 12, 2025
9726485
Rewrite step 2; add diagnostics at different αs; add localsl and egi
ArneBouillon Jul 3, 2025
9c6fa4f
Center Lorenz prior
ArneBouillon Jul 9, 2025
fa9c515
Use gradients for manifold optimization
ArneBouillon Aug 13, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions examples/DimensionReduction/Project.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
[deps]
AdvancedMH = "5b7e9947-ddc0-4b3f-9b55-0d8042f74170"
ChunkSplitters = "ae650224-84b6-46f8-82ea-d812ca08434e"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
EnsembleKalmanProcesses = "aa8a2aa5-91d8-4396-bcef-d4f2ec43552d"
FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
JLD2 = "033835bb-8acc-5ee8-8aae-3f567f8a3819"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d"
Manifolds = "1cead3c2-87b3-11e9-0ccd-23c62b72b94e"
Manopt = "0fc0a36d-df90-57f3-8f93-d78a9fc72bb5"
Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
StatsPlots = "f3b207a7-027a-5e70-b257-86293d7955fd"
Empty file.
Empty file.
1 change: 1 addition & 0 deletions examples/DimensionReduction/problems/forward_maps.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Root of the forward-model hierarchy. Concrete models (e.g. `Linear`, `LinearExp`,
# `LinLinExp` in this folder) subtype this and implement `forward_map(X, model)`
# and `jac_forward_map(X, model)`.
abstract type ForwardMapType end
52 changes: 52 additions & 0 deletions examples/DimensionReduction/problems/problem_linear.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
using LinearAlgebra
using EnsembleKalmanProcesses
using EnsembleKalmanProcesses.ParameterDistributions
using Statistics
using Distributions

include("forward_maps.jl")

"""
    linear(input_dim, output_dim, rng)

Construct a linear Bayesian inverse problem `y = A u + η` with a Gaussian prior.

Returns the tuple `(prior, y, obs_noise_cov, model, true_parameter)`:
`prior` is a `ParameterDistribution` over the unconstrained parameters,
`y` the noisy observation vector, `obs_noise_cov` the observational noise
covariance, `model` a `Linear` forward map, and `true_parameter` the prior
draw used to generate `y`.

All randomness is drawn from `rng` for reproducibility.
"""
function linear(input_dim, output_dim, rng)
    # Prior: zero-mean Gaussian with polynomially decaying variances γ0 * j^β_γ.
    γ0 = 4.0
    β_γ = -2
    Γ = Diagonal([γ0 * (1.0 * j)^β_γ for j in 1:input_dim])
    prior_dist = MvNormal(zeros(input_dim), Γ)
    prior = ParameterDistribution(
        Dict(
            "distribution" => Parameterized(prior_dist),
            "constraint" => repeat([no_constraint()], input_dim),
            "name" => "param_$(input_dim)",
        ),
    )

    # Random linear map with decaying singular values λ0 * j^β_λ: random
    # orthogonal factors U, V around a fixed diagonal spectrum Λ.
    U = qr(randn(rng, (output_dim, output_dim))).Q
    V = qr(randn(rng, (input_dim, input_dim))).Q
    λ0 = 100.0
    β_λ = -1
    Λ = Diagonal([λ0 * (1.0 * j)^β_λ for j in 1:output_dim])
    A = U * Λ * V[1:output_dim, :] # output x input
    model = Linear(input_dim, output_dim, A)

    # Generate a data sample y = G(u†) + η with heteroscedastic diagonal noise.
    obs_noise_cov = Diagonal([Float64(j)^(-1 / 2) for j in 1:output_dim])
    noise = rand(rng, MvNormal(zeros(output_dim), obs_noise_cov))
    # Fix: draw the true parameter from `rng` (was `rand(prior_dist)`, which
    # used the global RNG and made the problem non-reproducible).
    true_parameter = rand(rng, prior_dist)
    y = vec(forward_map(true_parameter, model) + noise)
    return prior, y, obs_noise_cov, model, true_parameter
end

# Linear forward model y = G * u (see `forward_map`).
# `G` is an output_dim × input_dim matrix; the dimensions are stored alongside it.
struct Linear{AM <: AbstractMatrix} <: ForwardMapType
input_dim::Int
output_dim::Int
G::AM
end

# Evaluate the linear model: a single parameter vector, or a matrix whose
# columns are samples, is mapped through G.
forward_map(X::AbstractVecOrMat, model::Linear) = model.G * X

# Jacobian of the linear map is the matrix G itself, independent of the input.

# Single-sample variant (added for consistency with the `LinLinExp` methods,
# which accept an `AbstractVector`): returns the Jacobian at one point.
jac_forward_map(X::AbstractVector, model::Linear) = model.G

# Batched variant: one Jacobian per sample column of `X`.
function jac_forward_map(X::AbstractMatrix, model::Linear)
    return [model.G for _ in eachcol(X)]
end
62 changes: 62 additions & 0 deletions examples/DimensionReduction/problems/problem_linear_exp.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
using LinearAlgebra
using EnsembleKalmanProcesses
using EnsembleKalmanProcesses.ParameterDistributions
using Statistics
using Distributions

# Inverse problem will be taken from (Cui, Tong, 2021) https://arxiv.org/pdf/2101.02417, example 7.1
include("forward_maps.jl")

"""
    linear_exp(input_dim, output_dim, rng)

Construct the linear-exponential inverse problem `y = A exp.(u) + η` from
(Cui, Tong, 2021) https://arxiv.org/pdf/2101.02417, example 7.1, with a
Gaussian prior.

Returns the tuple `(prior, y, obs_noise_cov, model, true_parameter)`:
`prior` is a `ParameterDistribution`, `y` the noisy observation,
`obs_noise_cov` the (isotropic) noise covariance, `model` a `LinearExp`
forward map, and `true_parameter` the prior draw used to generate `y`.

All randomness is drawn from `rng` for reproducibility.
"""
function linear_exp(input_dim, output_dim, rng)
    # Prior: zero-mean Gaussian with polynomially decaying variances γ0 * j^β_γ.
    γ0 = 4.0
    β_γ = -2
    Γ = Diagonal([γ0 * (1.0 * j)^β_γ for j in 1:input_dim])
    prior_dist = MvNormal(zeros(input_dim), Γ)
    prior = ParameterDistribution(
        Dict(
            "distribution" => Parameterized(prior_dist),
            "constraint" => repeat([no_constraint()], input_dim),
            "name" => "param_$(input_dim)",
        ),
    )

    # Forward map: random linear factor with decaying singular values, from
    # Stewart 1980: https://www.jstor.org/stable/2156882?seq=2
    U = qr(randn(rng, (output_dim, output_dim))).Q
    V = qr(randn(rng, (input_dim, input_dim))).Q
    λ0 = 100.0
    β_λ = -1
    Λ = Diagonal([λ0 * (1.0 * j)^β_λ for j in 1:output_dim])
    A = U * Λ * V[1:output_dim, :] # output x input
    model = LinearExp(input_dim, output_dim, A)

    # Generate a data sample y = G(u†) + η with isotropic noise.
    obs_noise_std = 1.0
    obs_noise_cov = (obs_noise_std^2) * I(output_dim)
    noise = rand(rng, MvNormal(zeros(output_dim), obs_noise_cov))
    # Fix: draw the true parameter from `rng` (was `rand(prior_dist)`, which
    # used the global RNG and made the problem non-reproducible).
    true_parameter = rand(rng, prior_dist)
    y = vec(forward_map(true_parameter, model) + noise)
    return prior, y, obs_noise_cov, model, true_parameter
end


## G*exp(X)
## G*exp(X)
# Nonlinear forward model y = G * exp.(u) (see `forward_map`).
# `G` is an output_dim × input_dim matrix; the dimensions are stored alongside it.
struct LinearExp{AM <: AbstractMatrix} <: ForwardMapType
input_dim::Int
output_dim::Int
G::AM
end

# columns of X are samples
# Evaluate G * exp.(X); X is a single parameter vector or a matrix whose
# columns are samples (exp is applied elementwise either way).
forward_map(X::AbstractVecOrMat, model::LinearExp) = model.G * exp.(X)

# columns of X are samples
# Jacobian of u ↦ G * exp.(u): ∂G_i/∂u_j = G_ij * exp(u_j), i.e. G with its
# j-th column scaled by exp(u_j). Returns one Jacobian per sample column of X.
function jac_forward_map(X::AbstractMatrix, model::LinearExp)
    return map(eachcol(X)) do sample
        # Row-vector broadcast scales each column of G without forming Diagonal.
        model.G .* exp.(reshape(sample, 1, :))
    end
end
56 changes: 56 additions & 0 deletions examples/DimensionReduction/problems/problem_linlinexp.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
using LinearAlgebra
using EnsembleKalmanProcesses
using EnsembleKalmanProcesses.ParameterDistributions
using Statistics
using Distributions

include("forward_maps.jl")

"""
    linlinexp(input_dim, output_dim, rng)

Construct the mildly nonlinear inverse problem `y = A (u .* exp.(0.05 u)) + η`
with a Gaussian prior.

Returns the tuple `(prior, y, obs_noise_cov, model, true_parameter)`:
`prior` is a `ParameterDistribution`, `y` the noisy observation,
`obs_noise_cov` the noise covariance, `model` a `LinLinExp` forward map, and
`true_parameter` the prior draw used to generate `y`.

All randomness is drawn from `rng` for reproducibility.
"""
function linlinexp(input_dim, output_dim, rng)
    # Prior: zero-mean Gaussian with polynomially decaying variances γ0 * j^β_γ.
    γ0 = 4.0
    β_γ = -2
    Γ = Diagonal([γ0 * (1.0 * j)^β_γ for j in 1:input_dim])
    prior_dist = MvNormal(zeros(input_dim), Γ)
    prior = ParameterDistribution(
        Dict(
            "distribution" => Parameterized(prior_dist),
            "constraint" => repeat([no_constraint()], input_dim),
            "name" => "param_$(input_dim)",
        ),
    )

    # Random linear factor with decaying singular values λ0 * j^β_λ.
    U = qr(randn(rng, (output_dim, output_dim))).Q
    V = qr(randn(rng, (input_dim, input_dim))).Q
    λ0 = 100.0
    β_λ = -1
    Λ = Diagonal([λ0 * (1.0 * j)^β_λ for j in 1:output_dim])
    A = U * Λ * V[1:output_dim, :] # output x input
    model = LinLinExp(input_dim, output_dim, A)

    # Generate a data sample y = G(u†) + η with heteroscedastic diagonal noise.
    obs_noise_cov = Diagonal([Float64(j)^(-1 / 2) for j in 1:output_dim])
    noise = rand(rng, MvNormal(zeros(output_dim), obs_noise_cov))
    # Fix: draw the true parameter from `rng` (was `rand(prior_dist)`, which
    # used the global RNG and made the problem non-reproducible).
    true_parameter = rand(rng, prior_dist)
    y = vec(forward_map(true_parameter, model) + noise)
    return prior, y, obs_noise_cov, model, true_parameter
end

# Nonlinear forward model y = G * (u .* exp.(0.05 u)) (see `forward_map`).
# `G` is an output_dim × input_dim matrix; the dimensions are stored alongside it.
struct LinLinExp{AM <: AbstractMatrix} <: ForwardMapType
input_dim::Int
output_dim::Int
G::AM
end

# Evaluate G * (X .* exp.(0.05 X)); X is a single parameter vector or a matrix
# whose columns are samples (the nonlinearity acts elementwise either way).
function forward_map(X::AbstractVecOrMat, model::LinLinExp)
    transformed = X .* exp.(0.05X)
    return model.G * transformed
end

# Jacobian at a single point X: d/dx [x e^{0.05x}] = e^{0.05x} (1 + 0.05x),
# applied columnwise to G via a diagonal scaling.
function jac_forward_map(X::AbstractVector, model::LinLinExp)
    deriv = exp.(0.05X) .* (1 .+ 0.05X)
    return model.G * Diagonal(deriv)
end

# Batched variant: one Jacobian per sample column of X, delegating to the
# single-sample method above.
function jac_forward_map(X::AbstractMatrix, model::LinLinExp)
    return map(col -> jac_forward_map(col, model), eachcol(X))
end
Loading
Loading