
Commit 1c09f54

add LMModel
1 parent e8a201a commit 1c09f54

1 file changed: +57 −0 lines


src/LMModel.jl

Lines changed: 57 additions & 0 deletions
export LMModel

@doc raw"""
    LMModel(j_prod!, jt_prod!, F, σ, xk)

Given the unconstrained optimization problem:
```math
\min \tfrac{1}{2} \| F(x) \|^2,
```
this model represents the smooth LM subproblem:
```math
\min_s \ \tfrac{1}{2} \| F(x) + J(x)s \|^2 + \tfrac{1}{2} σ \|s\|^2,
```
where `J` is the Jacobian of `F` at `xk`, represented via matrix-free operations:
`j_prod!(xk, s, out)` computes `J(xk) * s`, and `jt_prod!(xk, r, out)` computes `J(xk)' * r`.

`σ > 0` is a regularization parameter and `v` is a vector of the same size as `F(xk)`
used for intermediate computations.
"""
mutable struct LMModel{T <: Real, V <: AbstractVector{T}, J <: Function, Jt <: Function} <:
               AbstractNLPModel{T, V}
  j_prod!::J
  jt_prod!::Jt
  F::V
  v::V
  xk::V
  σ::T
  meta::NLPModelMeta{T, V}
  counters::Counters
end

function LMModel(j_prod!::J, jt_prod!::Jt, F::V, σ::T, xk::V) where {T, V, J, Jt}
  meta = NLPModelMeta(
    length(xk),
    x0 = xk, # Perhaps we should add lvar and uvar as well here.
  )
  v = similar(F)
  return LMModel(j_prod!, jt_prod!, F, v, xk, σ, meta, Counters())
end

function NLPModels.obj(nlp::LMModel, x::AbstractVector{T}) where {T}
  @lencheck nlp.meta.nvar x
  increment!(nlp, :neval_obj)
  nlp.j_prod!(nlp.xk, x, nlp.v) # v = J(xk) x
  nlp.v .+= nlp.F               # v = J(xk) x + F(xk)
  return (dot(nlp.v, nlp.v) + nlp.σ * dot(x, x)) / 2
end

function NLPModels.grad!(nlp::LMModel, x::AbstractVector{T}, g::AbstractVector{T}) where {T}
  @lencheck nlp.meta.nvar x
  @lencheck nlp.meta.nvar g
  increment!(nlp, :neval_grad)
  nlp.j_prod!(nlp.xk, x, nlp.v)  # v = J(xk) x
  nlp.v .+= nlp.F                # v = J(xk) x + F(xk)
  nlp.jt_prod!(nlp.xk, nlp.v, g) # g = J(xk)' v
  @. g += nlp.σ * x
  return g
end
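For context, here is a minimal usage sketch (not part of the commit). It assumes the package defining `LMModel` and the standard NLPModels.jl API (`obj`, `grad!`) are loaded, and uses an illustrative residual F(x) = [x₁ - 1, 10(x₂ - x₁²)] with hand-written matrix-free Jacobian products; the names `jprod_ros!` and `jtprod_ros!` are hypothetical helpers, not part of the commit.

using LinearAlgebra, NLPModels

# Illustrative matrix-free products with the Jacobian J(x) = [1 0; -20x₁ 10].
jprod_ros!(xk, s, out)  = (out[1] = s[1]; out[2] = -20 * xk[1] * s[1] + 10 * s[2]; out)
jtprod_ros!(xk, r, out) = (out[1] = r[1] - 20 * xk[1] * r[2]; out[2] = 10 * r[2]; out)

xk = [0.5, 0.5]
F  = [xk[1] - 1, 10 * (xk[2] - xk[1]^2)]  # residual evaluated at xk
σ  = 0.1                                  # regularization parameter

model = LMModel(jprod_ros!, jtprod_ros!, F, σ, xk)

s  = zeros(2)
fs = obj(model, s)              # ½‖F(xk) + J(xk)s‖² + ½σ‖s‖²
gs = grad!(model, s, zeros(2))  # J(xk)'(F(xk) + J(xk)s) + σs

Since `obj` and `grad!` only call `j_prod!` and `jt_prod!`, the subproblem can be evaluated without ever forming J(xk) explicitly.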
