export LMModel
2+
@doc raw"""
    LMModel(j_prod!, jt_prod!, F, σ, xk)

Given the unconstrained optimization problem:
```math
\min \tfrac{1}{2} \|F(x)\|^2,
```
this model represents the smooth LM (Levenberg–Marquardt) subproblem:
```math
\min_s \ \tfrac{1}{2} \|F(x) + J(x)s\|^2 + \tfrac{1}{2} σ \|s\|^2
```
where `J` is the Jacobian of `F` at `xk`, represented via matrix-free operations:
`j_prod!(xk, s, out)` computes `J(xk) * s` in place, and `jt_prod!(xk, r, out)`
computes `J(xk)' * r` in place.

`σ > 0` is the regularization parameter. The field `v` is a work buffer of the
same size as `F(xk)` used for intermediary computations; it is allocated by the
constructor, not passed by the caller.
"""
mutable struct LMModel{T <: Real, V <: AbstractVector{T}, J <: Function, Jt <: Function} <:
               AbstractNLPModel{T, V}
  j_prod!::J           # (xk, s, out) -> out = J(xk) * s
  jt_prod!::Jt         # (xk, r, out) -> out = J(xk)' * r
  F::V                 # residual F(xk), fixed for the subproblem
  v::V                 # work buffer, same size as F
  xk::V                # linearization point
  σ::T                 # regularization parameter (σ > 0)
  meta::NLPModelMeta{T, V}
  counters::Counters
end
30+
"""
    LMModel(j_prod!, jt_prod!, F, σ, xk)

Construct an [`LMModel`](@ref) from matrix-free Jacobian products `j_prod!` and
`jt_prod!`, residual `F = F(xk)`, regularization parameter `σ`, and
linearization point `xk`. The work buffer `v` is allocated here via
`similar(F)`.
"""
function LMModel(
  j_prod!::J,
  jt_prod!::Jt,
  F::V,
  σ::T,
  xk::V,
) where {T <: Real, V <: AbstractVector{T}, J <: Function, Jt <: Function}
  # Constrain the type parameters to match the struct so invalid argument
  # types fail here with a clear MethodError instead of inside the inner
  # constructor.
  meta = NLPModelMeta(
    length(xk),
    x0 = xk, # TODO(review): consider exposing lvar and uvar bounds as well.
  )
  v = similar(F) # uninitialized work buffer; obj/grad! overwrite it before reading
  return LMModel(j_prod!, jt_prod!, F, v, xk, σ, meta, Counters())
end
39+
"""
    obj(nlp::LMModel, x)

Evaluate the LM subproblem objective at `x`:
`½‖J(xk) x + F(xk)‖² + ½σ‖x‖²`. Overwrites the work buffer `nlp.v`.
"""
function NLPModels.obj(nlp::LMModel, x::AbstractVector{T}) where {T}
  @lencheck nlp.meta.nvar x
  increment!(nlp, :neval_obj)
  r = nlp.v
  nlp.j_prod!(nlp.xk, x, r) # r = J(xk) * x
  r .+= nlp.F               # r = J(xk) * x + F(xk)
  quadratic = dot(r, r)     # ‖J x + F‖²
  regularizer = nlp.σ * dot(x, x) # σ‖x‖²
  return (quadratic + regularizer) / 2
end
47+
"""
    grad!(nlp::LMModel, x, g)

Evaluate the gradient of the LM subproblem objective at `x` in place:
`g = J(xk)' * (J(xk) x + F(xk)) + σ x`. Overwrites the work buffer `nlp.v`
and returns `g`.
"""
function NLPModels.grad!(nlp::LMModel, x::AbstractVector{T}, g::AbstractVector{T}) where {T}
  @lencheck nlp.meta.nvar x
  @lencheck nlp.meta.nvar g
  increment!(nlp, :neval_grad)
  r = nlp.v
  nlp.j_prod!(nlp.xk, x, r)  # r = J(xk) * x
  r .+= nlp.F                # r = J(xk) * x + F(xk)
  nlp.jt_prod!(nlp.xk, r, g) # g = J(xk)' * r
  g .+= nlp.σ .* x           # add gradient of the ½σ‖x‖² term
  return g
end