
Commit 0d52ed0

Add bound-constrained NLS (#98)
* Add bound-constrained NLS
1 parent 4e3287f commit 0d52ed0

File tree: 2 files changed, +181 −1 lines changed

src/NLPModelsTest.jl

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ using NLPModels, NLPModelsModifiers
 
 const nlp_problems =
   ["BROWNDEN", "HS5", "HS6", "HS10", "HS11", "HS13", "HS14", "LINCON", "LINSV", "MGH01Feas"]
-const nls_problems = ["LLS", "MGH01", "NLSHS20", "NLSLC"]
+const nls_problems = ["LLS", "MGH01", "BNDROSENBROCK", "NLSHS20", "NLSLC"]
 
 # Including problems so that they won't be multiply loaded
 # GENROSE does not have a manual version, so it's separate

src/nls/problems/bndrosenbrock.jl

Lines changed: 180 additions & 0 deletions
@@ -0,0 +1,180 @@
export BNDROSENBROCK

"""
    nls = BNDROSENBROCK()

## Rosenbrock function in nonlinear least squares format with bound constraints.

```math
\\begin{aligned}
\\min \\quad & \\tfrac{1}{2}\\| F(x) \\|^2 \\\\
\\text{s. to} \\quad & -1 \\leq x_1 \\leq 0.8 \\\\
& -2 \\leq x_2 \\leq 2
\\end{aligned}
```
where
```math
F(x) = \\begin{bmatrix}
1 - x_1 \\\\
10 (x_2 - x_1^2)
\\end{bmatrix}.
```

Starting point: `[-1.2; 1]`.
"""
mutable struct BNDROSENBROCK{T, S} <: AbstractNLSModel{T, S}
  meta::NLPModelMeta{T, S}
  nls_meta::NLSMeta{T, S}
  counters::NLSCounters
end

function BNDROSENBROCK(::Type{T}) where {T}
  meta = NLPModelMeta{T, Vector{T}}(2, x0 = T[-1.2; 1], lvar = T[-1; -2], uvar = T[0.8; 2], name = "BNDROSENBROCK_manual")
  nls_meta = NLSMeta{T, Vector{T}}(2, 2, nnzj = 3, nnzh = 1)

  return BNDROSENBROCK(meta, nls_meta, NLSCounters())
end
BNDROSENBROCK() = BNDROSENBROCK(Float64)

function NLPModels.residual!(nls::BNDROSENBROCK, x::AbstractVector, Fx::AbstractVector)
  @lencheck 2 x Fx
  increment!(nls, :neval_residual)
  Fx[1] = 1 - x[1]
  Fx[2] = 10 * (x[2] - x[1]^2)
  return Fx
end

# Jx = [-1 0; -20x₁ 10]
function NLPModels.jac_structure_residual!(
  nls::BNDROSENBROCK,
  rows::AbstractVector{<:Integer},
  cols::AbstractVector{<:Integer},
)
  @lencheck 3 rows cols
  rows[1] = 1
  cols[1] = 1
  rows[2] = 2
  cols[2] = 1
  rows[3] = 2
  cols[3] = 2
  return rows, cols
end

function NLPModels.jac_coord_residual!(nls::BNDROSENBROCK, x::AbstractVector, vals::AbstractVector)
  @lencheck 2 x
  @lencheck 3 vals
  increment!(nls, :neval_jac_residual)
  vals[1] = -1
  vals[2] = -20x[1]
  vals[3] = 10
  return vals
end

function NLPModels.jprod_residual!(
  nls::BNDROSENBROCK,
  x::AbstractVector,
  v::AbstractVector,
  Jv::AbstractVector,
)
  @lencheck 2 x v Jv
  increment!(nls, :neval_jprod_residual)
  Jv[1] = -v[1]
  Jv[2] = -20 * x[1] * v[1] + 10 * v[2]
  return Jv
end

function NLPModels.jtprod_residual!(
  nls::BNDROSENBROCK,
  x::AbstractVector,
  v::AbstractVector,
  Jtv::AbstractVector,
)
  @lencheck 2 x v Jtv
  increment!(nls, :neval_jtprod_residual)
  Jtv[1] = -v[1] - 20 * x[1] * v[2]
  Jtv[2] = 10 * v[2]
  return Jtv
end

function NLPModels.hess_structure_residual!(
  nls::BNDROSENBROCK,
  rows::AbstractVector{<:Integer},
  cols::AbstractVector{<:Integer},
)
  @lencheck 1 rows cols
  rows[1] = 1
  cols[1] = 1
  return rows, cols
end

function NLPModels.hess_coord_residual!(
  nls::BNDROSENBROCK,
  x::AbstractVector,
  v::AbstractVector,
  vals::AbstractVector,
)
  @lencheck 2 x v
  @lencheck 1 vals
  increment!(nls, :neval_hess_residual)
  vals[1] = -20v[2]
  return vals
end

function NLPModels.hprod_residual!(
  nls::BNDROSENBROCK,
  x::AbstractVector,
  i::Int,
  v::AbstractVector,
  Hiv::AbstractVector,
)
  @lencheck 2 x v Hiv
  increment!(nls, :neval_hprod_residual)
  if i == 2
    Hiv[1] = -20v[1]
    Hiv[2] = zero(eltype(x))
  else
    Hiv .= zero(eltype(x))
  end
  return Hiv
end

function NLPModels.hess_structure!(nls::BNDROSENBROCK, rows::AbstractVector{Int}, cols::AbstractVector{Int})
  @lencheck 3 rows cols
  n = nls.meta.nvar
  k = 0
  for j = 1:n, i = j:n
    k += 1
    rows[k] = i
    cols[k] = j
  end
  return rows, cols
end

function NLPModels.hess_coord!(
  nls::BNDROSENBROCK,
  x::AbstractVector{T},
  vals::AbstractVector;
  obj_weight = one(T),
) where {T}
  @lencheck 2 x
  @lencheck 3 vals
  vals[1] = T(1) - 200 * x[2] + 600 * x[1]^2
  vals[2] = -200 * x[1]
  vals[3] = T(100)
  vals .*= obj_weight
  return vals
end

function NLPModels.hprod!(
  nls::BNDROSENBROCK,
  x::AbstractVector{T},
  v::AbstractVector{T},
  Hv::AbstractVector{T};
  obj_weight = one(T),
) where {T}
  @lencheck 2 x v Hv
  increment!(nls, :neval_hprod)
  Hv[1] = obj_weight * ((T(1) - 200 * x[2] + 600 * x[1]^2) * v[1] - 200 * x[1] * v[2])
  Hv[2] = obj_weight * (-200 * x[1] * v[1] + T(100) * v[2])
  return Hv
end
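
For reference, a minimal usage sketch (not part of the commit) exercising the new problem through the standard NLPModels.jl API; the expected values follow from the residual and starting point in the docstring above.

# Illustrative sketch, not part of the commit.
using NLPModels, NLPModelsTest

nls = BNDROSENBROCK()        # Float64 by default

# Bounds registered in NLPModelMeta: -1 ≤ x₁ ≤ 0.8, -2 ≤ x₂ ≤ 2
nls.meta.lvar                # [-1.0, -2.0]
nls.meta.uvar                # [0.8, 2.0]

x = nls.meta.x0              # starting point [-1.2, 1.0]
residual(nls, x)             # [1 - x₁, 10(x₂ - x₁²)] = [2.2, -4.4]
obj(nls, x)                  # ½‖F(x)‖² = ½(2.2² + 4.4²) = 12.1
grad(nls, x)                 # J(x)ᵀ F(x), via the default NLS gradient (jtprod_residual!)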
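
As a sanity check on `hess_coord!` and `hprod!`: for f(x) = ½‖F(x)‖², the objective Hessian is ∇²f(x) = J(x)ᵀJ(x) + F₂(x)∇²F₂(x) (since ∇²F₁ = 0), which yields the entries 1 + 600x₁² − 200x₂, −200x₁, and 100 used above. A small verification sketch, again assuming only the standard NLPModels.jl API:

# Illustrative check, not part of the commit.
using NLPModels, NLPModelsTest

nls = BNDROSENBROCK()
x = nls.meta.x0

J  = jac_residual(nls, x)          # [-1 0; -20x₁ 10]
F  = residual(nls, x)
H2 = [-20.0 0.0; 0.0 0.0]          # ∇²F₂ (∇²F₁ is zero)

H_manual = Matrix(J' * J) + F[2] * H2

# Rebuild ∇²f column by column with hprod (obj_weight defaults to 1)
H_model = hcat(hprod(nls, x, [1.0, 0.0]), hprod(nls, x, [0.0, 1.0]))

@assert H_model ≈ H_manual         # [665 240; 240 100] at x = [-1.2, 1.0]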
