Skip to content

Commit a2be027

Browse files
Fixed OptimizationCache issue
1 parent 8d9cc12 commit a2be027

File tree

3 files changed

+29
-24
lines changed

3 files changed

+29
-24
lines changed

lib/OptimizationOptimJL/Project.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,13 +6,15 @@ version = "0.4.3"
66
[deps]
77
Optim = "429524aa-4258-5aef-a3af-852621145aeb"
88
Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
9+
OptimizationBase = "bca83a33-5cc9-4baa-983d-23429ab6bcbb"
910
PrecompileTools = "aea7be01-6a6a-4083-8856-8a6e6704d82a"
1011
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
1112
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
1213

1314
[compat]
1415
Optim = "1"
1516
Optimization = "4"
17+
OptimizationBase = "2.10.0"
1618
PrecompileTools = "1.2"
1719
Reexport = "1.2"
1820
SparseArrays = "1.6"

lib/OptimizationOptimJL/src/OptimizationOptimJL.jl

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
11
module OptimizationOptimJL
22

33
using Reexport
4-
@reexport using Optim, Optimization
5-
using Optimization.SciMLBase, SparseArrays
4+
@reexport using Optim
5+
using Optimization
6+
using OptimizationBase.SciMLBase, SparseArrays
7+
using OptimizationBase: OptimizationCache
68
decompose_trace(trace::Optim.OptimizationTrace) = last(trace)
79
decompose_trace(trace::Optim.OptimizationState) = trace
810

lib/OptimizationOptimJL/test/runtests.jl

Lines changed: 23 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
using OptimizationOptimJL,
22
OptimizationOptimJL.Optim, Optimization, ForwardDiff, Zygote, ReverseDiff,
3-
Random, ModelingToolkit, Optimization.OptimizationBase.DifferentiationInterface
3+
Random, ModelingToolkit, OptimizationBase.DifferentiationInterface
4+
using OptimizationBase.SciMLBase: OptimizationFunction, OptimizationProblem, solve, MaxSense
45
using Test
56

67
struct CallbackTester
@@ -42,7 +43,7 @@ end
4243
b = 0.5)); callback = CallbackTester(length(x0)))
4344
@test 10 * sol.objective < l1
4445

45-
f = OptimizationFunction(rosenbrock, AutoReverseDiff())
46+
f = OptimizationFunction(rosenbrock, OptimizationBase.AutoReverseDiff())
4647

4748
Random.seed!(1234)
4849
prob = OptimizationProblem(f, x0, _p, lb = [-1.0, -1.0], ub = [0.8, 0.8])
@@ -85,14 +86,14 @@ end
8586
@test sol.original.iterations > 2
8687

8788
cons = (res, x, p) -> res .= [x[1]^2 + x[2]^2]
88-
optprob = OptimizationFunction(rosenbrock, Optimization.AutoModelingToolkit();
89+
optprob = OptimizationFunction(rosenbrock, OptimizationBase.AutoModelingToolkit();
8990
cons = cons)
9091

9192
prob = OptimizationProblem(optprob, x0, _p, lcons = [-5.0], ucons = [10.0])
9293
sol = solve(prob, IPNewton())
9394
@test 10 * sol.objective < l1
9495

95-
optprob = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff();
96+
optprob = OptimizationFunction(rosenbrock, OptimizationBase.AutoForwardDiff();
9697
cons = cons)
9798

9899
prob = OptimizationProblem(optprob, x0, _p, lcons = [-Inf], ucons = [Inf])
@@ -108,14 +109,14 @@ end
108109
res .= [x[1]^2 + x[2]^2, x[2] * sin(x[1]) - x[1]]
109110
end
110111

111-
optprob = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff();
112+
optprob = OptimizationFunction(rosenbrock, OptimizationBase.AutoForwardDiff();
112113
cons = con2_c)
113114
prob = OptimizationProblem(optprob, x0, _p, lcons = [-Inf, -Inf], ucons = [Inf, Inf])
114115
sol = solve(prob, IPNewton())
115116
@test 10 * sol.objective < l1
116117

117118
cons_circ = (res, x, p) -> res .= [x[1]^2 + x[2]^2]
118-
optprob = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff();
119+
optprob = OptimizationFunction(rosenbrock, OptimizationBase.AutoForwardDiff();
119120
cons = cons_circ)
120121
prob = OptimizationProblem(optprob, x0, _p, lcons = [-Inf], ucons = [0.25^2])
121122
cache = Optimization.init(prob, Optim.IPNewton())
@@ -124,7 +125,7 @@ end
124125
cons(res, sol.u, nothing)
125126
@test sqrt(res[1]) ≈ 0.25 rtol=1e-6
126127

127-
optprob = OptimizationFunction(rosenbrock, Optimization.AutoZygote())
128+
optprob = OptimizationFunction(rosenbrock, OptimizationBase.AutoZygote())
128129

129130
prob = OptimizationProblem(optprob, x0, _p, lb = [-1.0, -1.0], ub = [0.8, 0.8])
130131
sol = solve(
@@ -137,8 +138,8 @@ end
137138
sol = Optimization.solve!(cache)
138139
@test 10 * sol.objective < l1
139140

140-
optprob = OptimizationFunction((x, p) -> -rosenbrock(x, p), Optimization.AutoZygote())
141-
prob = OptimizationProblem(optprob, x0, _p; sense = Optimization.MaxSense)
141+
optprob = OptimizationFunction((x, p) -> -rosenbrock(x, p), OptimizationBase.AutoZygote())
142+
prob = OptimizationProblem(optprob, x0, _p; sense = MaxSense)
142143

143144
sol = solve(prob, NelderMead())
144145
@test 10 * sol.objective < l1
@@ -150,28 +151,28 @@ end
150151
G[1] = -2.0 * (1.0 - x[1]) - 400.0 * (x[2] - x[1]^2) * x[1]
151152
G[2] = 200.0 * (x[2] - x[1]^2)
152153
end
153-
optprob = OptimizationFunction((x, p) -> -rosenbrock(x, p), Optimization.AutoZygote(),
154+
optprob = OptimizationFunction((x, p) -> -rosenbrock(x, p), OptimizationBase.AutoZygote(),
154155
grad = g!)
155-
prob = OptimizationProblem(optprob, x0, _p; sense = Optimization.MaxSense)
156+
prob = OptimizationProblem(optprob, x0, _p; sense = MaxSense)
156157
sol = solve(prob, BFGS())
157158
@test 10 * sol.objective < l1
158159

159-
optprob = OptimizationFunction(rosenbrock, Optimization.AutoModelingToolkit())
160+
optprob = OptimizationFunction(rosenbrock, OptimizationBase.AutoModelingToolkit())
160161
prob = OptimizationProblem(optprob, x0, _p)
161162
sol = solve(prob, Optim.BFGS())
162163
@test 10 * sol.objective < l1
163164

164165
optprob = OptimizationFunction(rosenbrock,
165-
Optimization.AutoModelingToolkit(true, false))
166+
OptimizationBase.AutoModelingToolkit(true, false))
166167
prob = OptimizationProblem(optprob, x0, _p)
167168
sol = solve(prob, Optim.Newton())
168-
@test 10 * sol.objective < l1
169+
@test sol.objective < l1
169170

170171
sol = solve(prob, Optim.KrylovTrustRegion())
171-
@test 10 * sol.objective < l1
172+
@test sol.objective < l1
172173

173174
prob = OptimizationProblem(
174-
optprob, x0, _p; sense = Optimization.MaxSense, lb = [-1.0, -1.0], ub = [0.8, 0.8])
175+
optprob, x0, _p; sense = MaxSense, lb = [-1.0, -1.0], ub = [0.8, 0.8])
175176
sol = solve(prob, BFGS())
176177
@test 10 * sol.objective < l1
177178

@@ -200,17 +201,17 @@ end
200201
@test 10 * sol.objective < l1
201202

202203
@testset "cache" begin
203-
objective(x, p) = (p[1] - x[1])^2
204-
x0 = zeros(1)
205-
p = [1.0]
204+
objective(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
205+
x0 = zeros(2)
206+
p = [1.0, 100.0]
206207

207208
prob = OptimizationProblem(objective, x0, p)
208209
cache = Optimization.init(prob, Optim.NelderMead())
209210
sol = Optimization.solve!(cache)
210-
@test sol.u ≈ [1.0] atol=1e-3
211+
@test sol.u ≈ [1.0, 1.0] atol=1e-3
211212

212-
cache = Optimization.reinit!(cache; p = [2.0])
213+
cache = Optimization.reinit!(cache; p = [2.0, 100.0])
213214
sol = Optimization.solve!(cache)
214-
@test sol.u ≈ [2.0] atol=1e-3
215+
@test sol.u ≈ [2.0, 4.0] atol=1e-3 # expected minimum for p = [2.0, 100.0]
215216
end
216217
end

0 commit comments

Comments
 (0)