Skip to content

Commit ec28838

Browse files
Merge branch 'master' into nloptcons
2 parents 41c56df + 4c989c1 commit ec28838

File tree

57 files changed

+1329
-1194
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

57 files changed

+1329
-1194
lines changed

.github/workflows/CI.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@ jobs:
2222
- OptimizationBBO
2323
- OptimizationCMAEvolutionStrategy
2424
- OptimizationEvolutionary
25-
- OptimizationFlux
2625
- OptimizationGCMAES
2726
- OptimizationManopt
2827
- OptimizationMetaheuristics
@@ -36,6 +35,7 @@ jobs:
3635
- OptimizationQuadDIRECT
3736
- OptimizationSpeedMapping
3837
- OptimizationPolyalgorithms
38+
- OptimizationNLPModels
3939
version:
4040
- '1'
4141
steps:
@@ -61,7 +61,7 @@ jobs:
6161
GROUP: ${{ matrix.group }}
6262
- uses: julia-actions/julia-processcoverage@v1
6363
with:
64-
directories: src,lib/OptimizationBBO/src,lib/OptimizationCMAEvolutionStrategy/src,lib/OptimizationEvolutionary/src,lib/OptimizationFlux/src,lib/OptimizationGCMAES/src,lib/OptimizationMOI/src,lib/OptimizationMetaheuristics/src,lib/OptimizationMultistartOptimization/src,lib/OptimizationNLopt/src,lib/OptimizationNOMAD/src,lib/OptimizationOptimJL/src,lib/OptimizationOptimisers/src,lib/OptimizationPolyalgorithms/src,lib/OptimizationQuadDIRECT/src,lib/OptimizationSpeedMapping/src
64+
directories: src,lib/OptimizationBBO/src,lib/OptimizationCMAEvolutionStrategy/src,lib/OptimizationEvolutionary/src,lib/OptimizationGCMAES/src,lib/OptimizationManopt/src,lib/OptimizationMOI/src,lib/OptimizationMetaheuristics/src,lib/OptimizationMultistartOptimization/src,lib/OptimizationNLopt/src,lib/OptimizationNOMAD/src,lib/OptimizationOptimJL/src,lib/OptimizationOptimisers/src,lib/OptimizationPolyalgorithms/src,lib/OptimizationQuadDIRECT/src,lib/OptimizationSpeedMapping/src
6565
- uses: codecov/codecov-action@v4
6666
with:
6767
file: lcov.info

Project.toml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
name = "Optimization"
22
uuid = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
3-
version = "3.27.0"
3+
version = "3.28.0"
44

55
[deps]
66
ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
@@ -11,8 +11,8 @@ LBFGSB = "5be7bae1-8223-5378-bac3-9e7378a2f6e6"
1111
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
1212
Logging = "56ddb016-857b-54e1-b83d-db4d58db5568"
1313
LoggingExtras = "e6f89c97-d47a-5376-807f-9c37f3926c36"
14+
MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54"
1415
OptimizationBase = "bca83a33-5cc9-4baa-983d-23429ab6bcbb"
15-
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
1616
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
1717
ProgressLogging = "33c8b6b6-d38a-422a-b730-caa89a2f386c"
1818
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
@@ -29,8 +29,8 @@ LBFGSB = "0.4.1"
2929
LinearAlgebra = "1.10"
3030
Logging = "1.10"
3131
LoggingExtras = "0.4, 1"
32-
OptimizationBase = "1.3.3"
33-
Pkg = "1"
32+
MLUtils = "0.4.4"
33+
OptimizationBase = "2.0.3"
3434
Printf = "1.10"
3535
ProgressLogging = "0.1"
3636
Reexport = "1.2"

docs/Project.toml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@ Juniper = "2ddba703-00a4-53a7-87a5-e8b9971dde84"
1212
Manifolds = "1cead3c2-87b3-11e9-0ccd-23c62b72b94e"
1313
Manopt = "0fc0a36d-df90-57f3-8f93-d78a9fc72bb5"
1414
ModelingToolkit = "961ee093-0014-501f-94e3-6117800e7a78"
15+
NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"
16+
NLPModelsTest = "7998695d-6960-4d3a-85c4-e1bceb8cd856"
1517
NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd"
1618
Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
1719
OptimizationBBO = "3e6eede4-6085-4f62-9a71-46d9bc1eb92b"
@@ -24,6 +26,7 @@ OptimizationMOI = "fd9f6733-72f4-499f-8506-86b2bdd0dea1"
2426
OptimizationManopt = "e57b7fff-7ee7-4550-b4f0-90e9476e9fb6"
2527
OptimizationMetaheuristics = "3aafef2f-86ae-4776-b337-85a36adf0b55"
2628
OptimizationMultistartOptimization = "e4316d97-8bbb-4fd3-a7d8-3851d2a72823"
29+
OptimizationNLPModels = "064b21be-54cf-11ef-1646-cdfee32b588f"
2730
OptimizationNLopt = "4e6fcdb7-1186-4e1f-a706-475e75c168bb"
2831
OptimizationNOMAD = "2cab0595-8222-4775-b714-9828e6a9e01b"
2932
OptimizationOptimJL = "36348300-93cb-4f02-beb5-3c3902f8871e"
@@ -53,6 +56,8 @@ Juniper = "0.9"
5356
Manifolds = "0.9"
5457
Manopt = "0.4"
5558
ModelingToolkit = "9"
59+
NLPModels = "0.21"
60+
NLPModelsTest = "0.10"
5661
NLopt = "0.6, 1"
5762
Optimization = "3"
5863
OptimizationBBO = "0.1, 0.2, 0.3"
@@ -65,6 +70,7 @@ OptimizationMOI = "0.1, 0.2, 0.3, 0.4"
6570
OptimizationManopt = "0.0.2, 0.0.3"
6671
OptimizationMetaheuristics = "0.1, 0.2"
6772
OptimizationMultistartOptimization = "0.1, 0.2"
73+
OptimizationNLPModels = "0.0.1"
6874
OptimizationNLopt = "0.1, 0.2"
6975
OptimizationNOMAD = "0.1, 0.2"
7076
OptimizationOptimJL = "0.1, 0.2, 0.3"
@@ -76,5 +82,7 @@ OrdinaryDiffEq = "6"
7682
ReverseDiff = ">= 1.9.0"
7783
SciMLBase = "2.30.0"
7884
SciMLSensitivity = "7"
85+
SymbolicAnalysis = "0.3"
86+
Symbolics = "6"
7987
Tracker = ">= 0.2"
8088
Zygote = ">= 0.5"

docs/pages.jl

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@ pages = ["index.md",
3636
"PRIMA.jl" => "optimization_packages/prima.md",
3737
"Polyalgorithms.jl" => "optimization_packages/polyopt.md",
3838
"QuadDIRECT.jl" => "optimization_packages/quaddirect.md",
39-
"SpeedMapping.jl" => "optimization_packages/speedmapping.md"
39+
"SpeedMapping.jl" => "optimization_packages/speedmapping.md",
40+
"NLPModels.jl" => "optimization_packages/nlpmodels.md"
4041
]
4142
]

docs/src/index.md

Lines changed: 0 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -170,9 +170,7 @@ to add the specific wrapper packages.
170170
- Unconstrained: ✅
171171
</details>
172172
🟡 = supported in downstream library but not yet implemented in `Optimization.jl`; PR to add this functionality are welcome
173-
174173
## Citation
175-
176174
```
177175
@software{vaibhav_kumar_dixit_2023_7738525,
178176
author = {Vaibhav Kumar Dixit and Christopher Rackauckas},
@@ -184,48 +182,37 @@ to add the specific wrapper packages.
184182
url = {https://doi.org/10.5281/zenodo.7738525},
185183
year = 2023}
186184
```
187-
188185
## Reproducibility
189-
190186
```@raw html
191187
<details><summary>The documentation of this SciML package was built using these direct dependencies,</summary>
192188
```
193-
194189
```@example
195190
using Pkg # hide
196191
Pkg.status() # hide
197192
```
198-
199193
```@raw html
200194
</details>
201195
```
202-
203196
```@raw html
204197
<details><summary>and using this machine and Julia version.</summary>
205198
```
206-
207199
```@example
208200
using InteractiveUtils # hide
209201
versioninfo() # hide
210202
```
211-
212203
```@raw html
213204
</details>
214205
```
215-
216206
```@raw html
217207
<details><summary>A more complete overview of all dependencies and their versions is also provided.</summary>
218208
```
219-
220209
```@example
221210
using Pkg # hide
222211
Pkg.status(; mode = PKGMODE_MANIFEST) # hide
223212
```
224-
225213
```@raw html
226214
</details>
227215
```
228-
229216
```@eval
230217
using TOML
231218
using Markdown
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
# NLPModels.jl
2+
3+
[NLPModels](https://jso.dev/NLPModels.jl/latest/), similarly to Optimization.jl itself,
4+
provides a standardized modeling interface for representing Non-Linear Programs that
5+
facilitates using different solvers on the same problem. The Optimization.jl extension of
6+
NLPModels aims to provide a thin translation layer to make `NLPModel`s, the main export of
7+
the package, compatible with the optimizers in the Optimization.jl ecosystem.
8+
9+
## Installation: NLPModels.jl
10+
11+
To translate an `NLPModel`, install the OptimizationNLPModels package:
12+
13+
```julia
14+
import Pkg;
15+
Pkg.add("OptimizationNLPModels")
16+
```
17+
18+
The package NLPModels.jl itself contains no optimizers or models. Several packages
19+
provide optimization problems ([CUTEst.jl](https://jso.dev/CUTEst.jl/stable/),
20+
[NLPModelsTest.jl](https://jso.dev/NLPModelsTest.jl/dev/)) which can then be solved with
21+
any optimizer supported by Optimization.jl.
22+
23+
## Usage
24+
25+
For example, we can solve a problem defined in `NLPModelsTest` with
26+
[`Ipopt.jl`](https://github.com/jump-dev/Ipopt.jl). First, install the packages like so:
27+
28+
```julia
29+
import Pkg;
30+
Pkg.add("NLPModelsTest", "Ipopt")
31+
```
32+
33+
We instantiate [problem
34+
10](https://jso.dev/NLPModelsTest.jl/dev/reference/#NLPModelsTest.HS10) in the
35+
Hock--Schittkowski optimization suite available from `NLPModelsTest` as `HS10`, then
36+
translate it to an `OptimizationProblem`.
37+
38+
```@example NLPModels
39+
using OptimizationNLPModels, Optimization, NLPModelsTest, Ipopt
40+
using Optimization: OptimizationProblem
41+
nlpmodel = NLPModelsTest.HS10()
42+
prob = OptimizationProblem(nlpmodel, AutoForwardDiff())
43+
```
44+
45+
which can now be solved like any other `OptimizationProblem`:
46+
47+
```@example NLPModels
48+
sol = solve(prob, Ipopt.Optimizer())
49+
```
50+
51+
## API
52+
53+
Problems represented as `NLPModel`s can be used to create [`OptimizationProblem`](@ref)s and
54+
[`OptimizationFunction`](@ref).
Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
# Creating polyalgorithms by chaining solvers using `remake`
2+
3+
The general framework of using multiple solvers to use exploration-convergence alternations is commonly
4+
known as polyalgorithms. In the past, Optimization.jl provided a `PolyOpt` solver in [`OptimizationPolyalgorithms.jl`](@ref), which combined Adam from Optimisers.jl with BFGS from Optim.jl.
5+
With the large number of choices available through the interface unique combinations of solvers can be effective for specific problems.
6+
7+
In this tutorial we will demonstrate how to use the `remake` function to chain together solvers to create your own polyalgorithms.
8+
9+
The SciML interface provides a `remake` function which allows you to recreate the `OptimizationProblem` from a previously defined `OptimizationProblem` with different initial guess for the optimization variables.
10+
11+
Let's look at a 10-dimensional Schwefel function in the hypercube $x_i \in [-500, 500]$.
12+
13+
```@example polyalg
14+
using Optimization, Random
15+
using OptimizationBBO, ReverseDiff
16+
17+
Random.seed!(122333)
18+
19+
function f_schwefel(x, p = [418.9829])
20+
result = p[1] * length(x)
21+
for i in 1:length(x)
22+
result -= x[i] * sin(sqrt(abs(x[i])))
23+
end
24+
return result
25+
end
26+
27+
optf = OptimizationFunction(f_schwefel, Optimization.AutoReverseDiff(compile = true))
28+
29+
x0 = ones(10) .* 200.0
30+
prob = OptimizationProblem(
31+
optf, x0, [418.9829], lb = fill(-500.0, 10), ub = fill(500.0, 10))
32+
33+
@show f_schwefel(x0)
34+
```
35+
36+
Our polyalgorithm strategy will be to use BlackBoxOptim's global optimizers for efficient exploration of the
37+
parameter space followed by a quasi-Newton LBFGS method to (hopefully) converge to the global
38+
optimum.
39+
40+
```@example polyalg
41+
res1 = solve(prob, BBO_adaptive_de_rand_1_bin(), maxiters = 4000)
42+
43+
@show res1.objective
44+
```
45+
46+
This is a good start; can we converge to the global optimum?
47+
48+
```@example polyalg
49+
prob = remake(prob, u0 = res1.minimizer)
50+
res2 = solve(prob, Optimization.LBFGS(), maxiters = 100)
51+
52+
@show res2.objective
53+
```
54+
55+
Yay! We have found the global optimum (this is known to be at $x_i = 420.9687$).

lib/OptimizationBBO/src/OptimizationBBO.jl

Lines changed: 11 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ module OptimizationBBO
33
using Reexport
44
import Optimization
55
import BlackBoxOptim, Optimization.SciMLBase
6+
import Optimization.SciMLBase: MultiObjectiveOptimizationFunction
67

78
abstract type BBO end
89

@@ -15,6 +16,11 @@ for j in string.(BlackBoxOptim.SingleObjectiveMethodNames)
1516
eval(Meta.parse("export BBO_" * j))
1617
end
1718

19+
Base.@kwdef struct BBO_borg_moea <: BBO
20+
method = :borg_moea
21+
end
22+
export BBO_borg_moea
23+
1824
function decompose_trace(opt::BlackBoxOptim.OptRunController, progress)
1925
if progress
2026
maxiters = opt.max_steps
@@ -105,12 +111,6 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{
105111
}
106112
local x, cur, state
107113

108-
if cache.data != Optimization.DEFAULT_DATA
109-
maxiters = length(cache.data)
110-
end
111-
112-
cur, state = iterate(cache.data)
113-
114114
function _cb(trace)
115115
if cache.callback === Optimization.DEFAULT_CALLBACK
116116
cb_call = false
@@ -132,33 +132,24 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{
132132
BlackBoxOptim.shutdown_optimizer!(trace) #doesn't work
133133
end
134134

135-
if cache.data !== Optimization.DEFAULT_DATA
136-
cur, state = iterate(cache.data, state)
137-
end
138135
cb_call
139136
end
140137

141138
maxiters = Optimization._check_and_convert_maxiters(cache.solver_args.maxiters)
142139
maxtime = Optimization._check_and_convert_maxtime(cache.solver_args.maxtime)
143140

144141
_loss = function (θ)
145-
if cache.callback === Optimization.DEFAULT_CALLBACK &&
146-
cache.data === Optimization.DEFAULT_DATA
147-
return first(cache.f(θ, cache.p))
148-
elseif cache.callback === Optimization.DEFAULT_CALLBACK
149-
return first(cache.f(θ, cache.p, cur...))
150-
elseif cache.data !== Optimization.DEFAULT_DATA
151-
x = cache.f(θ, cache.p)
152-
return first(x)
142+
if isa(cache.f, MultiObjectiveOptimizationFunction)
143+
x = (cache.f(θ, cache.p),)
144+
return x[1]
153145
else
154-
x = cache.f(θ, cache.p, cur...)
146+
x = cache.f(θ, cache.p)
155147
return first(x)
156148
end
157149
end
158150

159151
opt_args = __map_optimizer_args(cache, cache.opt;
160-
callback = cache.callback === Optimization.DEFAULT_CALLBACK &&
161-
cache.data === Optimization.DEFAULT_DATA ?
152+
callback = cache.callback === Optimization.DEFAULT_CALLBACK ?
162153
nothing : _cb,
163154
cache.solver_args...,
164155
maxiters = maxiters,

0 commit comments

Comments
 (0)