
Commit 9c39e0f

reverted sequential running and uncommented ScipyBasinhopping
Signed-off-by: AdityaPandeyCN <[email protected]>
1 parent 1d073d3 commit 9c39e0f

File tree: 1 file changed (+17, -46 lines)

benchmarks/GlobalOptimization/blackbox_global_optimizers.jmd

Lines changed: 17 additions & 46 deletions
@@ -15,11 +15,10 @@ difficult global optima.
 ```julia
 using BlackBoxOptimizationBenchmarking, Plots, Optimization, Memoize, Statistics
 import BlackBoxOptimizationBenchmarking.Chain
-using BlackBoxOptimizationBenchmarking: BenchmarkSetup
 const BBOB = BlackBoxOptimizationBenchmarking
 
 using OptimizationBBO, OptimizationOptimJL, OptimizationEvolutionary, OptimizationNLopt
-using OptimizationMetaheuristics, OptimizationNOMAD, OptimizationPRIMA, OptimizationOptimisers, OptimizationSciPy
+using OptimizationMetaheuristics, OptimizationNOMAD, OptimizationPRIMA, OptimizationOptimisers, OptimizationSciPy, OptimizationPyCMA
 ```
 
 ```julia
@@ -33,15 +32,7 @@ test_functions = BBOB.list_functions()
 dimension = 3
 run_length = round.(Int, 10 .^ LinRange(1,5,30))
 
-@memoize function run_bench(algo)
-    return BBOB.benchmark(setup[algo], test_functions, run_length; Ntrials = 40, dimension = dimension)
-end
-
-const python_optimizers = Set([
-    "ScipyDifferentialEvolution", "ScipyBasinhopping", "ScipyDualAnnealing",
-    "ScipyShgo", "ScipyDirect", "ScipyBrute"
-])
-
+@memoize run_bench(algo) = BBOB.benchmark(setup[algo], test_functions, run_length, Ntrials=40, dimension = dimension)
 ```
 
 ```julia
@@ -69,11 +60,11 @@ setup = Dict(
     "Optimisers.RMSProp" => chain(Optimisers.RMSProp(), isboxed=false),
     # SciPy global optimizers
     "ScipyDifferentialEvolution" => chain(ScipyDifferentialEvolution(), isboxed=true),
-    "ScipyBasinhopping" => chain(ScipyBasinhopping(), isboxed=true),
-    "ScipyDualAnnealing" => chain(ScipyDualAnnealing(), isboxed=true),
-    "ScipyShgo" => chain(ScipyShgo(), isboxed=true),
-    "ScipyDirect" => chain(ScipyDirect(), isboxed=true),
-    "ScipyBrute" => chain(ScipyBrute(), isboxed=true),
+    #"ScipyBasinhopping" => chain(ScipyBasinhopping(), isboxed=true),
+    "ScipyDualAnnealing" => chain(ScipyDualAnnealing(), isboxed=true),
+    "ScipyShgo" => chain(ScipyShgo(), isboxed=true),
+    "ScipyDirect" => chain(ScipyDirect(), isboxed=true),
+    "ScipyBrute" => chain(ScipyBrute(), isboxed=true),
     # "NOMADOpt" => chain(NOMADOpt()), too much printing
     # "OptimizationPRIMA.UOBYQA()" => chain(OptimizationPRIMA.UOBYQA()), :StackOverflowError?
     # "OptimizationPRIMA.NEWUOA()" => OptimizationPRIMA.UOBYQA(),
@@ -100,52 +91,30 @@ f = test_functions[3]
 
 single_setup = BenchmarkSetup(NLopt.GN_CRS2_LM(), isboxed=true)
 
-sol = [BBOB.solve_problem(single_setup, f, 3, 5_000) for _ in 1:10]
-@info [s.objective < Δf + f.f_opt for s in sol]
+sol = [BBOB.solve_problem(single_setup, f, 3, 5_000) for in in 1:10]
+@info [sol.objective < Δf + f.f_opt for sol in sol]
 
 p = plot(f, size = (600,600), zoom = 1.5)
-for s in sol
-    scatter!(s.u[1:1], s.u[2:2], label = "", c = "blue", marker = :xcross, markersize = 5, markerstrokewidth = 0)
+for sol in sol
+    scatter!(sol.u[1:1], sol.u[2:2], label="", c="blue", marker = :xcross, markersize=5, markerstrokewidth=0)
 end
 p
 ```
 
 ## Test all
 
 ```julia
-results = Vector{Union{BBOB.BenchmarkResults, Nothing}}(undef, length(setup))
-
-all_keys = collect(keys(setup))
-
-for (i, algo) in enumerate(all_keys)
-    if algo in python_optimizers
-        try
-            results[i] = run_bench(algo)
-        catch err
-            @warn "Benchmark for $algo failed – skipping" exception = err
-        end
-    end
-end
+results = Array{BBOB.BenchmarkResults}(undef, length(setup))
 
-Threads.@threads for (i, algo) in collect(enumerate(all_keys))
-    if !(algo in python_optimizers)
-        try
-            results[i] = run_bench(algo)
-        catch err
-            @warn "Benchmark for $algo failed – skipping" exception = err
-        end
-    end
+Threads.@threads for (i,algo) in collect(enumerate(keys(setup)))
+    results[i] = run_bench(algo)
 end
 
-labels = all_keys
-valid_idx = [i for i in eachindex(results) if results[i] !== nothing]
-results = results[valid_idx]
-labels = labels[valid_idx]
-
 results
 ```
 
 ```julia
+labels = collect(keys(setup))
 idx = sortperm([b.success_rate[end] for b in results], rev=true)
 
 p = plot(xscale = :log10, legend = :outerright, size = (700,350), margin=10Plots.px, dpi=200)
@@ -173,6 +142,7 @@ p = heatmap(
 ```
 
 ```julia
+labels = collect(keys(setup))
 idx = sortperm([b.distance_to_minimizer[end] for b in results], rev=false)
 
 p = plot(xscale = :log10, legend = :outerright, size = (900,500), margin=10Plots.px, ylim = (0,5))
@@ -197,3 +167,4 @@ bar(
     legend = false, margin = 25Plots.px
 )
 ```
+
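For readers skimming the diff: the loop this commit restores memoizes each per-algorithm benchmark call and fills a preallocated results array from a threaded loop over the algorithm names. Below is a minimal, self-contained sketch of that pattern; the `run_bench_demo` function, the toy `algos` list, and the `sleep`-based workload are illustrative stand-ins, not code from the repository (the real file calls `BBOB.benchmark` with the `setup` dictionary shown in the diff above).

```julia
# Minimal sketch of the memoized + threaded benchmarking pattern (illustrative only).
using Memoize

# Stand-in for BBOB.benchmark(setup[algo], test_functions, run_length, ...):
# @memoize caches the result per `algo`, so repeated calls return the cached value.
@memoize function run_bench_demo(algo::String)
    sleep(0.1)                    # pretend this is an expensive benchmark run
    return (algo = algo, score = rand())
end

algos = ["BFGS", "DE", "CMA-ES"]          # hypothetical algorithm names
results = Vector{Any}(undef, length(algos))

# Same shape as the restored loop: each iteration writes only its own slot of
# `results`, so the threaded loop needs no locking around the array writes.
Threads.@threads for (i, algo) in collect(enumerate(algos))
    results[i] = run_bench_demo(algo)
end

results
```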