From 2646d296301568c646e7228c6d9cc698f306eb00 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sat, 7 Sep 2024 17:16:09 +0530 Subject: [PATCH 01/28] MOO Docs updated blackboxoptim.md Added documentation for MOO in BBO --- .../src/optimization_packages/blackboxoptim.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docs/src/optimization_packages/blackboxoptim.md b/docs/src/optimization_packages/blackboxoptim.md index ca5b2385b..738add663 100644 --- a/docs/src/optimization_packages/blackboxoptim.md +++ b/docs/src/optimization_packages/blackboxoptim.md @@ -67,3 +67,21 @@ prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0, sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 100000, maxtime = 1000.0) ``` + +## Multi-objective optimization +The optimizer for Multi-Objective Optimization is `BBO_borg_moea()`. Your fitness function should return a tuple of the objective values and you should indicate the fitness scheme to be (typically) Pareto fitness and specify the number of objectives. Otherwise, the use is similar, here is an example: + +```@example MOO-BBO +using OptimizationBBO, Optimization, BlackBoxOptim +using SciMLBase: MultiObjectiveOptimizationFunction +u0 = [0.25, 0.25] +opt = OptimizationBBO.BBO_borg_moea() +function multi_obj_func_2(x, p) + f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function + f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function + return (f1, f2) +end +mof = MultiObjectiveOptimizationFunction(multi_obj_func_2) +prob = Optimization.OptimizationProblem(mof_2, u0; lb = [0.0, 0.0], ub = [2.0, 2.0]) +sol = solve(prob_2, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) +``` From 79a297b11f24d287f987b280f5b8e335bb388ae6 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Fri, 20 Sep 2024 21:52:51 +0530 Subject: [PATCH 02/28] Update evolutionary.md MOO docs update. 
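For quick verification of the Borg MOEA example above: once its solve has run (with the `mof_2`/`prob_2` name typos corrected, as later patches in this series do), the returned object can be inspected like any other Optimization.jl solution. A minimal sketch, assuming the solve bound its result to `sol`:

```julia
@show sol.u           # decision variables found by BBO_borg_moea()
@show sol.objective   # two objective values: (Rosenbrock, Ackley)
@assert length(sol.objective) == 2
```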
--- .../src/optimization_packages/evolutionary.md | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/src/optimization_packages/evolutionary.md b/docs/src/optimization_packages/evolutionary.md index 9fa582c74..9ed2a2645 100644 --- a/docs/src/optimization_packages/evolutionary.md +++ b/docs/src/optimization_packages/evolutionary.md @@ -41,3 +41,23 @@ f = OptimizationFunction(rosenbrock) prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0, 1.0]) sol = solve(prob, Evolutionary.CMAES(μ = 40, λ = 100)) ``` + +## Multi-objective optimization +The Rosenbrock and Ackley functions can be optimized using the `Evolutionary.NSGA2()` as follows: + +```@example MOO-Evolutionary +using Optimization, OptimizationEvolutionary +function func(x, p=nothing)::Vector{Float64} + f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function + f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function + return [f1, f2] +end +initial_guess = [1.0, 1.0] +function gradient_multi_objective(x, p=nothing) + ForwardDiff.jacobian(func, x) +end +obj_func = MultiObjectiveOptimizationFunction(func, jac=gradient_multi_objective) +algorithm = OptimizationEvolutionary.NSGA2() +problem = OptimizationProblem(obj_func, initial_guess) +result = solve(problem, algorithm) +``` From 1394ec40eaea45fb84a3972f9dde200201131bf1 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Fri, 20 Sep 2024 21:59:09 +0530 Subject: [PATCH 03/28] Update metaheuristics.md MOO docs update. --- .../optimization_packages/metaheuristics.md | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index ae1694bcc..53b6750ad 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -70,3 +70,54 @@ sol = solve(prob, ECA(), use_initial = true, maxiters = 100000, maxtime = 1000.0 ### With Constraint Equations While `Metaheuristics.jl` supports such constraints, `Optimization.jl` currently does not relay these constraints. + + +## Multi-objective optimization +The zdt1 functions can be optimized using the `Metaheuristics.jl` as follows: + +```@example MOO-Evolutionary +using Optimization, OptimizationEvolutionary +function zdt1(x) + f1 = x[1] + g = 1 + 9 * mean(x[2:end]) + h = 1 - sqrt(f1 / g) + f2 = g * h + # In this example, we have no constraints + gx = [0.0] # Inequality constraints (not used) + hx = [0.0] # Equality constraints (not used) + return [f1, f2], gx, hx +end +multi_obj_fun = MultiObjectiveOptimizationFunction((x, p) -> zdt1(x)) + +# Define the problem bounds +lower_bounds = [0.0, 0.0, 0.0] +upper_bounds = [1.0, 1.0, 1.0] + +# Define the initial guess +initial_guess = [0.5, 0.5, 0.5] + +# Create the optimization problem +prob = OptimizationProblem(multi_obj_fun, initial_guess; lb = lower_bounds, ub = upper_bounds) + +nobjectives = 2 +npartitions = 100 + +# reference points (Das and Dennis's method) +weights = gen_ref_dirs(nobjectives, npartitions) + +# Choose the algorithm as required. 
+alg1 = NSGA2() +alg2 = NSGA3() +alg3 = SPEA2() +alg4 = CCMO(NSGA2(N=100, p_m=0.001)) +alg5 = MOEAD_DE(weights, options=Options(debug=false, iterations = 250)) +alg6 = SMS_EMOA() + +# Solve the problem +sol1 = solve(prob, alg1; maxiters = 100, use_initial = true) +sol2 = solve(prob, alg2; maxiters = 100, use_initial = true) +sol3 = solve(prob, alg3; maxiters = 100, use_initial = true) +sol4 = solve(prob, alg4) +sol5 = solve(prob, alg5; maxiters = 100, use_initial = true) +sol6 = solve(prob, alg6; maxiters = 100, use_initial = true) +``` From 5af0e2bb7943373ccf827cc22475632419edfc00 Mon Sep 17 00:00:00 2001 From: Vaibhav Kumar Dixit Date: Sat, 21 Sep 2024 21:10:29 -0400 Subject: [PATCH 04/28] Update docs/src/optimization_packages/blackboxoptim.md --- docs/src/optimization_packages/blackboxoptim.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/optimization_packages/blackboxoptim.md b/docs/src/optimization_packages/blackboxoptim.md index 738add663..010bd63fe 100644 --- a/docs/src/optimization_packages/blackboxoptim.md +++ b/docs/src/optimization_packages/blackboxoptim.md @@ -69,7 +69,7 @@ sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 100000, ``` ## Multi-objective optimization -The optimizer for Multi-Objective Optimization is `BBO_borg_moea()`. Your fitness function should return a tuple of the objective values and you should indicate the fitness scheme to be (typically) Pareto fitness and specify the number of objectives. Otherwise, the use is similar, here is an example: +The optimizer for Multi-Objective Optimization is `BBO_borg_moea()`. Your objective function should return a tuple of the objective values and you should indicate the fitness scheme to be (typically) Pareto fitness and specify the number of objectives. Otherwise, the use is similar, here is an example: ```@example MOO-BBO using OptimizationBBO, Optimization, BlackBoxOptim From 37aa52bbd3a9e4212cdbbf3b90faf4cdc9838489 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 22 Sep 2024 10:22:10 +0530 Subject: [PATCH 05/28] Update Project.toml updated project.toml for the docs. --- docs/Project.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/Project.toml b/docs/Project.toml index 060076341..4a95e33df 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -42,6 +42,7 @@ SymbolicAnalysis = "4297ee4d-0239-47d8-ba5d-195ecdf594fe" Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7" Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" +BlackBoxOptim = "a134a8b2-14d6-55f6-9291-3336d3ab0209" [compat] AmplNLWriter = "1" From 81f5e8a8d4d6049acdc67eb045cba0f4f8a9b285 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 22 Sep 2024 10:23:06 +0530 Subject: [PATCH 06/28] Update Project.toml Added compat for BBO. 
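One small prerequisite of the zdt1 example added above: `mean` comes from the `Statistics` standard library rather than `Base`, so unless one of the loaded packages happens to re-export it (an assumption worth checking in the docs build), the example needs one extra import alongside its `using` line:

```julia
using Statistics: mean   # zdt1 calls mean(x[2:end])
```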
--- docs/Project.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/Project.toml b/docs/Project.toml index 4a95e33df..3559510a1 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -87,3 +87,4 @@ SymbolicAnalysis = "0.3" Symbolics = "6" Tracker = ">= 0.2" Zygote = ">= 0.5" +BlackBoxOptim = "0.6" From 238dcbe13bc2963978dd1b24d0c24d370afd3278 Mon Sep 17 00:00:00 2001 From: Vaibhav Kumar Dixit Date: Sun, 22 Sep 2024 13:28:57 -0400 Subject: [PATCH 07/28] Update docs/src/optimization_packages/blackboxoptim.md --- docs/src/optimization_packages/blackboxoptim.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/optimization_packages/blackboxoptim.md b/docs/src/optimization_packages/blackboxoptim.md index 010bd63fe..ae512ca9f 100644 --- a/docs/src/optimization_packages/blackboxoptim.md +++ b/docs/src/optimization_packages/blackboxoptim.md @@ -82,6 +82,6 @@ function multi_obj_func_2(x, p) return (f1, f2) end mof = MultiObjectiveOptimizationFunction(multi_obj_func_2) -prob = Optimization.OptimizationProblem(mof_2, u0; lb = [0.0, 0.0], ub = [2.0, 2.0]) +prob = Optimization.OptimizationProblem(mof, u0; lb = [0.0, 0.0], ub = [2.0, 2.0]) sol = solve(prob_2, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) ``` From 9198cb42a40d01befb47b5fae1c3e91f78704242 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Tue, 24 Sep 2024 02:06:07 +0530 Subject: [PATCH 08/28] Update metaheuristics.md Added required packages for MOO docs. --- docs/src/optimization_packages/metaheuristics.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index 53b6750ad..13fd60651 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -76,7 +76,7 @@ While `Metaheuristics.jl` supports such constraints, `Optimization.jl` currently The zdt1 functions can be optimized using the `Metaheuristics.jl` as follows: ```@example MOO-Evolutionary -using Optimization, OptimizationEvolutionary +using Optimization, OptimizationEvolutionary,OptimizationMetaheuristics, Metaheuristics function zdt1(x) f1 = x[1] g = 1 + 9 * mean(x[2:end]) @@ -103,7 +103,7 @@ nobjectives = 2 npartitions = 100 # reference points (Das and Dennis's method) -weights = gen_ref_dirs(nobjectives, npartitions) +weights = Metaheuristics.gen_ref_dirs(nobjectives, npartitions) # Choose the algorithm as required. 
alg1 = NSGA2() From be6388baa36b67d3edb5c5081023d36d37460828 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Tue, 24 Sep 2024 02:07:13 +0530 Subject: [PATCH 09/28] Update Project.toml added required packages for MOO --- docs/Project.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/Project.toml b/docs/Project.toml index 3559510a1..c722e4e2b 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -43,6 +43,7 @@ Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7" Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" BlackBoxOptim = "a134a8b2-14d6-55f6-9291-3336d3ab0209" +Metaheuristics = "bcdb8e00-2c21-11e9-3065-2b553b22f898" [compat] AmplNLWriter = "1" @@ -88,3 +89,5 @@ Symbolics = "6" Tracker = ">= 0.2" Zygote = ">= 0.5" BlackBoxOptim = "0.6" +Metaheuristics = "3" + From 5b3a0b518ccd5f9f164f5c91cc02968120a0185a Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Tue, 24 Sep 2024 02:08:41 +0530 Subject: [PATCH 10/28] Update blackboxoptim.md Corrected function names for MOO docs. --- docs/src/optimization_packages/blackboxoptim.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/optimization_packages/blackboxoptim.md b/docs/src/optimization_packages/blackboxoptim.md index ae512ca9f..ce17f3b1b 100644 --- a/docs/src/optimization_packages/blackboxoptim.md +++ b/docs/src/optimization_packages/blackboxoptim.md @@ -76,12 +76,12 @@ using OptimizationBBO, Optimization, BlackBoxOptim using SciMLBase: MultiObjectiveOptimizationFunction u0 = [0.25, 0.25] opt = OptimizationBBO.BBO_borg_moea() -function multi_obj_func_2(x, p) +function multi_obj_func(x, p) f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function return (f1, f2) end -mof = MultiObjectiveOptimizationFunction(multi_obj_func_2) +mof = MultiObjectiveOptimizationFunction(multi_obj_func) prob = Optimization.OptimizationProblem(mof, u0; lb = [0.0, 0.0], ub = [2.0, 2.0]) -sol = solve(prob_2, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) +sol = solve(prob, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) ``` From 6a1dceb75e81cad86dedc766a7770175d7dc5caf Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 26 Sep 2024 00:13:37 +0530 Subject: [PATCH 11/28] Update evolutionary.md Removed unnecessary FowardDiff function. 
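The helper removed in this patch was only a thin wrapper around ForwardDiff, and NSGA2 never consults derivatives. If the Jacobian of the two objectives is wanted on its own, a stand-alone sketch is below; note that the docs example's `::Vector{Float64}` return annotation would likely have to be dropped for ForwardDiff's dual numbers to pass through, so the objectives are restated here without it:

```julia
using ForwardDiff

# Rosenbrock and Ackley objectives, returned as a plain vector
f12(x) = [(1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2,
    -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) -
    exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0]

J = ForwardDiff.jacobian(f12, [1.0, 1.0])   # 2×2 Jacobian ∂fᵢ/∂xⱼ
```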
--- docs/src/optimization_packages/evolutionary.md | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/docs/src/optimization_packages/evolutionary.md b/docs/src/optimization_packages/evolutionary.md index 9ed2a2645..03da1985a 100644 --- a/docs/src/optimization_packages/evolutionary.md +++ b/docs/src/optimization_packages/evolutionary.md @@ -53,10 +53,7 @@ function func(x, p=nothing)::Vector{Float64} return [f1, f2] end initial_guess = [1.0, 1.0] -function gradient_multi_objective(x, p=nothing) - ForwardDiff.jacobian(func, x) -end -obj_func = MultiObjectiveOptimizationFunction(func, jac=gradient_multi_objective) +obj_func = MultiObjectiveOptimizationFunction(func) algorithm = OptimizationEvolutionary.NSGA2() problem = OptimizationProblem(obj_func, initial_guess) result = solve(problem, algorithm) From c9d892fb8fba02040eb85f3749c454bb54f6c894 Mon Sep 17 00:00:00 2001 From: Vaibhav Kumar Dixit Date: Thu, 26 Sep 2024 20:17:50 -0400 Subject: [PATCH 12/28] Update docs/src/optimization_packages/metaheuristics.md --- docs/src/optimization_packages/metaheuristics.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index 13fd60651..3a6f37fdf 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -75,7 +75,7 @@ While `Metaheuristics.jl` supports such constraints, `Optimization.jl` currently ## Multi-objective optimization The zdt1 functions can be optimized using the `Metaheuristics.jl` as follows: -```@example MOO-Evolutionary +```@example MOO-Metaheuristics using Optimization, OptimizationEvolutionary,OptimizationMetaheuristics, Metaheuristics function zdt1(x) f1 = x[1] From 564f53ae03a811fe4d76f99558dac66e151930fb Mon Sep 17 00:00:00 2001 From: Vaibhav Kumar Dixit Date: Fri, 25 Oct 2024 23:13:53 -0400 Subject: [PATCH 13/28] Update docs/src/optimization_packages/evolutionary.md --- docs/src/optimization_packages/evolutionary.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/optimization_packages/evolutionary.md b/docs/src/optimization_packages/evolutionary.md index 03da1985a..86cf27b71 100644 --- a/docs/src/optimization_packages/evolutionary.md +++ b/docs/src/optimization_packages/evolutionary.md @@ -46,7 +46,7 @@ sol = solve(prob, Evolutionary.CMAES(μ = 40, λ = 100)) The Rosenbrock and Ackley functions can be optimized using the `Evolutionary.NSGA2()` as follows: ```@example MOO-Evolutionary -using Optimization, OptimizationEvolutionary +using Optimization, OptimizationEvolutionary, Evolutionary function func(x, p=nothing)::Vector{Float64} f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function From b1fe7da37b16f3052c9416d86eb5047908302303 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 10 Nov 2024 00:54:47 +0530 Subject: [PATCH 14/28] Update metaheuristics.md Added the package for the algorithms. 
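Qualifying the constructors with `Metaheuristics.` is presumably needed because the example's `using` line loads both the Evolutionary-based and the Metaheuristics-based stacks, and each provides its own `NSGA2`. A minimal illustration of the disambiguation, assuming both packages are installed:

```julia
using Evolutionary, Metaheuristics   # both define an NSGA2

alg_meta = Metaheuristics.NSGA2()    # the variant used on the metaheuristics.md page
alg_evo  = Evolutionary.NSGA2()      # the variant shown on the evolutionary.md page
```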
--- docs/src/optimization_packages/metaheuristics.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index 3a6f37fdf..2a7445ad2 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -106,12 +106,12 @@ npartitions = 100 weights = Metaheuristics.gen_ref_dirs(nobjectives, npartitions) # Choose the algorithm as required. -alg1 = NSGA2() -alg2 = NSGA3() -alg3 = SPEA2() -alg4 = CCMO(NSGA2(N=100, p_m=0.001)) -alg5 = MOEAD_DE(weights, options=Options(debug=false, iterations = 250)) -alg6 = SMS_EMOA() +alg1 = Metaheuristics.NSGA2() +alg2 = Metaheuristics.NSGA3() +alg3 = Metaheuristics.SPEA2() +alg4 = Metaheuristics.CCMO(NSGA2(N=100, p_m=0.001)) +alg5 = Metaheuristics.MOEAD_DE(weights, options=Options(debug=false, iterations = 250)) +alg6 = Metaheuristics.SMS_EMOA() # Solve the problem sol1 = solve(prob, alg1; maxiters = 100, use_initial = true) From 89abf73bd1b0544892f19740501266f066edb6dd Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 10 Nov 2024 01:43:48 +0530 Subject: [PATCH 15/28] Update Project.toml Added evolutionary to the package. --- docs/Project.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/Project.toml b/docs/Project.toml index c722e4e2b..ad6cf088b 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -44,6 +44,7 @@ Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" BlackBoxOptim = "a134a8b2-14d6-55f6-9291-3336d3ab0209" Metaheuristics = "bcdb8e00-2c21-11e9-3065-2b553b22f898" +Evolutionary = "86b6b26d-c046-49b6-aa0b-5f0f74682bd6" [compat] AmplNLWriter = "1" @@ -90,4 +91,5 @@ Tracker = ">= 0.2" Zygote = ">= 0.5" BlackBoxOptim = "0.6" Metaheuristics = "3" +Evolutionary = "0.11" From 5aa12894f0938798d614be992a63b60908e416d9 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 10 Nov 2024 02:04:28 +0530 Subject: [PATCH 16/28] Update metaheuristics.md updated algorithm call. --- .../optimization_packages/metaheuristics.md | 22 ++++++------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index 2a7445ad2..891d4514c 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -105,19 +105,11 @@ npartitions = 100 # reference points (Das and Dennis's method) weights = Metaheuristics.gen_ref_dirs(nobjectives, npartitions) -# Choose the algorithm as required. 
-alg1 = Metaheuristics.NSGA2() -alg2 = Metaheuristics.NSGA3() -alg3 = Metaheuristics.SPEA2() -alg4 = Metaheuristics.CCMO(NSGA2(N=100, p_m=0.001)) -alg5 = Metaheuristics.MOEAD_DE(weights, options=Options(debug=false, iterations = 250)) -alg6 = Metaheuristics.SMS_EMOA() - -# Solve the problem -sol1 = solve(prob, alg1; maxiters = 100, use_initial = true) -sol2 = solve(prob, alg2; maxiters = 100, use_initial = true) -sol3 = solve(prob, alg3; maxiters = 100, use_initial = true) -sol4 = solve(prob, alg4) -sol5 = solve(prob, alg5; maxiters = 100, use_initial = true) -sol6 = solve(prob, alg6; maxiters = 100, use_initial = true) +# Choose the algorithm and solve the problem +sol1 = solve(prob, NSGA2(); maxiters = 100, use_initial = true) +sol2 = solve(prob, NSGA3(); maxiters = 100, use_initial = true) +sol3 = solve(prob, SPEA2(); maxiters = 100, use_initial = true) +sol4 = solve(prob, CCMO(NSGA2(N=100, p_m=0.001))) +sol5 = solve(prob, MOEAD_DE(weights, options=Options(debug=false, iterations = 250)); maxiters = 100, use_initial = true) +sol6 = solve(prob, SMS_EMOA(); maxiters = 100, use_initial = true) ``` From fb0181bd69074137b96ef55aeb762f833aad5039 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 10 Nov 2024 02:06:09 +0530 Subject: [PATCH 17/28] Update blackboxoptim.md Correction of changeing tuple to vector. --- docs/src/optimization_packages/blackboxoptim.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/optimization_packages/blackboxoptim.md b/docs/src/optimization_packages/blackboxoptim.md index ce17f3b1b..85f17cf93 100644 --- a/docs/src/optimization_packages/blackboxoptim.md +++ b/docs/src/optimization_packages/blackboxoptim.md @@ -69,7 +69,7 @@ sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 100000, ``` ## Multi-objective optimization -The optimizer for Multi-Objective Optimization is `BBO_borg_moea()`. Your objective function should return a tuple of the objective values and you should indicate the fitness scheme to be (typically) Pareto fitness and specify the number of objectives. Otherwise, the use is similar, here is an example: +The optimizer for Multi-Objective Optimization is `BBO_borg_moea()`. Your objective function should return a vector of the objective values and you should indicate the fitness scheme to be (typically) Pareto fitness and specify the number of objectives. Otherwise, the use is similar, here is an example: ```@example MOO-BBO using OptimizationBBO, Optimization, BlackBoxOptim From 6d4c6ba9f8cf573ac2882761bebb5527a578ceec Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 10 Nov 2024 10:09:50 +0530 Subject: [PATCH 18/28] Update metaheuristics.md corrected algorithm calls. 
--- docs/src/optimization_packages/metaheuristics.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index 891d4514c..f95ad505a 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -106,10 +106,10 @@ npartitions = 100 weights = Metaheuristics.gen_ref_dirs(nobjectives, npartitions) # Choose the algorithm and solve the problem -sol1 = solve(prob, NSGA2(); maxiters = 100, use_initial = true) -sol2 = solve(prob, NSGA3(); maxiters = 100, use_initial = true) -sol3 = solve(prob, SPEA2(); maxiters = 100, use_initial = true) -sol4 = solve(prob, CCMO(NSGA2(N=100, p_m=0.001))) -sol5 = solve(prob, MOEAD_DE(weights, options=Options(debug=false, iterations = 250)); maxiters = 100, use_initial = true) -sol6 = solve(prob, SMS_EMOA(); maxiters = 100, use_initial = true) +sol1 = solve(prob, Metaheuristics.NSGA2(); maxiters = 100, use_initial = true) +sol2 = solve(prob, Metaheuristics.NSGA3(); maxiters = 100, use_initial = true) +sol3 = solve(prob, Metaheuristics.SPEA2(); maxiters = 100, use_initial = true) +sol4 = solve(prob, Metaheuristics.CCMO(NSGA2(N=100, p_m=0.001))) +sol5 = solve(prob, Metaheuristics.MOEAD_DE(weights, options=Options(debug=false, iterations = 250)); maxiters = 100, use_initial = true) +sol6 = solve(prob, Metaheuristics.SMS_EMOA(); maxiters = 100, use_initial = true) ``` From d8fea39c687e8fac083290207f78308109030eda Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 30 Jan 2025 00:05:48 +0530 Subject: [PATCH 19/28] Update OptimizationBBO.jl Adding argument mapping for num_dimensions and fitness_scheme. --- lib/OptimizationBBO/src/OptimizationBBO.jl | 38 ++++++++++++++++++---- 1 file changed, 31 insertions(+), 7 deletions(-) diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl index 1b0c6e48f..9277fbcba 100644 --- a/lib/OptimizationBBO/src/OptimizationBBO.jl +++ b/lib/OptimizationBBO/src/OptimizationBBO.jl @@ -49,6 +49,8 @@ function __map_optimizer_args(prob::Optimization.OptimizationCache, opt::BBO; abstol::Union{Number, Nothing} = nothing, reltol::Union{Number, Nothing} = nothing, verbose::Bool = false, + num_dimensions::Union{Number, Nothing} = nothing, + fitness_scheme::Union{String, Nothing} = nothing, kwargs...) if !isnothing(reltol) @warn "common reltol is currently not used by $(opt)" @@ -80,6 +82,16 @@ function __map_optimizer_args(prob::Optimization.OptimizationCache, opt::BBO; mapped_args = (; mapped_args..., TraceMode = :silent) end + if isa(prob.f, MultiObjectiveOptimizationFunction) + if isnothing(num_dimensions) && isnothing(fitness_scheme) + mapped_args = (; mapped_args..., NumDimensions = 2, FitnessScheme = BlackBoxOptim.ParetoFitnessScheme{2}(is_minimizing=true)) + elseif isnothing(num_dimensions) + mapped_args = (; mapped_args..., NumDimensions = 2, FitnessScheme = fitness_scheme) + elseif isnothing(fitness_scheme) + mapped_args = (; mapped_args..., NumDimensions = num_dimensions, FitnessScheme = BlackBoxOptim.ParetoFitnessScheme{2}(is_minimizing=true)) + end + end + return mapped_args end @@ -177,13 +189,25 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{ end end - opt_args = __map_optimizer_args(cache, cache.opt; - callback = cache.callback === Optimization.DEFAULT_CALLBACK && - cache.data === Optimization.DEFAULT_DATA ? 
- nothing : _cb, - cache.solver_args..., - maxiters = maxiters, - maxtime = maxtime) + if isa(cache.f, MultiObjectiveOptimizationFunction) + opt_args = __map_optimizer_args(cache, cache.opt; + callback = cache.callback === Optimization.DEFAULT_CALLBACK && + cache.data === Optimization.DEFAULT_DATA ? + nothing : _cb, + cache.solver_args..., + maxiters = maxiters, + maxtime = maxtime, + num_dimensions = isnothing(cache.num_dimensions) ? nothing : cache.num_dimensions, + fitness_scheme = isnothing(cache.fitness_scheme) ? nothing : cache.fitness_scheme) + else + opt_args = __map_optimizer_args(cache, cache.opt; + callback = cache.callback === Optimization.DEFAULT_CALLBACK && + cache.data === Optimization.DEFAULT_DATA ? + nothing : _cb, + cache.solver_args..., + maxiters = maxiters, + maxtime = maxtime) + end opt_setup = BlackBoxOptim.bbsetup(_loss; opt_args...) From 032e5b90274de11f9dffc5cd91717999eaf53b62 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 30 Jan 2025 00:08:17 +0530 Subject: [PATCH 20/28] Update runtests.jl syntax change for num_dimensions and fitness_scheme passing in solve(). --- lib/OptimizationBBO/test/runtests.jl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/OptimizationBBO/test/runtests.jl b/lib/OptimizationBBO/test/runtests.jl index e5215fee8..08117d6b6 100644 --- a/lib/OptimizationBBO/test/runtests.jl +++ b/lib/OptimizationBBO/test/runtests.jl @@ -68,8 +68,8 @@ using Test mof_1 = MultiObjectiveOptimizationFunction(multi_obj_func_1) prob_1 = Optimization.OptimizationProblem(mof_1, u0; lb = lb, ub = ub) - sol_1 = solve(prob_1, opt, NumDimensions = 2, - FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true)) + sol_1 = solve(prob_1, opt, num_dimensions = 2, + fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true)) @test sol_1 ≠ nothing println("Solution for Sphere and Rastrigin: ", sol_1) @@ -88,8 +88,8 @@ using Test mof_2 = MultiObjectiveOptimizationFunction(multi_obj_func_2) prob_2 = Optimization.OptimizationProblem(mof_2, u0; lb = lb, ub = ub) - sol_2 = solve(prob_2, opt, NumDimensions = 2, - FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true)) + sol_2 = solve(prob_2, opt, num_dimensions = 2, + fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true)) @test sol_2 ≠ nothing println("Solution for Rosenbrock and Ackley: ", sol_2) @@ -108,8 +108,8 @@ using Test mof_3 = MultiObjectiveOptimizationFunction(multi_obj_func_3) prob_3 = Optimization.OptimizationProblem(mof_3, u0; lb = lb, ub = ub) - sol_3 = solve(prob_3, opt, NumDimensions = 2, - FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true)) + sol_3 = solve(prob_3, opt, num_dimensions = 2, + fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true)) @test sol_3 ≠ nothing println("Solution for ZDT1: ", sol_3) From a6ae41e2bc5aa40edcdbe073f07d5195ad6fcb78 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 2 Jun 2025 12:18:48 +0530 Subject: [PATCH 21/28] Create ode.md Docs for first draft of OptimizationODE.jl --- docs/src/optimization_packages/ode.md | 59 +++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 docs/src/optimization_packages/ode.md diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md new file mode 100644 index 000000000..15e1746ed --- /dev/null +++ b/docs/src/optimization_packages/ode.md @@ -0,0 +1,59 @@ +# OptimizationODE.jl + 
+**OptimizationODE.jl** provides ODE-based optimization methods as a solver plugin for [SciML's Optimization.jl](https://github.com/SciML/Optimization.jl). It wraps various ODE solvers to perform gradient-based optimization using continuous-time dynamics. + +## Installation + +```julia +using Pkg +Pkg.add(url="OptimizationODE.jl") +``` + +## Usage + +```julia +using OptimizationODE, Optimization, ADTypes, SciMLBase + +function f(x, p) + return sum(abs2, x) +end + +function g!(g, x, p) + @. g = 2 * x +end + +x0 = [2.0, -3.0] +p = [] + +f_manual = OptimizationFunction(f, SciMLBase.NoAD(); grad = g!) +prob_manual = OptimizationProblem(f_manual, x0) + +opt = ODEGradientDescent(dt=0.01) +sol = solve(prob_manual, opt; maxiters=50_000) + +@show sol.u +@show sol.objective +``` + +## Available Optimizers + +* `ODEGradientDescent(dt=...)` — uses the explicit Euler method. +* `RKChebyshevDescent()` — uses the ROCK2 method. +* `RKAccelerated()` — uses the Tsit5 Runge-Kutta method. +* `HighOrderDescent()` — uses the Vern7 high-order Runge-Kutta method. + +## Interface Details + +All optimizers require gradient information (either via automatic differentiation or manually provided `grad!`). + +### Keyword Arguments + +* `dt` — time step size (only for `ODEGradientDescent`). +* `maxiters` — maximum number of ODE steps. +* `callback` — function to observe progress. +* `progress=true` — enables live progress display. + +## Development + +Please refer to the `runtests.jl` file for a complete set of tests that demonstrate how each optimizer is used. + From 99d06eff4e7f4718c38f093091974026da846a56 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 2 Jun 2025 16:41:14 +0530 Subject: [PATCH 22/28] Update docs/src/optimization_packages/ode.md Co-authored-by: Christopher Rackauckas --- docs/src/optimization_packages/ode.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md index 15e1746ed..152bb5e40 100644 --- a/docs/src/optimization_packages/ode.md +++ b/docs/src/optimization_packages/ode.md @@ -53,7 +53,3 @@ All optimizers require gradient information (either via automatic differentiatio * `callback` — function to observe progress. * `progress=true` — enables live progress display. -## Development - -Please refer to the `runtests.jl` file for a complete set of tests that demonstrate how each optimizer is used. - From b7e29271e6023ddb58877e3c77e022a7ee2ed88c Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 2 Jun 2025 16:49:29 +0530 Subject: [PATCH 23/28] Update ode.md Method descriptions for the solvers added. --- docs/src/optimization_packages/ode.md | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md index 152bb5e40..f80568483 100644 --- a/docs/src/optimization_packages/ode.md +++ b/docs/src/optimization_packages/ode.md @@ -37,14 +37,21 @@ sol = solve(prob_manual, opt; maxiters=50_000) ## Available Optimizers -* `ODEGradientDescent(dt=...)` — uses the explicit Euler method. -* `RKChebyshevDescent()` — uses the ROCK2 method. -* `RKAccelerated()` — uses the Tsit5 Runge-Kutta method. -* `HighOrderDescent()` — uses the Vern7 high-order Runge-Kutta method. 
+All provided optimizers are **gradient-based local optimizers** that solve optimization problems by integrating gradient-based ODEs to convergence: + +* `ODEGradientDescent(dt=...)` — performs basic gradient descent using the explicit Euler method. This is a simple and efficient method suitable for small-scale or well-conditioned problems. + +* `RKChebyshevDescent()` — uses the ROCK2 solver, a stabilized explicit Runge-Kutta method suitable for stiff problems. It allows larger step sizes while maintaining stability. + +* `RKAccelerated()` — leverages the Tsit5 method, a 5th-order Runge-Kutta solver that achieves faster convergence for smooth problems by improving integration accuracy. + +* `HighOrderDescent()` — applies Vern7, a high-order (7th-order) explicit Runge-Kutta method for even more accurate integration. This can be beneficial for problems requiring high precision. + +You can also define a custom optimizer using the generic `ODEOptimizer(solver; dt=nothing)` constructor by supplying any ODE solver supported by [OrdinaryDiffEq.jl](https://docs.sciml.ai/DiffEqDocs/stable/solvers/ode_solve/). ## Interface Details -All optimizers require gradient information (either via automatic differentiation or manually provided `grad!`). +All optimizers require gradient information (either via automatic differentiation or manually provided `grad!`). The optimization is performed by integrating the ODE defined by the negative gradient until a steady state is reached. ### Keyword Arguments From 36be3e80d4cc0bfb980dde0eb85d48ef4e06905f Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 23 Jun 2025 01:25:43 +0530 Subject: [PATCH 24/28] Update ode.md Updated docs. --- docs/src/optimization_packages/ode.md | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md index f80568483..d36ba8b84 100644 --- a/docs/src/optimization_packages/ode.md +++ b/docs/src/optimization_packages/ode.md @@ -35,7 +35,7 @@ sol = solve(prob_manual, opt; maxiters=50_000) @show sol.objective ``` -## Available Optimizers +## Local-gradient based Optimizers All provided optimizers are **gradient-based local optimizers** that solve optimization problems by integrating gradient-based ODEs to convergence: @@ -53,10 +53,3 @@ You can also define a custom optimizer using the generic `ODEOptimizer(solver; d All optimizers require gradient information (either via automatic differentiation or manually provided `grad!`). The optimization is performed by integrating the ODE defined by the negative gradient until a steady state is reached. -### Keyword Arguments - -* `dt` — time step size (only for `ODEGradientDescent`). -* `maxiters` — maximum number of ODE steps. -* `callback` — function to observe progress. -* `progress=true` — enables live progress display. 
- From 1cd41ca1b33d9d069b63c2de0fa2157238bde8eb Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Mon, 23 Jun 2025 01:30:28 +0530 Subject: [PATCH 25/28] Update ode.md updated docs --- docs/src/optimization_packages/ode.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md index d36ba8b84..0144ac301 100644 --- a/docs/src/optimization_packages/ode.md +++ b/docs/src/optimization_packages/ode.md @@ -35,7 +35,7 @@ sol = solve(prob_manual, opt; maxiters=50_000) @show sol.objective ``` -## Local-gradient based Optimizers +## Local Gradient-based Optimizers All provided optimizers are **gradient-based local optimizers** that solve optimization problems by integrating gradient-based ODEs to convergence: From 66919e45cbf3a75e75c60db426906e09aff21779 Mon Sep 17 00:00:00 2001 From: ParasPuneetSingh Date: Thu, 10 Jul 2025 22:31:12 +0530 Subject: [PATCH 26/28] DAE based solvers --- docs/src/optimization_packages/ode.md | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md index 0144ac301..b5c6c7bcb 100644 --- a/docs/src/optimization_packages/ode.md +++ b/docs/src/optimization_packages/ode.md @@ -28,8 +28,8 @@ p = [] f_manual = OptimizationFunction(f, SciMLBase.NoAD(); grad = g!) prob_manual = OptimizationProblem(f_manual, x0) -opt = ODEGradientDescent(dt=0.01) -sol = solve(prob_manual, opt; maxiters=50_000) +opt = ODEGradientDescent() +sol = solve(prob_manual, opt; dt=0.01, maxiters=50_000) @show sol.u @show sol.objective @@ -39,7 +39,7 @@ sol = solve(prob_manual, opt; maxiters=50_000) All provided optimizers are **gradient-based local optimizers** that solve optimization problems by integrating gradient-based ODEs to convergence: -* `ODEGradientDescent(dt=...)` — performs basic gradient descent using the explicit Euler method. This is a simple and efficient method suitable for small-scale or well-conditioned problems. +* `ODEGradientDescent()` — performs basic gradient descent using the explicit Euler method. This is a simple and efficient method suitable for small-scale or well-conditioned problems. * `RKChebyshevDescent()` — uses the ROCK2 solver, a stabilized explicit Runge-Kutta method suitable for stiff problems. It allows larger step sizes while maintaining stability. @@ -47,7 +47,15 @@ All provided optimizers are **gradient-based local optimizers** that solve optim * `HighOrderDescent()` — applies Vern7, a high-order (7th-order) explicit Runge-Kutta method for even more accurate integration. This can be beneficial for problems requiring high precision. -You can also define a custom optimizer using the generic `ODEOptimizer(solver; dt=nothing)` constructor by supplying any ODE solver supported by [OrdinaryDiffEq.jl](https://docs.sciml.ai/DiffEqDocs/stable/solvers/ode_solve/). +## DAE-based Optimizers + +In addition to ODE-based optimizers, OptimizationODE.jl provides optimizers for differential-algebraic equation (DAE) constrained problems: + +* `DAEMassMatrix()` — uses the Rodas5 solver (from OrdinaryDiffEq.jl) for DAE problems with a mass matrix formulation. + +* `DAEIndexing()` — uses the IDA solver (from Sundials.jl) for DAE problems with index variable support. 
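A minimal sketch of how the optimizers listed above compose with the generic constructor mentioned just below, reusing `prob_manual` from the Usage section earlier on this page; the solver choice is purely illustrative and assumes OrdinaryDiffEq is available:

```julia
using OptimizationODE, OrdinaryDiffEq   # OrdinaryDiffEq supplies Tsit5, Vern7, ROCK2, ...

opt_custom = ODEOptimizer(Tsit5())      # same integrator RKAccelerated() is described as wrapping
sol_custom = solve(prob_manual, opt_custom; maxiters = 50_000)
@show sol_custom.u
```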
+ +You can also define a custom optimizer using the generic `ODEOptimizer(solver)` or `DAEOptimizer(solver)` constructor by supplying any ODE or DAE solver supported by [OrdinaryDiffEq.jl](https://docs.sciml.ai/DiffEqDocs/stable/solvers/ode_solve/) or [Sundials.jl](https://github.com/SciML/Sundials.jl). ## Interface Details From ebe2aec858c46d4164fb23067074777600f84b99 Mon Sep 17 00:00:00 2001 From: ParasPuneetSingh Date: Fri, 11 Jul 2025 01:17:34 +0530 Subject: [PATCH 27/28] MOO tests and code updates --- lib/OptimizationBBO/src/OptimizationBBO.jl | 34 +++++++++++--- lib/OptimizationBBO/test/runtests.jl | 44 +++++++++++++++++++ lib/OptimizationEvolutionary/test/runtests.jl | 38 ++++++++++++++++ 3 files changed, 111 insertions(+), 5 deletions(-) diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl index 0e203de62..12d004928 100644 --- a/lib/OptimizationBBO/src/OptimizationBBO.jl +++ b/lib/OptimizationBBO/src/OptimizationBBO.jl @@ -53,9 +53,25 @@ function __map_optimizer_args(prob::Optimization.OptimizationCache, opt::BBO; if !isnothing(reltol) @warn "common reltol is currently not used by $(opt)" end - mapped_args = (; kwargs...) - mapped_args = (; mapped_args..., Method = opt.method, - SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)]) + + # Determine number of objectives for multi-objective problems + if isa(prob.f, MultiObjectiveOptimizationFunction) + num_objectives = length(prob.f.cost_prototype) + mapped_args = (; kwargs...) + mapped_args = (; mapped_args..., Method = opt.method, + SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)], + NumDimensions = length(prob.lb), + NumObjectives = num_objectives) + # FitnessScheme should be in opt, not the function + if hasproperty(opt, :FitnessScheme) + mapped_args = (; mapped_args..., FitnessScheme = opt.FitnessScheme) + end + else + mapped_args = (; kwargs...) 
+ mapped_args = (; mapped_args..., Method = opt.method, + SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)], + NumDimensions = length(prob.lb)) + end if !isnothing(callback) mapped_args = (; mapped_args..., CallbackFunction = callback, @@ -144,8 +160,16 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{ maxiters = Optimization._check_and_convert_maxiters(cache.solver_args.maxiters) maxtime = Optimization._check_and_convert_maxtime(cache.solver_args.maxtime) - _loss = function (θ) - cache.f(θ, cache.p) + + # Multi-objective: use out-of-place or in-place as appropriate + if isa(cache.f, MultiObjectiveOptimizationFunction) + if cache.f.iip + _loss = θ -> (cost = similar(cache.f.cost_prototype); cache.f.f(cost, θ, cache.p); cost) + else + _loss = θ -> cache.f.f(θ, cache.p) + end + else + _loss = θ -> cache.f(θ, cache.p) end opt_args = __map_optimizer_args(cache, cache.opt; diff --git a/lib/OptimizationBBO/test/runtests.jl b/lib/OptimizationBBO/test/runtests.jl index 1295465fc..a6aab5af2 100644 --- a/lib/OptimizationBBO/test/runtests.jl +++ b/lib/OptimizationBBO/test/runtests.jl @@ -34,6 +34,50 @@ using Test push!(loss_history, fitness) return false end + + @testset "In-place Multi-Objective Optimization" begin + function inplace_multi_obj!(cost, x, p) + cost[1] = sum(x .^ 2) + cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return nothing + end + u0 = [0.25, 0.25] + lb = [0.0, 0.0] + ub = [2.0, 2.0] + cost_prototype = zeros(2) + mof_inplace = MultiObjectiveOptimizationFunction(inplace_multi_obj!; cost_prototype=cost_prototype) + prob_inplace = Optimization.OptimizationProblem(mof_inplace, u0; lb=lb, ub=ub) + sol_inplace = solve(prob_inplace, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) + @test sol_inplace ≠ nothing + @test length(sol_inplace.objective) == 2 + @test sol_inplace.objective[1] ≈ 6.9905986e-18 atol=1e-3 + @test sol_inplace.objective[2] ≈ 1.7763568e-15 atol=1e-3 + end + + @testset "Custom coalesce for Multi-Objective" begin + function multi_obj_tuple(x, p) + f1 = sum(x .^ 2) + f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return (f1, f2) + end + coalesce_sum(cost, x, p) = sum(cost) + mof_coalesce = MultiObjectiveOptimizationFunction(multi_obj_tuple; coalesce=coalesce_sum) + prob_coalesce = Optimization.OptimizationProblem(mof_coalesce, u0; lb=lb, ub=ub) + sol_coalesce = solve(prob_coalesce, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) + @test sol_coalesce ≠ nothing + @test sol_coalesce.objective[1] ≈ 6.9905986e-18 atol=1e-3 + @test sol_coalesce.objective[2] ≈ 1.7763568e-15 atol=1e-3 + @test mof_coalesce.coalesce([1.0, 2.0], [0.0, 0.0], nothing) == 3.0 + end + + @testset "Error if in-place MultiObjectiveOptimizationFunction without cost_prototype" begin + function inplace_multi_obj_err!(cost, x, p) + cost[1] = sum(x .^ 2) + cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return nothing + end + @test_throws ArgumentError MultiObjectiveOptimizationFunction(inplace_multi_obj_err!) 
+ end sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), callback = cb) # println(fitness_progress_history) @test !isempty(fitness_progress_history) diff --git a/lib/OptimizationEvolutionary/test/runtests.jl b/lib/OptimizationEvolutionary/test/runtests.jl index 1bd810664..13c0f4fa3 100644 --- a/lib/OptimizationEvolutionary/test/runtests.jl +++ b/lib/OptimizationEvolutionary/test/runtests.jl @@ -40,6 +40,44 @@ Random.seed!(1234) if state.iter % 10 == 0 println(state.u) end + + @testset "In-place Multi-Objective Optimization" begin + function inplace_multi_obj!(cost, x, p) + cost[1] = sum(x .^ 2) + cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return nothing + end + u0 = [0.25, 0.25] + cost_prototype = zeros(2) + mof_inplace = MultiObjectiveOptimizationFunction(inplace_multi_obj!; cost_prototype=cost_prototype) + prob_inplace = OptimizationProblem(mof_inplace, u0) + sol_inplace = solve(prob_inplace, NSGA2()) + @test sol_inplace ≠ nothing + @test length(sol_inplace.objective) == 2 + end + + @testset "Custom coalesce for Multi-Objective" begin + function multi_obj_tuple(x, p) + f1 = sum(x .^ 2) + f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return (f1, f2) + end + coalesce_sum(cost, x, p) = sum(cost) + mof_coalesce = MultiObjectiveOptimizationFunction(multi_obj_tuple; coalesce=coalesce_sum) + prob_coalesce = OptimizationProblem(mof_coalesce, u0) + sol_coalesce = solve(prob_coalesce, NSGA2()) + @test sol_coalesce ≠ nothing + @test mof_coalesce.coalesce([1.0, 2.0], [0.0, 0.0], nothing) == 3.0 + end + + @testset "Error if in-place MultiObjectiveOptimizationFunction without cost_prototype" begin + function inplace_multi_obj_err!(cost, x, p) + cost[1] = sum(x .^ 2) + cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return nothing + end + @test_throws ArgumentError MultiObjectiveOptimizationFunction(inplace_multi_obj_err!) 
+ end return false end solve(prob, CMAES(μ = 40, λ = 100), callback = cb, maxiters = 100) From 7fbca500f3d022e45e04866c36ec5e4f8dd4ac98 Mon Sep 17 00:00:00 2001 From: ParasPuneetSingh Date: Sun, 20 Jul 2025 19:22:49 +0530 Subject: [PATCH 28/28] import changes --- lib/OptimizationBBO/src/OptimizationBBO.jl | 11 +++--- lib/OptimizationBBO/test/runtests.jl | 40 ++++++++-------------- 2 files changed, 19 insertions(+), 32 deletions(-) diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl index 12d004928..a3b002d5e 100644 --- a/lib/OptimizationBBO/src/OptimizationBBO.jl +++ b/lib/OptimizationBBO/src/OptimizationBBO.jl @@ -1,9 +1,9 @@ module OptimizationBBO using Reexport -import Optimization -import BlackBoxOptim, Optimization.SciMLBase -import Optimization.SciMLBase: MultiObjectiveOptimizationFunction +@reexport using Optimization +using BlackBoxOptim, Optimization.SciMLBase +using Optimization.SciMLBase: MultiObjectiveOptimizationFunction abstract type BBO end @@ -126,8 +126,7 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{ LC, UC, S, - O <: - BBO, + O <: BBO, D, P, C @@ -163,7 +162,7 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{ # Multi-objective: use out-of-place or in-place as appropriate if isa(cache.f, MultiObjectiveOptimizationFunction) - if cache.f.iip + if is_inplace(cache.f) _loss = θ -> (cost = similar(cache.f.cost_prototype); cache.f.f(cost, θ, cache.p); cost) else _loss = θ -> cache.f.f(θ, cache.p) diff --git a/lib/OptimizationBBO/test/runtests.jl b/lib/OptimizationBBO/test/runtests.jl index a6aab5af2..20b23da19 100644 --- a/lib/OptimizationBBO/test/runtests.jl +++ b/lib/OptimizationBBO/test/runtests.jl @@ -1,4 +1,5 @@ using OptimizationBBO, Optimization, BlackBoxOptim +using Optimization.SciMLBase using Optimization.SciMLBase: MultiObjectiveOptimizationFunction using Test @@ -35,17 +36,20 @@ using Test return false end + # Define the initial guess and bounds ONCE for all tests + u0 = [0.25, 0.25] + lb = [0.0, 0.0] + ub = [2.0, 2.0] + opt = OptimizationBBO.BBO_borg_moea() + @testset "In-place Multi-Objective Optimization" begin function inplace_multi_obj!(cost, x, p) cost[1] = sum(x .^ 2) cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) return nothing end - u0 = [0.25, 0.25] - lb = [0.0, 0.0] - ub = [2.0, 2.0] cost_prototype = zeros(2) - mof_inplace = MultiObjectiveOptimizationFunction(inplace_multi_obj!; cost_prototype=cost_prototype) + mof_inplace = MultiObjectiveOptimizationFunction{true}(inplace_multi_obj!, SciMLBase.NoAD(); cost_prototype=cost_prototype) prob_inplace = Optimization.OptimizationProblem(mof_inplace, u0; lb=lb, ub=ub) sol_inplace = solve(prob_inplace, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) @test sol_inplace ≠ nothing @@ -60,24 +64,14 @@ using Test f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) return (f1, f2) end - coalesce_sum(cost, x, p) = sum(cost) - mof_coalesce = MultiObjectiveOptimizationFunction(multi_obj_tuple; coalesce=coalesce_sum) + mof_coalesce = MultiObjectiveOptimizationFunction{false}(multi_obj_tuple, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_coalesce = Optimization.OptimizationProblem(mof_coalesce, u0; lb=lb, ub=ub) sol_coalesce = solve(prob_coalesce, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) @test sol_coalesce ≠ nothing @test sol_coalesce.objective[1] ≈ 6.9905986e-18 atol=1e-3 @test sol_coalesce.objective[2] ≈ 1.7763568e-15 atol=1e-3 - @test mof_coalesce.coalesce([1.0, 
2.0], [0.0, 0.0], nothing) == 3.0 end - @testset "Error if in-place MultiObjectiveOptimizationFunction without cost_prototype" begin - function inplace_multi_obj_err!(cost, x, p) - cost[1] = sum(x .^ 2) - cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) - return nothing - end - @test_throws ArgumentError MultiObjectiveOptimizationFunction(inplace_multi_obj_err!) - end sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), callback = cb) # println(fitness_progress_history) @test !isempty(fitness_progress_history) @@ -99,13 +93,7 @@ using Test maxtime = 5) end - # Define the initial guess and bounds - u0 = [0.25, 0.25] - lb = [0.0, 0.0] - ub = [2.0, 2.0] - - # Define the optimizer - opt = OptimizationBBO.BBO_borg_moea() + # ...existing code... @testset "Multi-Objective Optimization Tests" begin @@ -117,7 +105,7 @@ using Test return (f1, f2) end - mof_1 = MultiObjectiveOptimizationFunction(multi_obj_func_1) + mof_1 = MultiObjectiveOptimizationFunction{false}(multi_obj_func_1, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_1 = Optimization.OptimizationProblem(mof_1, u0; lb = lb, ub = ub) sol_1 = solve(prob_1, opt, NumDimensions = 2, FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true)) @@ -144,7 +132,7 @@ using Test return false end - mof_1 = MultiObjectiveOptimizationFunction(multi_obj_func_1) + mof_1 = MultiObjectiveOptimizationFunction{false}(multi_obj_func_1, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_1 = Optimization.OptimizationProblem(mof_1, u0; lb = lb, ub = ub) sol_1 = solve(prob_1, opt, NumDimensions = 2, FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true), @@ -170,7 +158,7 @@ using Test return (f1, f2) end - mof_2 = MultiObjectiveOptimizationFunction(multi_obj_func_2) + mof_2 = MultiObjectiveOptimizationFunction{false}(multi_obj_func_2, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_2 = Optimization.OptimizationProblem(mof_2, u0; lb = lb, ub = ub) sol_2 = solve(prob_2, opt, NumDimensions = 2, FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true)) @@ -190,7 +178,7 @@ using Test return (f1, f2) end - mof_3 = MultiObjectiveOptimizationFunction(multi_obj_func_3) + mof_3 = SciMLBase.MultiObjectiveOptimizationFunction{false}(multi_obj_func_3, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_3 = Optimization.OptimizationProblem(mof_3, u0; lb = lb, ub = ub) sol_3 = solve(prob_3, opt, NumDimensions = 2, FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true))
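Pulling the final state of these test changes together, a minimal end-to-end sketch of the in-place multi-objective path, assuming the `{true}` constructor with `cost_prototype` and the `NumDimensions`/`FitnessScheme` keywords land exactly as written in the tests above:

```julia
using OptimizationBBO, Optimization, BlackBoxOptim
using Optimization.SciMLBase
using Optimization.SciMLBase: MultiObjectiveOptimizationFunction

# In-place two-objective function: writes the (sphere, Rastrigin) values into `cost`
function inplace_multi_obj!(cost, x, p)
    cost[1] = sum(x .^ 2)
    cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10)
    return nothing
end

mof = MultiObjectiveOptimizationFunction{true}(inplace_multi_obj!, SciMLBase.NoAD();
    cost_prototype = zeros(2))
prob = Optimization.OptimizationProblem(mof, [0.25, 0.25]; lb = [0.0, 0.0], ub = [2.0, 2.0])
sol = solve(prob, OptimizationBBO.BBO_borg_moea(),
    NumDimensions = 2, FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true))
@show sol.objective   # two-element collection of objective values
```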