Skip to content

Commit ba4ea47

Browse files
committed
Feat: Add COBYLA optimizer (1 / n)
- Add `COBYLA` class to `local_opt`
- Add example to `examples/optimization_algorithm`

Fixes #73
1 parent 7453590 commit ba4ea47

File tree

8 files changed

+248
-0
lines changed

8 files changed

+248
-0
lines changed
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
import numpy as np
2+
from gradient_free_optimizers import COBYLA
3+
4+
5+
def sphere_function(para: np.ndarray) -> float:
    """Negated sphere function: maximum value 0 at the origin.

    Note: the annotation is ``np.ndarray`` (the array *type*);
    ``np.array`` is a factory function, not a type.
    """
    x = para[0]
    y = para[1]
    return -(x * x + y * y)
10+
11+
def constraint_1(para):
    """Constraint: the first parameter must stay above -5."""
    first = para[0]
    return -5 < first
13+
14+
# Discretized search space for the two parameters of the sphere function.
search_space = {
    "x": np.arange(-10, 10, 0.1),
    "y": np.arange(-10, 10, 0.1),
}

# Configure COBYLA: start with a coarse trust region (rho_beg = 1.0),
# refine down to rho_end = 0.01, with the initial simplex built around
# the origin and one inequality constraint attached.
opt = COBYLA(
    search_space=search_space,
    rho_beg=1.0,
    rho_end=0.01,
    x_0=np.array([0.0, 0.0]),
    constraints=[constraint_1]
)
opt.search(sphere_function, n_iter=10)

src/gradient_free_optimizers/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@
3131
TreeStructuredParzenEstimators,
3232
ForestOptimizer,
3333
EnsembleOptimizer,
34+
COBYLA
3435
)
3536

3637

@@ -58,4 +59,5 @@
5859
"TreeStructuredParzenEstimators",
5960
"ForestOptimizer",
6061
"EnsembleOptimizer",
62+
"COBYLA"
6163
]
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
# Author: Simon Blanke
2+
3+
# License: MIT License
4+
5+
from typing import List, Dict, Literal, Union
6+
7+
from ..search import Search
8+
from ..optimizers import COBYLA as _COBYLA
9+
10+
11+
class COBYLA(_COBYLA, Search):
    """COBYLA optimizer with the ``search`` API attached.

    Thin wrapper that combines the core COBYLA implementation with the
    ``Search`` mixin so it can be driven via ``opt.search(...)``.

    Parameters
    ----------
    rho_beg : float
        Initial trust-region radius (coarse exploration).
    rho_end : float
        Required final tolerance.
    x_0 : np.ndarray
        Initial point used to build the first simplex.
    search_space : dict of str to list
        Discretized search space, one array of values per dimension.
    initialize : dict, optional
        Initialization strategy passed through to the core optimizer.
    constraints : list of callable, optional
        Inequality constraints; each returns a truthy value when satisfied.
    """

    def __init__(
        self,
        rho_beg,
        rho_end,
        x_0,
        search_space: Dict[str, list],
        initialize: Dict[
            Literal["grid", "vertices", "random", "warm_start"],
            Union[int, List[dict]],
        ] = {"grid": 4, "random": 2, "vertices": 4},
        constraints: List[callable] = [],
        random_state: int = None,
        rand_rest_p: float = 0,
        nth_process: int = None,
    ):
        # BUGFIX: the original body reassigned `initialize={...},` and
        # `rand_rest_p=0,` here — the trailing commas turned both into
        # 1-tuples, silently clobbering the caller's arguments before
        # they reached super().__init__(). Those lines are removed so
        # the user-supplied values are forwarded untouched.
        super().__init__(
            search_space=search_space,
            rho_beg=rho_beg,
            rho_end=rho_end,
            x_0=x_0,
            initialize=initialize,
            constraints=constraints,
            random_state=random_state,
            rand_rest_p=rand_rest_p,
            nth_process=nth_process,
        )

src/gradient_free_optimizers/optimizer_search/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
from .tree_structured_parzen_estimators import TreeStructuredParzenEstimators
2626
from .forest_optimization import ForestOptimizer
2727
from .ensemble_optimizer import EnsembleOptimizer
28+
from .COBYLA import COBYLA
2829

2930

3031
__all__ = [
@@ -51,4 +52,5 @@
5152
"TreeStructuredParzenEstimators",
5253
"ForestOptimizer",
5354
"EnsembleOptimizer",
55+
"COBYLA",
5456
]

src/gradient_free_optimizers/optimizers/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
RepulsingHillClimbingOptimizer,
99
SimulatedAnnealingOptimizer,
1010
DownhillSimplexOptimizer,
11+
COBYLA,
1112
)
1213

1314
from .global_opt import (
@@ -68,4 +69,5 @@
6869
"TreeStructuredParzenEstimators",
6970
"ForestOptimizer",
7071
"EnsembleOptimizer",
72+
"COBYLA"
7173
]

src/gradient_free_optimizers/optimizers/core_optimizer/core_optimizer.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@ def __init__(
3737

3838
self.search_space = search_space
3939
self.initialize = initialize
40+
print(constraints)
4041
self.constraints = constraints
4142
self.random_state = random_state
4243
self.rand_rest_p = rand_rest_p
Lines changed: 174 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,174 @@
1+
# copyright: hyperactive developers, MIT License (see LICENSE file)
2+
3+
# todo: write an informative docstring for the file or module, remove the above
4+
5+
__author__ = ["SimonBlanke", "yashnator"]
6+
7+
from ..base_optimizer import BaseOptimizer
8+
9+
import numpy as np
10+
11+
class COBYLA(BaseOptimizer):
    """COBYLA: Constrained Optimization BY Linear Approximation.

    Reference:
        Powell, M.J.D. (1994). A Direct Search Optimization Method That
        Models the Objective and Constraint Functions by Linear
        Interpolation. In: Gomez, S., Hennart, JP. (eds) Advances in
        Optimization and Numerical Analysis. Mathematics and Its
        Applications, vol 275. Springer, Dordrecht.
        Source: https://doi.org/10.1007/978-94-015-8330-5_4

    Parameters
    ----------
    rho_beg : float
        Initial tolerance / trust-region radius (large enough for
        coarse exploration).
    rho_end : float
        Required final tolerance.
    x_0 : np.ndarray
        Initial point for creating the first simplex.
    alpha : float, optional (default=0.25)
        Simplex-acceptability constant; must lie in the open range (0, 1).
    beta : float, optional (default=2.1)
        Simplex-acceptability constant; must be greater than 1.

    Examples
    --------
    >>> import numpy as np
    >>> from gradient_free_optimizers import COBYLA
    >>>
    >>> def sphere_function(para):
    ...     x = para[0]
    ...     y = para[1]
    ...     return -(x * x + y * y)
    >>>
    >>> def constraint_1(para):
    ...     return para[0] > -5
    >>>
    >>> search_space = {
    ...     "x": np.arange(-10, 10, 0.1),
    ...     "y": np.arange(-10, 10, 0.1),
    ... }
    >>>
    >>> opt = COBYLA(
    ...     search_space=search_space,
    ...     rho_beg=1.0,
    ...     rho_end=0.01,
    ...     x_0=np.array([0.0, 0.0]),
    ...     constraints=[constraint_1]
    ... )
    >>> opt.search(sphere_function, n_iter=10)
    """

    _tags = {
        "authors": ["SimonBlanke", "yashnator"],
    }

    def __init__(
        self,
        search_space,
        x_0: np.ndarray,
        rho_beg: float,
        rho_end: float,
        initialize={"grid": 4, "random": 2, "vertices": 4},
        constraints=[],
        random_state=None,
        rand_rest_p=0,
        nth_process=None,
        alpha=0.25,
        beta=2.1,
    ):
        # Validate hyper-parameters first so invalid input fails fast,
        # before the base-class setup and simplex construction run.
        if alpha <= 0 or alpha >= 1:
            raise ValueError("Parameter alpha must belong to the range (0, 1)")
        if beta <= 1:
            raise ValueError("Parameter beta must belong to the range (1, ∞)")

        super().__init__(
            search_space=search_space,
            initialize=initialize,
            constraints=constraints,
            random_state=random_state,
            rand_rest_p=rand_rest_p,
            nth_process=nth_process,
        )

        self.dim = np.shape(x_0)[0]
        self.simplex = self._generate_initial_simplex(x_0, rho_beg)
        self.rho_beg = rho_beg
        self.rho_end = rho_end
        self._rho = rho_beg  # current trust-region radius
        self.state = 0
        self.FLAG = 0
        self.alpha = alpha
        self.beta = beta

    def _generate_initial_simplex(self, x_0_initial, rho_beg):
        """Build the initial (n+1)-vertex simplex around ``x_0_initial``.

        Vertex i (for i < n) is ``x_0 + rho_beg * e_i``; the final row of
        ``np.eye(n + 1, n)`` is all zeros, so the last vertex is ``x_0``
        itself.  (Debug ``print`` removed.)
        """
        n = x_0_initial.shape[0]
        return np.ones((n + 1, 1)) * x_0_initial + rho_beg * np.eye(n + 1, n)

    def _vertices_to_opposite_face_distances(self):
        """Distances from each simplex vertex to its opposite face.

        For each vertex, the opposite hyperplane is obtained by removing
        the current vertex, projecting it onto the subspace spanned by
        the remaining face, and taking the L2 norm of the residual.

        Returns
        -------
        distances : np.ndarray, shape (n+1,)
            Distance from each vertex to its opposite (n-1)-dim face.
        """
        distances = np.zeros(self.dim + 1)
        for j in range(self.dim + 1):
            face_vertices = np.delete(self.simplex, j, axis=0)
            start_vertex = face_vertices[0]
            # Columns of A span the face; b is the vertex relative to it.
            A = np.stack([v - start_vertex for v in face_vertices[1:]], axis=1)
            b = self.simplex[j] - start_vertex
            # Least-squares projection of b onto the column space of A.
            proj_j = A @ np.linalg.solve(A.T @ A, A.T @ b)
            distances[j] = np.linalg.norm(b - proj_j)
        return distances

    def _is_simplex_acceptable(self):
        """Check Powell's simplex-acceptability conditions.

        The simplex is acceptable when every edge length from vertex 0 is
        at most ``beta * rho`` and every vertex-to-opposite-face distance
        is at least ``alpha * rho``.  (Debug ``print`` removed.)
        """
        eta_limit = self.beta * self._rho
        for vertex in self.simplex:
            if np.linalg.norm(vertex - self.simplex[0]) > eta_limit:
                return False

        sigma_limit = self.alpha * self._rho
        for sigma_j in self._vertices_to_opposite_face_distances():
            if sigma_j < sigma_limit:
                return False
        return True

    def _eval_constraints(self, pos):
        # TODO: evaluate constraints in an optimized way
        return None

    def _merit_value(self, pos):
        # TODO: write the merit function using _eval_constraints
        return 0

    def iterate(self):
        # TODO: implement the COBYLA iteration step
        return self.simplex[0]

    def _score(self, pos):
        # TODO: implement scoring
        return 0.0

    def evaluate(self, pos):
        # TODO: implement evaluation
        return self._merit_value(pos)

    def finish_search(self):
        # TODO: implement search finalization
        return None

src/gradient_free_optimizers/optimizers/local_opt/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,13 @@
88
from .repulsing_hill_climbing_optimizer import RepulsingHillClimbingOptimizer
99
from .simulated_annealing import SimulatedAnnealingOptimizer
1010
from .downhill_simplex import DownhillSimplexOptimizer
11+
from .COBYLA import COBYLA
1112

1213
__all__ = [
1314
"HillClimbingOptimizer",
1415
"StochasticHillClimbingOptimizer",
1516
"RepulsingHillClimbingOptimizer",
1617
"SimulatedAnnealingOptimizer",
1718
"DownhillSimplexOptimizer",
19+
"COBYLA"
1820
]

0 commit comments

Comments
 (0)