# copyright: hyperactive developers, MIT License (see LICENSE file)

"""COBYLA optimizer: Constrained Optimization BY Linear Approximation (Powell, 1994)."""

__author__ = ["SimonBlanke", "yashnator"]

from ..base_optimizer import BaseOptimizer

import numpy as np

class COBYLA(BaseOptimizer):
    """COBYLA: Constrained Optimization BY Linear Approximation.

    Maintains a simplex of ``dim + 1`` vertices around an initial point and
    (once the TODO stubs below are implemented) models the objective and
    constraint functions by linear interpolation over that simplex.

    Reference:
        Powell, M.J.D. (1994). A Direct Search Optimization Method That Models
        the Objective and Constraint Functions by Linear Interpolation.
        In: Gomez, S., Hennart, JP. (eds) Advances in Optimization and
        Numerical Analysis. Mathematics and Its Applications, vol 275.
        Springer, Dordrecht.
        Source: https://doi.org/10.1007/978-94-015-8330-5_4

    Parameters
    ----------
    search_space : dict
        Mapping of parameter names to value arrays; forwarded to the base class.
    x_0 : np.ndarray
        Initial point used to build the starting simplex.
    rho_beg : float
        Initial trust-region radius (large enough for coarse exploration).
    rho_end : float
        Required final tolerance; the radius shrinks towards this value.
    initialize : dict, optional
        Initialization spec forwarded to the base class
        (default ``{"grid": 4, "random": 2, "vertices": 4}``).
    constraints : list, optional
        Constraint callables forwarded to the base class (default: no constraints).
    random_state, rand_rest_p, nth_process :
        Forwarded unchanged to the base optimizer.
    alpha : float, optional (default=0.25)
        Simplex-acceptability lower factor; must lie in the open interval (0, 1).
    beta : float, optional (default=2.1)
        Simplex-acceptability upper factor; must be strictly greater than 1.

    Raises
    ------
    ValueError
        If ``alpha`` is outside (0, 1) or ``beta`` is not greater than 1.

    Examples
    --------
    >>> import numpy as np
    >>> from gradient_free_optimizers import COBYLA
    >>>
    >>> def sphere_function(para: np.ndarray):
    ...     x = para[0]
    ...     y = para[1]
    ...     return -(x * x + y * y)
    >>>
    >>> def constraint_1(para):
    ...     return para[0] > -5
    >>>
    >>> search_space = {
    ...     "x": np.arange(-10, 10, 0.1),
    ...     "y": np.arange(-10, 10, 0.1),
    ... }
    >>>
    >>> opt = COBYLA(
    ...     search_space=search_space,
    ...     rho_beg=1.0,
    ...     rho_end=0.01,
    ...     x_0=np.array([0.0, 0.0]),
    ...     constraints=[constraint_1]
    ... )
    >>> opt.search(sphere_function, n_iter=10)
    """

    _tags = {
        "authors": ["SimonBlanke", "yashnator"],
    }

    def __init__(
        self,
        search_space,
        x_0: np.ndarray,
        rho_beg: float,
        rho_end: float,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0,
        nth_process=None,
        alpha=0.25,
        beta=2.1,
    ):
        # None sentinels instead of mutable default arguments, which would be
        # shared across every instance of the class.
        if initialize is None:
            initialize = {"grid": 4, "random": 2, "vertices": 4}
        if constraints is None:
            constraints = []

        # Fail fast: validate hyperparameters before any setup work is done.
        if not 0 < alpha < 1:
            raise ValueError("Parameter alpha must belong to the range (0, 1)")
        if beta <= 1:
            raise ValueError("Parameter beta must belong to the range (1, ∞)")

        super().__init__(
            search_space=search_space,
            initialize=initialize,
            constraints=constraints,
            random_state=random_state,
            rand_rest_p=rand_rest_p,
            nth_process=nth_process,
        )

        self.dim = np.shape(x_0)[0]
        self.simplex = self._generate_initial_simplex(x_0, rho_beg)
        self.rho_beg = rho_beg
        self.rho_end = rho_end
        self._rho = rho_beg  # current trust-region radius
        self.state = 0
        self.FLAG = 0
        self.alpha = alpha
        self.beta = beta

    def _generate_initial_simplex(self, x_0_initial, rho_beg):
        """Build the (n+1, n) starting simplex around ``x_0_initial``.

        ``eye(n + 1, n)`` has a unit entry in rows 0..n-1 and an all-zero
        last row, so vertex ``j < n`` is ``x_0 + rho_beg * e_j`` and vertex
        ``n`` is ``x_0`` itself.
        """
        n = x_0_initial.shape[0]
        # Broadcast x_0 to every row, then offset one coordinate per row.
        return np.ones((n + 1, 1)) * x_0_initial + rho_beg * np.eye(n + 1, n)

    def _vertices_to_opposite_face_distances(self):
        """
        Compute the distances from each vertex of an n-dimensional-simplex
        to the opposite (n-1)-dimensional face.

        For each vertex, the opposite hyperplane is obtained after removing
        the current vertex and then finding the projection on the subspace
        spanned by the hyperplane. The distance is then the L2 norm between
        projection and the current vertex.

        Returns:
            distances: (n+1,) array of distances from each vertex to its
                opposite face.
        """
        distances = np.zeros(self.dim + 1)
        for j in range(self.dim + 1):
            face_vertices = np.delete(self.simplex, j, axis=0)
            start_vertex = face_vertices[0]
            # Columns of A span the affine hull of the opposite face.
            A = np.stack([v - start_vertex for v in face_vertices[1:]], axis=1)
            b = self.simplex[j] - start_vertex

            # Orthogonal projection of b onto col(A) via the normal
            # equations; the residual norm is the vertex-to-face distance.
            proj_j = A @ np.linalg.solve(A.T @ A, A.T @ b)
            distances[j] = np.linalg.norm(b - proj_j)
        return distances

    # Backward-compatible alias for the previous (misspelled) method name.
    _vertices_to_oppsite_face_distances = _vertices_to_opposite_face_distances

    def _is_simplex_acceptable(self):
        """Check Powell's acceptability conditions for the current simplex.

        Acceptable iff every edge from vertex 0 has length at most
        ``beta * rho`` and every vertex-to-opposite-face distance is at
        least ``alpha * rho``.
        """
        eta_limit = self.beta * self._rho
        for vertex in self.simplex:
            if np.linalg.norm(vertex - self.simplex[0]) > eta_limit:
                return False

        sigma_limit = self.alpha * self._rho
        sigma = self._vertices_to_opposite_face_distances()
        return bool(np.all(sigma >= sigma_limit))

    def _eval_constraints(self, pos):
        """Evaluate the constraint functions at ``pos``.

        TODO: evaluate constraints in an optimized way; currently a stub.
        """
        return None

    def _merit_value(self, pos):
        """Merit function combining objective and constraint violation.

        TODO: implement using ``_eval_constraints``; currently returns 0.
        """
        return 0

    def iterate(self):
        """Produce the next candidate position.

        TODO: implement; currently returns the first simplex vertex.
        """
        return self.simplex[0]

    def _score(self, pos):
        """Score a position. TODO: implement; currently returns 0.0."""
        return 0.0

    def evaluate(self, pos):
        """Evaluate a position through the merit function (stub)."""
        return self._merit_value(pos)

    def finish_search(self):
        """Finalize the search. TODO: implement; currently a no-op."""
        return None