Commit 096a8d4

Add separate Output classes for each optimizer
1 parent 24c0fbc commit 096a8d4

1 file changed (+61 -20 lines)

climada/util/calibrate/impact_func.py

Lines changed: 61 additions & 20 deletions
@@ -12,8 +12,10 @@
     LinearConstraint,
     NonlinearConstraint,
     minimize,
+    OptimizeResult,
 )
 from bayes_opt import BayesianOptimization
+from bayes_opt.target_space import TargetSpace

 from climada.hazard import Hazard
 from climada.entity import Exposures, ImpactFunc, ImpactFuncSet
@@ -117,18 +119,52 @@ class Output:
         The optimal parameters
     target : Number
         The target function value for the optimal parameters
-    success : bool
-        If the calibration succeeded. The definition depends on the actual optimization
-        algorithm used.
-    result
-        A result object specific to the optimization algorithm used. See the optimizer
-        documentation for details.
     """

     params: Mapping[str, Number]
     target: Number
-    success: bool
-    result: Optional[Any] = None
+
+
+@dataclass
+class ScipyMinimizeOptimizerOutput(Output):
+    """Output of a calibration with :py:class:`ScipyMinimizeOptimizer`
+
+    Attributes
+    ----------
+    result : scipy.optimize.OptimizeResult
+        The OptimizeResult instance returned by ``scipy.optimize.minimize``.
+    """
+
+    result: OptimizeResult
+
+
+@dataclass
+class BayesianOptimizerOutput(Output):
+    """Output of a calibration with :py:class:`BayesianOptimizer`
+
+    Attributes
+    ----------
+    p_space : bayes_opt.target_space.TargetSpace
+        The parameter space sampled by the optimizer.
+    """
+
+    p_space: TargetSpace
+
+    def p_space_to_dataframe(self):
+        """Return the sampled parameter space as pandas.DataFrame
+
+        Returns
+        -------
+        pandas.DataFrame
+            Data frame whose columns are the parameter values and the associated target
+            function value (``target``) and whose rows are the optimizer iterations.
+        """
+        data = {
+            self.p_space.keys[i]: self.p_space.params[..., i]
+            for i in range(self.p_space.dim)
+        }
+        data["target"] = self.p_space.target
+        return pd.DataFrame.from_dict(data)


 @dataclass
@@ -223,7 +259,7 @@ class ScipyMinimizeOptimizer(Optimizer):
     """An optimization using scipy.optimize.minimize

     By default, this optimizer uses the ``"trust-constr"`` method. This
-    is advertised as the most general minimization method of the ``scipy`` pacjage and
+    is advertised as the most general minimization method of the ``scipy`` package and
     supports bounds and constraints on the parameters. Users are free to choose
     any method of the catalogue, but must be aware that they might require different
     input parameters. These can be supplied via additional keyword arguments to
@@ -267,9 +303,10 @@ def run(self, **opt_kwargs) -> Output:

         Returns
         -------
-        output : Output
-            The output of the optimization. The :py:attr:`Output.result` attribute
-            stores the associated ``scipy.optimize.OptimizeResult`` instance.
+        output : ScipyMinimizeOptimizerOutput
+            The output of the optimization. The
+            :py:attr:`ScipyMinimizeOptimizerOutput.result` attribute stores the
+            associated ``scipy.optimize.OptimizeResult`` instance.
         """
         # Parse kwargs
         params_init = opt_kwargs.pop("params_init")
@@ -296,12 +333,17 @@ def run(self, **opt_kwargs) -> Output:
         )

         params = dict(zip(self._param_names, res.x.flat))
-        return Output(params=params, target=res.fun, success=res.success, result=res)
+        return ScipyMinimizeOptimizerOutput(params=params, target=res.fun, result=res)


 @dataclass
 class BayesianOptimizer(Optimizer):
-    """An optimization using bayes_opt.BayesianOptimization
+    """An optimization using ``bayes_opt.BayesianOptimization``
+
+    This optimizer reports the target function value for each parameter set and
+    *maximizes* that value. Therefore, a higher target function value is better.
+    The cost function, however, is still minimized: The target function is defined as
+    the inverse of the cost function.

     For details on the underlying optimizer, see
     https://github.com/bayesian-optimization/BayesianOptimization.
@@ -374,9 +416,9 @@ def _target_func(self, impact: Impact, data: pd.DataFrame) -> Number:
     def run(self, **opt_kwargs):
         """Execute the optimization

-        Implementation detail: ``BayesianOptimization`` *maximizes* a target function.
-        Therefore, this class inverts the cost function and used that as target
-        function. The cost function is still minimized.
+        ``BayesianOptimization`` *maximizes* a target function. Therefore, this class
+        inverts the cost function and uses that as the target function. The cost
+        function is still minimized.

         Parameters
         ----------
@@ -405,9 +447,8 @@ def run(self, **opt_kwargs):

         # Return output
         opt = self.optimizer.max
-        return Output(
+        return BayesianOptimizerOutput(
             params=opt["params"],
             target=opt["target"],
-            success=True,
-            result=self.optimizer,
+            p_space=self.optimizer.space,
         )
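
Note (not part of the commit): the refactoring relies on plain dataclass inheritance. Because the base Output now declares only fields without defaults (params, target), each subclass can append its own required, optimizer-specific field. A minimal standalone sketch of that pattern; DemoOptimizerOutput and all field values below are invented for illustration:

from dataclasses import dataclass
from numbers import Number
from typing import Mapping

@dataclass
class Output:
    # Field declarations as in the refactored base class; docstrings omitted.
    params: Mapping[str, Number]
    target: Number

@dataclass
class DemoOptimizerOutput(Output):
    # Stand-in for ScipyMinimizeOptimizerOutput / BayesianOptimizerOutput:
    # each subclass adds one required, optimizer-specific field.
    result: dict

out = DemoOptimizerOutput(params={"scale": 1.2}, target=0.03, result={"success": True})
print(out.params, out.target, out.result)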

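Note (not part of the commit): with the success field removed from Output, convergence information for the scipy-based optimizer lives entirely in the OptimizeResult stored on ScipyMinimizeOptimizerOutput.result. A rough sketch of what such an object exposes, using a toy quadratic cost function and made-up parameter names instead of the actual calibration cost:

import numpy as np
from scipy.optimize import OptimizeResult, minimize

# Toy quadratic cost with a known minimum at (1, 2); stands in for the
# calibration cost function, which this sketch does not reproduce.
def cost(x):
    return float(np.sum((x - np.array([1.0, 2.0])) ** 2))

# "trust-constr" is the default method used by ScipyMinimizeOptimizer.
res: OptimizeResult = minimize(cost, x0=np.zeros(2), method="trust-constr")

# The full OptimizeResult is what ScipyMinimizeOptimizerOutput.result stores,
# so convergence info such as res.success remains accessible there.
params = dict(zip(["param_a", "param_b"], res.x.flat))  # hypothetical names
print(res.success, res.fun, params)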
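Note (not part of the commit): the new BayesianOptimizerOutput.p_space_to_dataframe builds its table directly from the bayes_opt TargetSpace attributes keys, params, dim and target. A self-contained sketch of the same DataFrame construction on a toy two-parameter problem; the target function and bounds below are invented for illustration:

import pandas as pd
from bayes_opt import BayesianOptimization

# Hypothetical target function; bayes_opt maximizes it.
def target(x, y):
    return -((x - 0.5) ** 2) - ((y - 0.2) ** 2)

optimizer = BayesianOptimization(
    f=target, pbounds={"x": (0, 1), "y": (0, 1)}, random_state=1, verbose=0
)
optimizer.maximize(init_points=2, n_iter=3)

# Mirror p_space_to_dataframe: one column per parameter plus the associated
# target value, one row per optimizer iteration.
space = optimizer.space
data = {space.keys[i]: space.params[..., i] for i in range(space.dim)}
data["target"] = space.target
print(pd.DataFrame.from_dict(data))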