Skip to content

Commit 24c0fbc

Browse files
committed
Add more docstrings and simplify imports through __init__
1 parent d321832 commit 24c0fbc

File tree

2 files changed

+94
-4
lines changed

2 files changed

+94
-4
lines changed

climada/util/calibrate/__init__.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
"""Impact function calibration module"""
2+
3+
from .impact_func import (
4+
Input,
5+
ScipyMinimizeOptimizer,
6+
BayesianOptimizer,
7+
cost_func_rmse,
8+
)

climada/util/calibrate/impact_func.py

Lines changed: 86 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -220,7 +220,24 @@ def run(self, **opt_kwargs) -> Output:
220220

221221
@dataclass
222222
class ScipyMinimizeOptimizer(Optimizer):
223-
"""An optimization using scipy.optimize.minimize"""
223+
"""An optimization using scipy.optimize.minimize
224+
225+
By default, this optimizer uses the ``"trust-constr"`` method. This
226+
is advertised as the most general minimization method of the ``scipy`` package and
227+
supports bounds and constraints on the parameters. Users are free to choose
228+
any method of the catalogue, but must be aware that they might require different
229+
input parameters. These can be supplied via additional keyword arguments to
230+
:py:meth:`run`.
231+
232+
See https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html
233+
for details.
234+
235+
Parameters
236+
----------
237+
input : Input
238+
The input data for this optimizer. Supported data types for
239+
:py:attr:`constraint` might vary depending on the minimization method used.
240+
"""
224241

225242
def __post_init__(self):
226243
"""Create a private attribute for storing the parameter names"""
@@ -284,7 +301,47 @@ def run(self, **opt_kwargs) -> Output:
284301

285302
@dataclass
286303
class BayesianOptimizer(Optimizer):
287-
"""An optimization using bayes_opt.BayesianOptimization"""
304+
"""An optimization using bayes_opt.BayesianOptimization
305+
306+
For details on the underlying optimizer, see
307+
https://github.com/bayesian-optimization/BayesianOptimization.
308+
309+
Parameters
310+
----------
311+
input : Input
312+
The input data for this optimizer. See the Notes below for input requirements.
313+
verbose : int, optional
314+
Verbosity of the optimizer output. Defaults to 1.
315+
random_state : int, optional
316+
Seed for initializing the random number generator. Defaults to 1.
317+
allow_duplicate_points : bool, optional
318+
Allow the optimizer to sample the same points in parameter space multiple times.
319+
This may happen if the parameter space is tightly bound or constrained. Defaults
320+
to ``True``.
321+
bayes_opt_kwds : dict
322+
Additional keyword arguments passed to the ``BayesianOptimization`` constructor.
323+
324+
Notes
325+
-----
326+
The following requirements apply to the parameters of :py:class:`Input` when using
327+
this class:
328+
329+
bounds
330+
Setting ``bounds`` in the ``Input`` is required because the optimizer first
331+
"explores" the bound parameter space and then narrows its search to regions
332+
where the cost function is low.
333+
constraints
334+
Must be an instance of ``scipy.optimize.LinearConstraint`` or
335+
``scipy.optimize.NonlinearConstraint``. See
336+
https://github.com/bayesian-optimization/BayesianOptimization/blob/master/examples/constraints.ipynb
337+
for further information.
338+
339+
Attributes
340+
----------
341+
optimizer : bayes_opt.BayesianOptimization
342+
The optimizer instance of this class. Will be returned by :py:meth:`run` in
343+
:py:attr:`Output.result`.
344+
"""
288345

289346
verbose: InitVar[int] = 1
290347
random_state: InitVar[int] = 1
@@ -298,6 +355,9 @@ def __post_init__(
298355
if bayes_opt_kwds is None:
299356
bayes_opt_kwds = {}
300357

358+
if self.input.bounds is None:
359+
raise ValueError("Input.bounds is required for this optimizer")
360+
301361
self.optimizer = BayesianOptimization(
302362
f=self._opt_func,
303363
pbounds=self.input.bounds,
@@ -306,13 +366,35 @@ def __post_init__(
306366
allow_duplicate_points=allow_duplicate_points,
307367
**bayes_opt_kwds,
308368
)
309-
369+
310370
def _target_func(self, impact: Impact, data: pd.DataFrame) -> Number:
311371
"""Invert the cost function because BayesianOptimization maximizes the target"""
312372
return 1 / self.input.cost_func(impact, data)
313373

314374
def run(self, **opt_kwargs):
315-
"""Execute the optimization"""
375+
"""Execute the optimization
376+
377+
Implementation detail: ``BayesianOptimization`` *maximizes* a target function.
378+
Therefore, this class inverts the cost function and uses that as the target
379+
function. The cost function is still minimized.
380+
381+
Parameters
382+
----------
383+
init_points : int, optional
384+
Number of initial samples taken from the parameter space. Defaults to 10^N,
385+
where N is the number of parameters.
386+
n_iter : int, optional
387+
Number of iteration steps after initial sampling. Defaults to 10^N, where N
388+
is the number of parameters.
389+
opt_kwargs
390+
Further keyword arguments passed to ``BayesianOptimization.maximize``.
391+
392+
Returns
393+
-------
394+
output : Output
395+
Optimization output. :py:attr:`Output.result` will be the
396+
:py:attr:`optimizer` used by this class instance.
397+
"""
316398
# Retrieve parameters
317399
num_params = len(self.input.bounds)
318400
init_points = opt_kwargs.pop("init_points", 10**num_params)

0 commit comments

Comments
 (0)