
Commit ca1424b

timruhkopf and benjamc authored
[doc] #1038, #987 added warm starting using ask and tell (#1120)
* [doc] #1038, #987 added warm starting using ask and tell as a doc example (didn't warrant its own advanced_usage example)
* [doc] addendum: initial design, warm start and n_trial explanation.
* Format
* Format and fix ConfigSpace deprecation
* Fix typos
* Fix merge
* Update warmstarting example
* Update index
* Update CHANGELOG.md
---------
Co-authored-by: timruhkopf <[email protected]>
Co-authored-by: benjamc <[email protected]>
1 parent 53fdea2 commit ca1424b

File tree

5 files changed: +210 -2 lines changed


CHANGELOG.md

Lines changed: 5 additions & 1 deletion
@@ -7,10 +7,14 @@
 ## Dependencies
 - Allow numpy >= 2.x (#1146)
 
+# Examples
+- Add warmstarting example (#1120)
+
 # 2.2.0
 
-## Features
+## Examples
 - Add example to specify total budget (fidelity units) instead of n_trials for multi-fidelity/Hyperband (#1121)
+- Add example for warmstarting (#1120)
 
 ## Dependencies
 - Update numpy NaN (#1122) and restrict numpy
docs/advanced_usage/5.1_warmstarting.rst

Lines changed: 120 additions & 0 deletions
@@ -0,0 +1,120 @@
Warmstarting SMAC
======================================

With the ask and tell interface, we can support warmstarting SMAC. We can communicate rich
information about the previous trials to SMAC using `TrialInfo` and `TrialValue` instances.

We can communicate using the following objects:

.. code-block:: python

    class TrialValue:
        """Values of a trial.

        Parameters
        ----------
        cost : float | list[float]
        time : float, defaults to 0.0
        status : StatusType, defaults to StatusType.SUCCESS
        starttime : float, defaults to 0.0
        endtime : float, defaults to 0.0
        additional_info : dict[str, Any], defaults to {}
        """

    class TrialInfo:
        """Information about a trial.

        Parameters
        ----------
        config : Configuration
        instance : str | None, defaults to None
        seed : int | None, defaults to None
        budget : float | None, defaults to None
        """

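As a minimal sketch of how these objects pair up (assuming a `smac` facade and a `config` drawn
from its configuration space already exist), a single finished trial could be reported via `tell`:

.. code-block:: python

    # Sketch only: `smac` and `config` are assumed to already exist.
    info = TrialInfo(config=config, seed=0)
    value = TrialValue(cost=1.23, time=0.05)
    smac.tell(info, value)
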
Usage Example
~~~~~~~~~~~~~
See `examples/1_basics/8_warmstart.py`.


.. code-block:: python

    from __future__ import annotations

    from ConfigSpace import Configuration, ConfigurationSpace, Float

    from smac.facade import HyperparameterOptimizationFacade
    from smac.runhistory.dataclasses import TrialInfo, TrialValue
    from smac.scenario import Scenario


    class Rosenbrock2D:
        @property
        def configspace(self) -> ConfigurationSpace:
            cs = ConfigurationSpace(seed=0)
            x0 = Float("x0", (-5, 10), default=-3)
            x1 = Float("x1", (-5, 10), default=-4)
            cs.add([x0, x1])

            return cs

        def evaluate(self, config: Configuration, seed: int = 0) -> float:
            """The 2-dimensional Rosenbrock function as a toy model.

            The Rosenbrock function is well known in the optimization community and
            often serves as a toy problem. It can be defined for arbitrary
            dimensions. The minimum is always at x_i = 1 with a function value of
            zero. All input parameters are continuous. The search domain for
            all x's is the interval [-5, 10].
            """
            x1 = config["x0"]
            x2 = config["x1"]

            cost = 100.0 * (x2 - x1**2.0) ** 2.0 + (1 - x1) ** 2.0
            return cost


    if __name__ == "__main__":
        SEED = 12345
        task = Rosenbrock2D()

        # Previously evaluated configurations; they must belong to the task's configuration space
        configurations = [
            Configuration(task.configspace, {"x0": 1, "x1": 2}),
            Configuration(task.configspace, {"x0": -1, "x1": 3}),
            Configuration(task.configspace, {"x0": 5, "x1": 5}),
        ]
        costs = [task.evaluate(c, seed=SEED) for c in configurations]

        # Define the optimization problem and budget
        scenario = Scenario(task.configspace, deterministic=False, n_trials=30)
        intensifier = HyperparameterOptimizationFacade.get_intensifier(scenario, max_config_calls=1)
        smac = HyperparameterOptimizationFacade(
            scenario,
            task.evaluate,
            intensifier=intensifier,
            overwrite=True,
            # Replace the default initial design with the previously evaluated configurations
            initial_design=HyperparameterOptimizationFacade.get_initial_design(
                scenario,
                n_configs=0,  # Do not use the default initial design
                additional_configs=configurations,  # Use the previously evaluated configurations as initial design
                # This only passes the configurations, not their costs!
                # To actually reuse the pre-evaluated trials, we also need to
                # tell SMAC their results, as done below.
            ),
        )

        # Convert the previously evaluated configurations into TrialInfo and TrialValue instances
        trial_infos = [TrialInfo(config=c, seed=SEED) for c in configurations]
        trial_values = [TrialValue(cost=c) for c in costs]

        # Warmstart SMAC with the trial information and values
        for info, value in zip(trial_infos, trial_values):
            smac.tell(info, value)

        # Optimize as usual
        smac.optimize()

For more details on ask and tell, consult `advanced_usage/5_ask_and_tell`.
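
As a rough sketch of that interface (reusing `smac` and `task` from the example above), each
iteration asks SMAC for a `TrialInfo`, evaluates it, and reports a `TrialValue` back:

.. code-block:: python

    # Sketch only: a manual ask-and-tell loop instead of calling smac.optimize()
    for _ in range(10):
        info = smac.ask()                                  # SMAC suggests a TrialInfo
        cost = task.evaluate(info.config, seed=info.seed)  # we run the trial ourselves
        smac.tell(info, TrialValue(cost=cost))             # and report the result back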

docs/advanced_usage/index.rst

Lines changed: 1 addition & 0 deletions
@@ -18,6 +18,7 @@ Navigation
 3_multi_objective
 4_instances
 5_ask_and_tell
+5.1_warmstarting
 6_commandline
 7_stopping_criteria
 8_logging

examples/1_basics/8_warmstart.py

Lines changed: 84 additions & 0 deletions
@@ -0,0 +1,84 @@
"""
Warmstarting SMAC
======================================

With the ask and tell interface, we can support warmstarting SMAC. We can communicate rich
information about the previous trials to SMAC using `TrialInfo` and `TrialValue` instances.
For more details on ask and tell, consult `advanced_usage/5_ask_and_tell`.
"""
from __future__ import annotations

from ConfigSpace import Configuration, ConfigurationSpace, Float

from smac.facade import HyperparameterOptimizationFacade
from smac.runhistory.dataclasses import TrialInfo, TrialValue
from smac.scenario import Scenario


class Rosenbrock2D:
    @property
    def configspace(self) -> ConfigurationSpace:
        cs = ConfigurationSpace(seed=0)
        x0 = Float("x0", (-5, 10), default=-3)
        x1 = Float("x1", (-5, 10), default=-4)
        cs.add([x0, x1])

        return cs

    def evaluate(self, config: Configuration, seed: int = 0) -> float:
        """The 2-dimensional Rosenbrock function as a toy model.

        The Rosenbrock function is well known in the optimization community and
        often serves as a toy problem. It can be defined for arbitrary
        dimensions. The minimum is always at x_i = 1 with a function value of
        zero. All input parameters are continuous. The search domain for
        all x's is the interval [-5, 10].
        """
        x1 = config["x0"]
        x2 = config["x1"]

        cost = 100.0 * (x2 - x1**2.0) ** 2.0 + (1 - x1) ** 2.0
        return cost


if __name__ == "__main__":
    SEED = 12345
    task = Rosenbrock2D()

    # Previously evaluated configurations; they must belong to the task's configuration space
    configurations = [
        Configuration(task.configspace, {"x0": 1, "x1": 2}),
        Configuration(task.configspace, {"x0": -1, "x1": 3}),
        Configuration(task.configspace, {"x0": 5, "x1": 5}),
    ]
    costs = [task.evaluate(c, seed=SEED) for c in configurations]

    # Define the optimization problem and budget
    scenario = Scenario(task.configspace, deterministic=False, n_trials=30)
    intensifier = HyperparameterOptimizationFacade.get_intensifier(scenario, max_config_calls=1)
    smac = HyperparameterOptimizationFacade(
        scenario,
        task.evaluate,
        intensifier=intensifier,
        overwrite=True,
        # Replace the default initial design with the previously evaluated configurations
        initial_design=HyperparameterOptimizationFacade.get_initial_design(
            scenario,
            n_configs=0,  # Do not use the default initial design
            additional_configs=configurations,  # Use the previously evaluated configurations as initial design
            # This only passes the configurations, not their costs!
            # To actually reuse the pre-evaluated trials, we also need to
            # tell SMAC their results, as done below.
        ),
    )

    # Convert the previously evaluated configurations into TrialInfo and TrialValue instances
    trial_infos = [TrialInfo(config=c, seed=SEED) for c in configurations]
    trial_values = [TrialValue(cost=c) for c in costs]

    # Warmstart SMAC with the trial information and values
    for info, value in zip(trial_infos, trial_values):
        smac.tell(info, value)

    # Optimize as usual
    smac.optimize()
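
A possible follow-up sketch (assuming, as in SMAC's ask-and-tell facades, that `optimize()` returns the incumbent configuration) is to capture that return value and re-evaluate it to see what the warmstarted run converged to:

# Sketch only: capture and re-evaluate the incumbent (assumes optimize() returns it)
incumbent = smac.optimize()
incumbent_cost = task.evaluate(incumbent, seed=SEED)
print(f"Incumbent {dict(incumbent)} with cost {incumbent_cost}")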

tests/test_runhistory/test_runhistory_encoder.py

Lines changed: 0 additions & 1 deletion
@@ -104,7 +104,6 @@ def test_transform(runhistory, make_scenario, configspace_small, configs):
     assert Y.tolist() != Y1.tolist()
     assert ((X <= upper) & (X >= lower)).all()
 
-
 def test_transform_conditionals(runhistory, make_scenario, configspace_large):
     scenario = make_scenario(configspace_large)
 