Skip to content

Commit 840f880

Browse files
keep SonarQube happy
1 parent c65cec3 commit 840f880

File tree

1 file changed

+15
-15
lines changed

1 file changed

+15
-15
lines changed

kernel_tuner/strategies/bayes_opt.py

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -28,19 +28,10 @@
2828

2929
# _options dict is used for generating documentation, but is not used to check for unsupported strategy_options in bayes_opt
3030
_options = dict(
31-
covariancekernel=(
32-
'The Covariance kernel to use, choose any from "constantrbf", "rbf", "matern32", "matern52"',
33-
"matern32",
34-
),
31+
covariancekernel=('The Covariance kernel to use, choose any from "constantrbf", "rbf", "matern32", "matern52"', "matern32"),
3532
covariancelengthscale=("The covariance length scale", 1.5),
36-
method=(
37-
"The Bayesian Optimization method to use, choose any from " + ", ".join(supported_methods),
38-
"multi-ultrafast",
39-
),
40-
samplingmethod=(
41-
"Method used for initial sampling the parameter space, either random or Latin Hypercube Sampling (LHS)",
42-
"lhs",
43-
),
33+
method=("The Bayesian Optimization method to use, choose any from " + ", ".join(supported_methods), "multi-ultrafast"),
34+
samplingmethod=("Method used for initial sampling the parameter space, either random or Latin Hypercube Sampling (LHS)", "lhs"),
4435
popsize=("Number of initial samples", 20),
4536
)
4637

@@ -110,7 +101,7 @@ def tune(searchspace: Searchspace, runner, tuning_options):
110101
:rtype: list(dict()), dict()
111102
112103
"""
113-
# we don't actually use this for Bayesian Optimization, but it is used to check for unsupported options
104+
# we don't actually use this for Bayesian Optimization, but it is used to check for unsupported options
114105
get_options(tuning_options.strategy_options, _options, unsupported=["x0"])
115106

116107
max_fevals = tuning_options.strategy_options.get("max_fevals", 100)
@@ -145,7 +136,13 @@ def tune(searchspace: Searchspace, runner, tuning_options):
145136
# initialize and optimize
146137
try:
147138
bo = BayesianOptimization(
148-
parameter_space, searchspace, removed_tune_params, tuning_options, normalize_dict, denormalize_dict, cost_func
139+
parameter_space,
140+
searchspace,
141+
removed_tune_params,
142+
tuning_options,
143+
normalize_dict,
144+
denormalize_dict,
145+
cost_func,
149146
)
150147
except StopCriterionReached:
151148
warnings.warn(
@@ -851,7 +848,10 @@ def __optimize_multi_ultrafast(self, max_fevals, predict_eval_ratio=5):
851848
while self.fevals < max_fevals:
852849
aqfs = self.multi_afs
853850
# if we take the prediction only once, we want to go from most exploiting to most exploring, because the more exploiting an AF is, the more it relies on non-stale information from the model
854-
fit_observations = last_prediction_time * predict_eval_ratio <= last_eval_time or last_prediction_counter >= predict_eval_ratio
851+
fit_observations = (
852+
last_prediction_time * predict_eval_ratio <= last_eval_time
853+
or last_prediction_counter >= predict_eval_ratio
854+
)
855855
if fit_observations:
856856
last_prediction_counter = 0
857857
pred_start = time.perf_counter()

0 commit comments

Comments
 (0)