@@ -17,7 +17,7 @@ def randpath():
     path = randpath()
     return path
 
-def tune_hyper_params(target_strategy: str, hyper_params: dict, *args, **kwargs):
+def tune_hyper_params(target_strategy: str, hyper_params: dict, restrictions: list, *args, **kwargs):
     """Tune hyperparameters for a given strategy and kernel.
 
     This function is to be called just like tune_kernel, except that you specify a strategy
@@ -80,7 +80,7 @@ def put_if_not_present(target_dict, key, value):
     name = f"hyperparamtuning_{target_strategy.lower()}"
 
     # execute the hyperparameter tuning
-    result, env = kernel_tuner.tune_kernel(name, None, [], arguments, hyper_params, *args, lang='Hypertuner',
+    result, env = kernel_tuner.tune_kernel(name, None, [], arguments, hyper_params, restrictions=restrictions, *args, lang='Hypertuner',
                                            objective='score', objective_higher_is_better=True, iterations=iterations, **kwargs)
 
     # remove the temporary cachefile and return only unique results in order
@@ -99,6 +99,7 @@ def put_if_not_present(target_dict, key, value):
 strategy_to_tune = args.strategy_to_tune
 
 # select the hyperparameter parameters for the selected optimization algorithm
+restrictions = []
 if strategy_to_tune.lower() == "pso":
     hyperparams = {
         'popsize': [10, 20, 30],
@@ -169,6 +170,6 @@ def put_if_not_present(target_dict, key, value):
     raise ValueError(f"Invalid argument {strategy_to_tune=}")
 
 # run the hyperparameter tuning
-result, env = tune_hyper_params(strategy_to_tune.lower(), hyperparams)
+result, env = tune_hyper_params(strategy_to_tune.lower(), hyperparams, restrictions=restrictions)
 print(result)
 print(env['best_config'])
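
For reference, a minimal sketch of calling tune_hyper_params with the new restrictions parameter. The import path, strategy name, and hyperparameter grid below are illustrative assumptions, not part of this commit:

# Minimal usage sketch; "kernel_tuner.hyper" is an assumed import path
# and the strategy/hyperparameter values are illustrative only.
from kernel_tuner.hyper import tune_hyper_params

hyperparams = {'popsize': [10, 20, 30]}  # candidate values to search over
restrictions = []                        # no constraints on the search space

result, env = tune_hyper_params("pso", hyperparams, restrictions=restrictions)
print(env['best_config'])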