supported_searchtechniques = ["auc_bandit", "differential_evolution", "pattern_search", "round_robin", "simulated_annealing", "torczon"]

-_options = dict(searchtechnique=(f"PyATF optimization algorithm to use, choose any from {supported_searchtechniques}", "simulated_annealing"))
+_options = dict(
+    searchtechnique=(f"PyATF optimization algorithm to use, choose any from {supported_searchtechniques}", "simulated_annealing"),
+    use_searchspace_cache=(f"Use a cached search space if available, otherwise create a new one.", False)
+)

def get_cache_checksum(d: dict):
    checksum = 0
@@ -26,9 +29,13 @@ def tune(searchspace: Searchspace, runner, tuning_options):
    from pyatf.search_techniques.search_technique import SearchTechnique
    from pyatf.search_space import SearchSpace as pyATFSearchSpace
    from pyatf import TP
+
+    # get the search technique module name and whether to use search space caching
+    module_name, use_searchspace_cache = common.get_options(tuning_options.strategy_options, _options)
    try:
-        import dill
-        pyatf_search_space_caching = True
+        if use_searchspace_cache:
+            import dill
+        pyatf_search_space_caching = use_searchspace_cache
    except ImportError:
        from warnings import warn
        pyatf_search_space_caching = False
@@ -38,7 +45,6 @@ def tune(searchspace: Searchspace, runner, tuning_options):
    cost_func = CostFunc(searchspace, tuning_options, runner, scaling=False, snap=False, return_invalid=False)

    # dynamically import the search technique based on the provided options
-    module_name, = common.get_options(tuning_options.strategy_options, _options)
    module = import_module(f"pyatf.search_techniques.{module_name}")
    class_name = [d for d in dir(module) if d.lower() == module_name.replace('_', '')][0]
    searchtechnique_class = getattr(module, class_name)
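
For context, a minimal sketch of how the new use_searchspace_cache strategy option could be enabled from user code. The strategy name "pyatf", the CUDA vector-add kernel, and the tunable parameters below are illustrative assumptions, not part of this diff:

# Illustrative usage sketch (assumptions: the module is registered as the
# "pyatf" strategy; kernel, problem size, and tune_params are placeholders).
import numpy as np
import kernel_tuner

kernel_string = """
__global__ void vector_add(float *c, const float *a, const float *b, int n) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n) {
        c[i] = a[i] + b[i];
    }
}
"""

size = 1000000
a = np.random.randn(size).astype(np.float32)
b = np.random.randn(size).astype(np.float32)
c = np.zeros_like(a)
n = np.int32(size)

tune_params = {"block_size_x": [32, 64, 128, 256, 512]}

results, env = kernel_tuner.tune_kernel(
    "vector_add", kernel_string, size, [c, a, b, n], tune_params,
    strategy="pyatf",  # assumed registration name of this strategy module
    strategy_options={
        # any entry from supported_searchtechniques defined above
        "searchtechnique": "simulated_annealing",
        # the new option introduced in this diff; caching relies on dill
        "use_searchspace_cache": True,
    },
)

With use_searchspace_cache left at its default of False, the changed code skips the dill import entirely and builds the PyATF search space from scratch, instead of only falling back when dill is missing as before.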