@@ -52,9 +52,6 @@
     bayes_opt,
     bayes_opt_alt_BOTorch,
     bayes_opt_BOTorch,
-    bayes_opt_BOTorch_transfer_direct,
-    bayes_opt_BOTorch_transfer_RGPE,
-    bayes_opt_BOTorch_transfer_weighted,
     bayes_opt_GPyTorch,
     bayes_opt_GPyTorch_lean,
     bayes_opt_old,
@@ -94,9 +91,6 @@
     "bayes_opt_GPyTorch_lean": bayes_opt_GPyTorch_lean,
     "bayes_opt_BOTorch": bayes_opt_BOTorch,
     "bayes_opt_BOTorch_alt": bayes_opt_alt_BOTorch,
-    "bayes_opt_BOTorch_transfer_direct": bayes_opt_BOTorch_transfer_direct,
-    "bayes_opt_BOTorch_transfer_weighted": bayes_opt_BOTorch_transfer_weighted,
-    "bayes_opt_BOTorch_transfer_RGPE": bayes_opt_BOTorch_transfer_RGPE,
 }


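The two hunks above drop the BoTorch transfer-learning strategies from both the import list and the name-to-module registry. For context, the registry is what turns a user-supplied strategy name into a concrete strategy module. A minimal, self-contained sketch of that dispatch pattern follows; the `run_strategy` helper, its error handling, and the assumed module-level `tune()` entry point are illustrative, not kernel_tuner's exact API.

```python
# Minimal sketch of name-based strategy dispatch, mirroring the registry above.
# The run_strategy() helper and the tune() entry point are illustrative assumptions.
from types import ModuleType

strategy_map: dict[str, ModuleType] = {
    # "bayes_opt_BOTorch": bayes_opt_BOTorch,  # entries as in the diff above
}

def run_strategy(name: str, *args, **kwargs):
    """Look up a strategy by name and delegate to its module-level tune()."""
    try:
        strategy = strategy_map[name]
    except KeyError:
        raise ValueError(f"unknown strategy {name!r}, expected one of {sorted(strategy_map)}") from None
    return strategy.tune(*args, **kwargs)
```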
@@ -483,15 +477,6 @@ def __deepcopy__(self, _):
             "string",
         ),
     ),
-    (
-        "transfer_learning_caches",
-        (
-            """Array of filepaths to caches to use for transfer learning.
-            Filename uses suffix ".json", which is appended if missing.
-            """,
-            "list(string) or list(Path)",
-        ),
-    ),
     ("metrics", ("specifies user-defined metrics, please see :ref:`metrics`.", "dict")),
     ("simulation_mode", ("Simulate an auto-tuning search from an existing cachefile", "bool")),
     ("observers", ("""A list of Observers to use during tuning, please see :ref:`observers`.""", "list")),
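The removed entry documented the `transfer_learning_caches` option alongside the surviving ones. Each entry in this table is a `(name, (description, type))` tuple describing a tuning option. A minimal sketch of rendering such a table into docstring lines is shown below; the `render_options` helper is illustrative only and not necessarily how kernel_tuner formats its documentation.

```python
# Minimal sketch: turning (name, (description, type)) option tuples into docstring lines.
# The entries mirror the diff above; the rendering helper is illustrative only.
options = [
    ("metrics", ("specifies user-defined metrics, please see :ref:`metrics`.", "dict")),
    ("simulation_mode", ("Simulate an auto-tuning search from an existing cachefile", "bool")),
]

def render_options(opts):
    lines = []
    for name, (description, type_str) in opts:
        lines.append(f":param {name}: {description}")
        lines.append(f":type {name}: {type_str}")
    return "\n".join(lines)

print(render_options(options))
```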
@@ -608,7 +593,6 @@ def tune_kernel(
     observers=None,
     objective=None,
     objective_higher_is_better=None,
-    transfer_learning_caches=[],
 ):
     start_overhead_time = perf_counter()
     if log:
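Incidentally, the dropped keyword argument used a mutable default (`transfer_learning_caches=[]`), which in Python is a single list shared across calls. Should the parameter return, the conventional `None` default avoids that; a short sketch with a hypothetical `tune` stand-in:

```python
# Sketch of the None-default idiom for list-valued keyword arguments,
# avoiding the shared-mutable-default pitfall of `transfer_learning_caches=[]`.
def tune(transfer_learning_caches=None):
    if transfer_learning_caches is None:
        transfer_learning_caches = []
    # ... use the per-call list safely ...
    return transfer_learning_caches

assert tune() is not tune()  # each call gets its own list
```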
@@ -710,15 +694,6 @@ def preprocess_cache(filepath):
     tuning_options.cache = {}
     tuning_options.cachefile = None
 
-    # process transfer learning caches
-    tuning_options.transfer_learning_caches = []
-    if transfer_learning_caches and len(transfer_learning_caches) > 0:
-        for transfer_learning_cache in transfer_learning_caches:
-            cache = preprocess_cache(transfer_learning_cache)
-            assert cache != tuning_options.cache, "Transfer learning cache can not be the same as current cache"
-            cache_data = util.read_cache(cache, open_cache=False)
-            tuning_options.transfer_learning_caches.append(cache_data)
-
     # create search space
     searchspace = Searchspace(tune_params, restrictions, runner.dev.max_threads)
     restrictions = searchspace._modified_restrictions
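The removed block resolved each transfer-learning cache path, refused the cache currently being tuned, and loaded the data via `util.read_cache`. A standalone sketch of the same idea using plain `json` is below; the `load_transfer_caches` helper and its suffix handling are illustrative and not kernel_tuner's `preprocess_cache`/`util.read_cache` behaviour.

```python
# Standalone sketch of loading transfer-learning caches, mirroring the removed block.
# Uses plain json instead of kernel_tuner's util.read_cache; names are illustrative.
import json
from pathlib import Path

def load_transfer_caches(cache_paths, current_cache_path):
    """Load each cache file, appending '.json' if missing and rejecting the active cache."""
    caches = []
    current = Path(current_cache_path)
    for path in map(Path, cache_paths):
        if path.suffix != ".json":
            path = Path(str(path) + ".json")  # the removed docs say ".json" is appended if missing
        if path == current:
            raise ValueError("Transfer learning cache can not be the same as current cache")
        caches.append(json.loads(path.read_text()))
    return caches
```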
@@ -912,19 +887,9 @@ def tune_kernel_T1(
     device = kernelspec["Device"]["Name"]
     strategy = inputs["Search"]["Name"]
 
-    # set the cache and transfer learning cache paths
+    # set the cache path
     if cache_filepath is None and "SimulationInput" in kernelspec:
         cache_filepath = Path(kernelspec["SimulationInput"])
-        cache_dir = Path(cache_filepath).parent
-        # TODO remove in production!
-        transfer_learning_caches = [
-            p
-            for p in cache_dir.iterdir()
-            if len(p.suffixes) > 0
-            and p.suffixes[-1].endswith(".json")
-            and not p.stem.endswith("_T4")
-            and p.name != cache_filepath.name
-        ]
 
     # get the grid divisions
     grid_divs = {}
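The removed TODO block auto-discovered every sibling `.json` cache in the same directory, skipping `_T4` output files and the cache being tuned, and used them as transfer-learning caches. A roughly equivalent standalone sketch of that filter is kept here for reference; the `find_sibling_caches` helper is illustrative only.

```python
# Standalone sketch of the removed sibling-cache discovery (reference only).
from pathlib import Path

def find_sibling_caches(cache_filepath: Path) -> list[Path]:
    """Return the other .json caches next to cache_filepath, excluding *_T4 files."""
    return [
        p
        for p in cache_filepath.parent.glob("*.json")  # only JSON cache files
        if not p.stem.endswith("_T4")                  # skip T4 output files
        and p.name != cache_filepath.name              # skip the cache being tuned
    ]
```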
@@ -1018,7 +983,6 @@ def tune_kernel_T1(
         strategy_options=strategy_options,
         objective=objective,
         objective_higher_is_better=objective_higher_is_better,
-        transfer_learning_caches=transfer_learning_caches,
     )
     if output_T4:
         return get_t4_metadata(), get_t4_results(results, tune_params, objective=objective)