
Commit 0ba00a0

Removed transfer learning references
1 parent 5e65abd commit 0ba00a0

File tree: 4 files changed (+1, -656 lines)

kernel_tuner/interface.py

Lines changed: 1 addition & 37 deletions
@@ -52,9 +52,6 @@
     bayes_opt,
     bayes_opt_alt_BOTorch,
     bayes_opt_BOTorch,
-    bayes_opt_BOTorch_transfer_direct,
-    bayes_opt_BOTorch_transfer_RGPE,
-    bayes_opt_BOTorch_transfer_weighted,
     bayes_opt_GPyTorch,
     bayes_opt_GPyTorch_lean,
     bayes_opt_old,
@@ -94,9 +91,6 @@
     "bayes_opt_GPyTorch_lean": bayes_opt_GPyTorch_lean,
     "bayes_opt_BOTorch": bayes_opt_BOTorch,
     "bayes_opt_BOTorch_alt": bayes_opt_alt_BOTorch,
-    "bayes_opt_BOTorch_transfer_direct": bayes_opt_BOTorch_transfer_direct,
-    "bayes_opt_BOTorch_transfer_weighted": bayes_opt_BOTorch_transfer_weighted,
-    "bayes_opt_BOTorch_transfer_RGPE": bayes_opt_BOTorch_transfer_RGPE,
 }

@@ -483,15 +477,6 @@ def __deepcopy__(self, _):
             "string",
         ),
     ),
-    (
-        "transfer_learning_caches",
-        (
-            """Array of filepaths to caches to use for transfer learning.
-            Filename uses suffix ".json", which is appended if missing.
-            """,
-            "list(string) or list(Path)",
-        ),
-    ),
     ("metrics", ("specifies user-defined metrics, please see :ref:`metrics`.", "dict")),
     ("simulation_mode", ("Simulate an auto-tuning search from an existing cachefile", "bool")),
     ("observers", ("""A list of Observers to use during tuning, please see :ref:`observers`.""", "list")),
@@ -608,7 +593,6 @@ def tune_kernel(
     observers=None,
     objective=None,
     objective_higher_is_better=None,
-    transfer_learning_caches=[],
 ):
     start_overhead_time = perf_counter()
     if log:
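
For callers, the only visible change in this hunk is that tune_kernel no longer accepts a transfer_learning_caches keyword. A minimal call-site sketch, assuming a CUDA-capable setup; the kernel, sizes, and cache filename are placeholders, not taken from this commit:

import numpy as np
from kernel_tuner import tune_kernel

kernel_string = """
__global__ void vector_add(float *c, const float *a, const float *b, int n) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n) c[i] = a[i] + b[i];
}
"""
n = 1_000_000
a = np.random.randn(n).astype(np.float32)
b = np.random.randn(n).astype(np.float32)
c = np.zeros_like(a)
tune_params = {"block_size_x": [32, 64, 128, 256]}

# Before this commit a caller could additionally pass, e.g.,
#   transfer_learning_caches=["other_device_cache.json"]
# After it, that keyword raises a TypeError, so it is simply dropped:
results, env = tune_kernel("vector_add", kernel_string, n, [c, a, b, np.int32(n)], tune_params,
                           objective="time", objective_higher_is_better=False)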
@@ -710,15 +694,6 @@ def preprocess_cache(filepath):
         tuning_options.cache = {}
         tuning_options.cachefile = None
 
-    # process transfer learning caches
-    tuning_options.transfer_learning_caches = []
-    if transfer_learning_caches and len(transfer_learning_caches) > 0:
-        for transfer_learning_cache in transfer_learning_caches:
-            cache = preprocess_cache(transfer_learning_cache)
-            assert cache != tuning_options.cache, "Transfer learning cache can not be the same as current cache"
-            cache_data = util.read_cache(cache, open_cache=False)
-            tuning_options.transfer_learning_caches.append(cache_data)
-
     # create search space
     searchspace = Searchspace(tune_params, restrictions, runner.dev.max_threads)
     restrictions = searchspace._modified_restrictions
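
The block removed here read each extra cache through util.read_cache(cache, open_cache=False) and asserted it was not the current cache. Anyone who still needs that data after this commit can load it directly; a sketch, with a placeholder filename:

from kernel_tuner import util

# Load a previously recorded tuning cache read-only, mirroring the read path
# of the removed block; the returned structure is whatever kernel_tuner
# stores in its cache files.
cache_data = util.read_cache("other_device_cache.json", open_cache=False)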
@@ -912,19 +887,9 @@ def tune_kernel_T1(
     device = kernelspec["Device"]["Name"]
     strategy = inputs["Search"]["Name"]
 
-    # set the cache and transfer learning cache paths
+    # set the cache path
     if cache_filepath is None and "SimulationInput" in kernelspec:
         cache_filepath = Path(kernelspec["SimulationInput"])
-        cache_dir = Path(cache_filepath).parent
-        # TODO remove in production!
-        transfer_learning_caches = [
-            p
-            for p in cache_dir.iterdir()
-            if len(p.suffixes) > 0
-            and p.suffixes[-1].endswith(".json")
-            and not p.stem.endswith("_T4")
-            and p.name != cache_filepath.name
-        ]
 
     # get the grid divisions
     grid_divs = {}
@@ -1018,7 +983,6 @@ def tune_kernel_T1(
         strategy_options=strategy_options,
         objective=objective,
         objective_higher_is_better=objective_higher_is_better,
-        transfer_learning_caches=transfer_learning_caches,
     )
     if output_T4:
         return get_t4_metadata(), get_t4_results(results, tune_params, objective=objective)
