
Commit ea22320

legacy optimizer 5
1 parent f594384 commit ea22320

File tree

4 files changed, +26 −10 lines

adapt/parameter_based/_transfer_tree.py

Lines changed: 11 additions & 3 deletions
@@ -456,7 +456,11 @@ def _force_coherence(self,rule,node=0,Translate=False,indexes_nodes=list(),drift
         if not coh:
             if Translate :
                 if auto_drift:
-                    b_infs,b_sups = ut.bounds_rule(rule,self.estimator_.n_features_)
+                    try:
+                        n_feat = self.estimator_.n_features_
+                    except AttributeError:
+                        n_feat = self.estimator_.n_features_in_
+                    b_infs,b_sups = ut.bounds_rule(rule, n_feat)
                     if non_coherent_sense == -1:
                         if b_sups[phi] == np.inf:
                             self.updateSplit(node,phi,th+D_MARGIN)
@@ -817,8 +821,12 @@ def _strut(self,X_target_node,Y_target_node,node=0,no_prune_on_cl=False,cl_no_pr
         maj_class = np.argmax(self.estimator_.tree_.value[node, :].copy())

         if min_drift is None or max_drift is None:
-            min_drift = np.zeros(self.estimator_.n_features_)
-            max_drift = np.zeros(self.estimator_.n_features_)
+            try:
+                n_feat = self.estimator_.n_features_
+            except AttributeError:
+                n_feat = self.estimator_.n_features_in_
+            min_drift = np.zeros(n_feat)
+            max_drift = np.zeros(n_feat)

         current_class_distribution = ut.compute_class_distribution(classes_, Y_target_node)
         is_reached = (Y_target_node.size > 0)
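
Both hunks work around the same scikit-learn API change: the fitted attribute n_features_ (present on older releases) was replaced by n_features_in_ (the old name was deprecated and later removed). As a sketch, not part of this commit, the repeated inline try/except could be factored into one helper:

    # Sketch only: resolve the feature count across scikit-learn versions.
    def _n_features(estimator):
        """Number of input features of a fitted estimator."""
        try:
            return estimator.n_features_       # older scikit-learn
        except AttributeError:
            return estimator.n_features_in_    # newer scikit-learn

    # e.g. b_infs, b_sups = ut.bounds_rule(rule, _n_features(self.estimator_))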

tests/test_finetuning.py

Lines changed: 5 additions & 5 deletions
@@ -41,23 +41,23 @@ def test_finetune():

     fine_tuned = FineTuning(encoder=model.encoder_, task=model.task_,
                             training=True,
-                            loss="bce", optimizer="adam", random_state=0)
+                            loss="bce", optimizer=Adam(), random_state=0)
     fine_tuned.fit(Xt[ind], yt[ind], epochs=100, verbose=0)

     assert np.abs(fine_tuned.encoder_.get_weights()[0] - model.encoder_.get_weights()[0]).sum() > 1.
     assert np.mean((fine_tuned.predict(Xt).ravel()>0.5) == yt) > 0.9

     fine_tuned = FineTuning(encoder=model.encoder_, task=model.task_,
                             training=[True, False],
-                            loss="bce", optimizer="adam", random_state=0)
+                            loss="bce", optimizer=Adam(), random_state=0)
     fine_tuned.fit(Xt[ind], yt[ind], epochs=100, verbose=0)

     assert np.abs(fine_tuned.encoder_.get_weights()[0] - model.encoder_.get_weights()[0]).sum() == 0.
     assert np.abs(fine_tuned.encoder_.get_weights()[-1] - model.encoder_.get_weights()[-1]).sum() > 1.

     fine_tuned = FineTuning(encoder=model.encoder_, task=model.task_,
                             training=[False],
-                            loss="bce", optimizer="adam", random_state=0)
+                            loss="bce", optimizer=Adam(), random_state=0)
     fine_tuned.fit(Xt[ind], yt[ind], epochs=100, verbose=0)

     assert np.abs(fine_tuned.encoder_.get_weights()[0] - model.encoder_.get_weights()[0]).sum() == 0.

@@ -66,13 +66,13 @@ def test_finetune():

 def test_finetune_pretrain():
     model = FineTuning(encoder=encoder, task=task, pretrain=True, pretrain__epochs=2,
-                       loss="bce", optimizer="adam", random_state=0)
+                       loss="bce", optimizer=Adam(), random_state=0)
     model.fit(Xs, ys, epochs=1, verbose=0)


 def test_clone():
     model = FineTuning(encoder=encoder, task=task,
-                       loss="bce", optimizer="adam", random_state=0)
+                       loss="bce", optimizer=Adam(), random_state=0)
     model.fit(Xs, ys, epochs=1, verbose=0)

     new_model = clone(model)
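
Each hunk above swaps the "adam" string for an explicit Adam() instance, but this diff does not show where Adam is imported in tests/test_finetuning.py; presumably it mirrors the fallback added to the other test modules in this commit:

    # Presumed import (shown explicitly in tests/test_iwc.py and
    # tests/test_iwn.py below): prefer the legacy Keras optimizer
    # where it exists, otherwise use the current one.
    try:
        from tensorflow.keras.optimizers.legacy import Adam   # newer TF
    except ImportError:
        from tensorflow.keras.optimizers import Adam          # older TF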

tests/test_iwc.py

Lines changed: 5 additions & 1 deletion
@@ -7,6 +7,10 @@
 from adapt.utils import make_classification_da
 from adapt.instance_based import IWC
 from adapt.utils import get_default_discriminator
+try:
+    from tensorflow.keras.optimizers.legacy import Adam
+except ImportError:
+    from tensorflow.keras.optimizers import Adam

 Xs, ys, Xt, yt = make_classification_da()


@@ -34,7 +38,7 @@ def test_default_classif():

 def test_nn_classif():
     model = IWC(RidgeClassifier(0.), classifier=get_default_discriminator(),
-                cl_params=dict(epochs=10, optimizer="adam", loss="bce", verbose=0),
+                cl_params=dict(epochs=10, optimizer=Adam(), loss="bce", verbose=0),
                 Xt=Xt, random_state=0)
     model.fit(Xs, ys);
     model.predict(Xt)

tests/test_iwn.py

Lines changed: 5 additions & 1 deletion
@@ -7,6 +7,10 @@
 from adapt.instance_based import IWN
 from adapt.utils import get_default_task
 from sklearn.neighbors import KNeighborsClassifier
+try:
+    from tensorflow.keras.optimizers.legacy import Adam
+except ImportError:
+    from tensorflow.keras.optimizers import Adam

 Xs, ys, Xt, yt = make_classification_da()


@@ -21,7 +25,7 @@ def test_iwn():

 def test_iwn_fit_estim():
     task = get_default_task()
-    task.compile(optimizer="adam", loss="mse", metrics=["mae"])
+    task.compile(optimizer=Adam(), loss="mse", metrics=["mae"])
     model = IWN(task, Xt=Xt, sigma_init=0.1, random_state=0,
                 pretrain=True, pretrain__epochs=100, pretrain__verbose=0)
     model.fit(Xs, ys)
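
The same four-line import fallback now appears in several test modules. A possible follow-up, hypothetical and not part of this commit, is a shared factory in a common test utility so the legacy/new split is resolved in one place and every test gets a fresh optimizer instance:

    # Hypothetical helper (name and location assumed, e.g. tests/utils.py):
    def get_adam(**kwargs):
        """Return a fresh Adam, preferring the legacy implementation."""
        try:
            from tensorflow.keras.optimizers.legacy import Adam
        except ImportError:
            from tensorflow.keras.optimizers import Adam
        return Adam(**kwargs)

    # Usage: task.compile(optimizer=get_adam(), loss="mse", metrics=["mae"])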
