Skip to content

Commit f594384

Browse files
legacy optimizer 4
1 parent 39fd863 commit f594384

File tree

4 files changed

+24
-7
lines changed

4 files changed

+24
-7
lines changed

adapt/base.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,11 @@
1414
from sklearn.exceptions import NotFittedError
1515
from tensorflow.keras import Model
1616
from tensorflow.keras.wrappers.scikit_learn import KerasClassifier, KerasRegressor
17+
try:
18+
from tensorflow.keras.optimizers.legacy import RMSprop
19+
except:
20+
from tensorflow.keras.optimizers import RMSprop
21+
1722

1823
from adapt.utils import (check_estimator,
1924
check_network,
@@ -1308,7 +1313,7 @@ def compile(self,
13081313

13091314
if ((not "optimizer" in compile_params) or
13101315
(compile_params["optimizer"] is None)):
1311-
compile_params["optimizer"] = "rmsprop"
1316+
compile_params["optimizer"] = RMSprop()
13121317
else:
13131318
if optimizer is None:
13141319
if not isinstance(compile_params["optimizer"], str):

tests/test_ccsa.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,10 @@
44
from adapt.utils import make_classification_da
55
from adapt.feature_based import CCSA
66
from tensorflow.keras.initializers import GlorotUniform
7+
try:
8+
from tensorflow.keras.optimizers.legacy import Adam
9+
except:
10+
from tensorflow.keras.optimizers import Adam
711

812
np.random.seed(0)
913
tf.random.set_seed(0)
@@ -18,13 +22,13 @@
1822

1923
def test_ccsa():
2024
ccsa = CCSA(task=task, loss="categorical_crossentropy",
21-
optimizer="adam", metrics=["acc"], gamma=0.1, random_state=0)
25+
optimizer=Adam(), metrics=["acc"], gamma=0.1, random_state=0)
2226
ccsa.fit(Xs, tf.one_hot(ys, 2).numpy(), Xt=Xt[ind],
2327
yt=tf.one_hot(yt, 2).numpy()[ind], epochs=100, verbose=0)
2428
assert np.mean(ccsa.predict(Xt).argmax(1) == yt) > 0.8
2529

2630
ccsa = CCSA(task=task, loss="categorical_crossentropy",
27-
optimizer="adam", metrics=["acc"], gamma=1., random_state=0)
31+
optimizer=Adam(), metrics=["acc"], gamma=1., random_state=0)
2832
ccsa.fit(Xs, tf.one_hot(ys, 2).numpy(), Xt=Xt[ind],
2933
yt=tf.one_hot(yt, 2).numpy()[ind], epochs=100, verbose=0)
3034

tests/test_finetuning.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,10 @@
55
from adapt.utils import make_classification_da
66
from adapt.parameter_based import FineTuning
77
from tensorflow.keras.initializers import GlorotUniform
8+
try:
9+
from tensorflow.keras.optimizers.legacy import Adam
10+
except:
11+
from tensorflow.keras.optimizers import Adam
812

913
np.random.seed(0)
1014
tf.random.set_seed(0)
@@ -21,14 +25,14 @@
2125

2226

2327
def test_finetune():
24-
model = FineTuning(encoder=encoder, task=task, loss="bce", optimizer="adam", random_state=0)
28+
model = FineTuning(encoder=encoder, task=task, loss="bce", optimizer=Adam(), random_state=0)
2529
model.fit(Xs, ys, epochs=100, verbose=0)
2630

2731
assert np.mean((model.predict(Xt).ravel()>0.5) == yt) < 0.7
2832

2933
fine_tuned = FineTuning(encoder=model.encoder_, task=model.task_,
3034
training=False,
31-
loss="bce", optimizer="adam", random_state=0)
35+
loss="bce", optimizer=Adam(), random_state=0)
3236
fine_tuned.fit(Xt[ind], yt[ind], epochs=100, verbose=0)
3337

3438
assert np.abs(fine_tuned.encoder_.get_weights()[0] - model.encoder_.get_weights()[0]).sum() == 0.

tests/test_tradaboost.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,10 @@
88
from sklearn.linear_model import LinearRegression, LogisticRegression, Ridge, RidgeClassifier
99
from sklearn.metrics import r2_score, accuracy_score
1010
import tensorflow as tf
11+
try:
12+
from tensorflow.keras.optimizers.legacy import Adam
13+
except:
14+
from tensorflow.keras.optimizers import Adam
1115

1216
from adapt.instance_based import (TrAdaBoost,
1317
TrAdaBoostR2,
@@ -50,14 +54,14 @@ def test_tradaboost_fit_keras_model():
5054
np.random.seed(0)
5155
est = tf.keras.Sequential()
5256
est.add(tf.keras.layers.Dense(1, activation="sigmoid"))
53-
est.compile(loss="bce", optimizer="adam")
57+
est.compile(loss="bce", optimizer=Adam())
5458
model = TrAdaBoost(est, n_estimators=2, random_state=0)
5559
model.fit(Xs, ys_classif, Xt=Xt[:10], yt=yt_classif[:10])
5660
yp = model.predict(Xt)
5761

5862
est = tf.keras.Sequential()
5963
est.add(tf.keras.layers.Dense(2, activation="softmax"))
60-
est.compile(loss="mse", optimizer="adam")
64+
est.compile(loss="mse", optimizer=Adam())
6165
model = TrAdaBoost(est, n_estimators=2, random_state=0)
6266
model.fit(Xs, np.random.random((100, 2)),
6367
Xt=Xt[:10], yt=np.random.random((10, 2)))

0 commit comments

Comments (0)