
Commit f7da474

Code changes to get ready for an incoming Keras optimizer migration.
PiperOrigin-RevId: 472844636
Parent: 79cd5da

5 files changed: +5 additions, -5 deletions
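Every file below tightens the same guard: the new Keras optimizer class is now used only when TF2 behavior *and* eager execution are both active, with a fallback to the legacy implementation otherwise. A minimal sketch of the resulting pattern, mirroring the _get_adam_optimizer helper from distribution_layer_test.py (the eager-execution rationale is an inference from the diffs, not stated in the commit message):

import tensorflow as tf

def _get_adam_optimizer(learning_rate):
  # Assumption: the incoming Keras optimizer implementation requires eager
  # execution, so graph-mode (TF1-style) callers get the legacy optimizer.
  if tf.__internal__.tf2.enabled() and tf.executing_eagerly():
    return tf.keras.optimizers.Adam(learning_rate=learning_rate)
  return tf.keras.optimizers.legacy.Adam(learning_rate=learning_rate)

# Usage, as in the tests below:
optimizer = _get_adam_optimizer(learning_rate=0.1)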

tensorflow_probability/python/bijectors/masked_autoregressive_test.py (1 addition, 1 deletion)

@@ -797,7 +797,7 @@ def test_doc_string_2(self):
         x_, bijector_kwargs={"conditional_input": c_})
     model = tfk.Model([x_, c_], log_prob_)
 
-    if tf.__internal__.tf2.enabled():
+    if tf.__internal__.tf2.enabled() and tf.executing_eagerly():
       optimizer = tf.keras.optimizers.Adam(learning_rate=0.1)
     else:
       optimizer = tf.keras.optimizers.legacy.Adam(learning_rate=0.1)

tensorflow_probability/python/distributions/lambertw_f_test.py (1 addition, 1 deletion)

@@ -193,7 +193,7 @@ def dist_lambda(t):
         tf.keras.layers.Dense(1 + 1 + 1),
         dist_layer])
     negloglik = lambda y, p_y: -p_y.log_prob(y)
-    if tf.__internal__.tf2.enabled():
+    if tf.__internal__.tf2.enabled() and tf.executing_eagerly():
      optimizer = tf.keras.optimizers.Adam(learning_rate=0.01)
     else:
       optimizer = tf.keras.optimizers.legacy.Adam(learning_rate=0.01)

tensorflow_probability/python/layers/dense_variational_v2_test.py (1 addition, 1 deletion)

@@ -84,7 +84,7 @@ def test_end_to_end(self):
         tfp.layers.DistributionLambda(lambda t: tfd.Normal(loc=t, scale=1))
     ])
 
-    if tf.__internal__.tf2.enabled():
+    if tf.__internal__.tf2.enabled() and tf.executing_eagerly():
       optimizer = tf.keras.optimizers.Adam(learning_rate=0.05)
     else:
       optimizer = tf.keras.optimizers.legacy.Adam(learning_rate=0.05)

tensorflow_probability/python/layers/distribution_layer_test.py (1 addition, 1 deletion)

@@ -55,7 +55,7 @@ def _unwrap_tensor_coercible(dist):
 
 
 def _get_adam_optimizer(learning_rate):
-  if tf.__internal__.tf2.enabled():
+  if tf.__internal__.tf2.enabled() and tf.executing_eagerly():
     return tf.keras.optimizers.Adam(learning_rate=learning_rate)
   return tf.keras.optimizers.legacy.Adam(learning_rate=learning_rate)
 

tensorflow_probability/python/layers/weight_norm_test.py (1 addition, 1 deletion)

@@ -224,7 +224,7 @@ def testGradientValues(self, model_type):
 
   @parameterized.parameters(['sequential', 'sequential_no_input', 'functional'])
   def testTrainableVariableInitializationInModelFit(self, model_type):
-    if tf.__internal__.tf2.enabled():
+    if tf.__internal__.tf2.enabled() and tf.executing_eagerly():
       sgd = tf.keras.optimizers.SGD(lr=0.)
     else:
       sgd = tf.keras.optimizers.legacy.SGD(lr=0.)
