
Commit 108060d

chenmoneygithub authored and copybara-github committed
Code changes to get ready for an incoming Keras optimizer migration.
Because this code is still testing TF1, we should use the legacy optimizer.
PiperOrigin-RevId: 473298980
1 parent bfeac97 commit 108060d
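As context for the change, the commit switches both notebooks from the default Keras optimizer classes to the tf.keras.optimizers.legacy namespace, which keeps the pre-migration optimizer implementation. The sketch below (not part of the commit; variable names lr_schedule, ftrl, and adam are illustrative only) mirrors the changed notebook cells as plain Python:

import tensorflow as tf

# Sketch of the updated TF2 branch of create_sample_optimizer():
# the legacy.Ftrl class preserves the pre-migration optimizer behavior.
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate=0.1, decay_steps=10000, decay_rate=0.9)
ftrl = tf.keras.optimizers.legacy.Ftrl(
    l1_regularization_strength=0.001,
    learning_rate=lr_schedule)

# Same idea for the style transfer notebook: Adam via the legacy namespace.
adam = tf.keras.optimizers.legacy.Adam(
    learning_rate=0.02, beta_1=0.99, epsilon=1e-1)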

2 files changed: 3 additions, 3 deletions


site/en/guide/migrate/canned_estimators.ipynb (2 additions, 2 deletions)

@@ -196,15 +196,15 @@
     "source": [
     "def create_sample_optimizer(tf_version):\n",
     "  if tf_version == 'tf1':\n",
-    "    optimizer = lambda: tf.keras.optimizers.Ftrl(\n",
+    "    optimizer = lambda: tf.keras.optimizers.legacy.Ftrl(\n",
     "        l1_regularization_strength=0.001,\n",
     "        learning_rate=tf1.train.exponential_decay(\n",
     "            learning_rate=0.1,\n",
     "            global_step=tf1.train.get_global_step(),\n",
     "            decay_steps=10000,\n",
     "            decay_rate=0.9))\n",
     "  elif tf_version == 'tf2':\n",
-    "    optimizer = tf.keras.optimizers.Ftrl(\n",
+    "    optimizer = tf.keras.optimizers.legacy.Ftrl(\n",
     "        l1_regularization_strength=0.001,\n",
     "        learning_rate=tf.keras.optimizers.schedules.ExponentialDecay(\n",
     "            initial_learning_rate=0.1, decay_steps=10000, decay_rate=0.9))\n",

site/en/tutorials/generative/style_transfer.ipynb (1 addition, 1 deletion)

@@ -705,7 +705,7 @@
    },
    "outputs": [],
    "source": [
-    "opt = tf.keras.optimizers.Adam(learning_rate=0.02, beta_1=0.99, epsilon=1e-1)"
+    "opt = tf.keras.optimizers.legacy.Adam(learning_rate=0.02, beta_1=0.99, epsilon=1e-1)"
    ]
   },
   {
