
Commit 40f73b6

replace tf_keras with tf.keras
1 parent 907dd52 commit 40f73b6

1 file changed (+13, −14)

AI-and-Analytics/Features-and-Functionality/IntelTensorFlow_Enabling_Auto_Mixed_Precision_for_TransferLearning/enabling_automixed_precision_for_transfer_learning_with_tensorflow.ipynb

Lines changed: 13 additions & 14 deletions
@@ -32,7 +32,6 @@
 "import tensorflow_hub as hub\n",
 "from datetime import datetime\n",
 "import requests\n",
-"import tf_keras\n",
 "print(\"We are using Tensorflow version: \", tf.__version__)"
 ]
 },
@@ -99,7 +98,7 @@
 "outputs": [],
 "source": [
 "tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)\n",
-"data_root = tf_keras.utils.get_file(\n",
+"data_root = tf.keras.utils.get_file(\n",
 " 'flower_photos',\n",
 " 'https://storage.googleapis.com/download.tensorflow.org/example_images/flower_photos.tgz',\n",
 " untar=True)\n",
@@ -108,7 +107,7 @@
 "img_height = 224\n",
 "img_width = 224\n",
 "\n",
-"train_ds = tf_keras.utils.image_dataset_from_directory(\n",
+"train_ds = tf.keras.utils.image_dataset_from_directory(\n",
 " str(data_root),\n",
 " validation_split=0.2,\n",
 " subset=\"training\",\n",
@@ -117,7 +116,7 @@
 " batch_size=batch_size\n",
 ")\n",
 "\n",
-"val_ds = tf_keras.utils.image_dataset_from_directory(\n",
+"val_ds = tf.keras.utils.image_dataset_from_directory(\n",
 " str(data_root),\n",
 " validation_split=0.2,\n",
 " subset=\"validation\",\n",
@@ -147,7 +146,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"normalization_layer = tf_keras.layers.Rescaling(1./255)\n",
+"normalization_layer = tf.keras.layers.Rescaling(1./255)\n",
 "train_ds = train_ds.map(lambda x, y: (normalization_layer(x), y)) # Where x—images, y—labels.\n",
 "val_ds = val_ds.map(lambda x, y: (normalization_layer(x), y)) # Where x—images, y—labels.\n",
 "\n",
@@ -221,7 +220,7 @@
 "id": "70b3eb9b",
 "metadata": {},
 "source": [
-"Attach the last fully connected classification layer in a **tf_keras.Sequential** model."
+"Attach the last fully connected classification layer in a **tf.keras.Sequential** model."
 ]
 },
 {
@@ -233,14 +232,14 @@
 "source": [
 "num_classes = len(class_names)\n",
 "\n",
-"fp32_model = tf_keras.Sequential([\n",
+"fp32_model = tf.keras.Sequential([\n",
 " feature_extractor_layer,\n",
-" tf_keras.layers.Dense(num_classes)\n",
+" tf.keras.layers.Dense(num_classes)\n",
 "])\n",
 "\n",
 "if arch == 'SPR':\n",
 " # Create a deep copy of the model to train the bf16 model separately to compare accuracy\n",
-" bf16_model = tf_keras.models.clone_model(fp32_model)\n",
+" bf16_model = tf.keras.models.clone_model(fp32_model)\n",
 "\n",
 "fp32_model.summary()"
 ]
@@ -260,7 +259,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"class TimeHistory(tf_keras.callbacks.Callback):\n",
+"class TimeHistory(tf.keras.callbacks.Callback):\n",
 " def on_train_begin(self, logs={}):\n",
 " self.times = []\n",
 " self.throughput = []\n",
@@ -290,8 +289,8 @@
 "outputs": [],
 "source": [
 "fp32_model.compile(\n",
-" optimizer=tf_keras.optimizers.SGD(),\n",
-" loss=tf_keras.losses.SparseCategoricalCrossentropy(from_logits=True),\n",
+" optimizer=tf.keras.optimizers.SGD(),\n",
+" loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),\n",
 " metrics=['acc'])"
 ]
 },
@@ -374,8 +373,8 @@
 "if arch == 'SPR':\n",
 " # Compile\n",
 " bf16_model.compile(\n",
-" optimizer=tf_keras.optimizers.SGD(),\n",
-" loss=tf_keras.losses.SparseCategoricalCrossentropy(from_logits=True),\n",
+" optimizer=tf.keras.optimizers.SGD(),\n",
+" loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),\n",
 " metrics=['acc'])\n",
 " \n",
 " # Train\n",
