|
63 | 63 | "from sklearn.model_selection import train_test_split\n", |
64 | 64 | "from sklearn.preprocessing import OneHotEncoder, LabelBinarizer\n", |
65 | 65 | "import tensorflow as tf\n", |
66 | | - "import tensorflow.keras as keras\n", |
67 | | - "import tensorflow.keras.backend as K\n", |
| 66 | + "from tensorflow import keras\n", |
| 67 | + "from keras import backend as K\n", |
68 | 68 | "import time\n", |
69 | 69 | "\n", |
70 | 70 | "print(\n", |
|
119 | 119 | "print(root_logdir)\n", |
120 | 120 | "print(kt_logdir) # folder for keras tuner results\n", |
121 | 121 | "print(tf_kt_logdir) # folder for TF checkpoints while keras tuning\n", |
122 | | - "print(tf_logdir) # folder for TF checkpoint for best model training" |
| 122 | + "print(tf_logdir) # folder for TF checkpoint for best model training\n", |
| 123 | + "\n", |
| 124 | + "os.makedirs(tf_logdir, exist_ok=True)" |
123 | 125 | ] |
124 | 126 | }, |
125 | 127 | { |
|
157 | 159 | " flip_y=1e-2,\n", |
158 | 160 | " random_state=None,\n", |
159 | 161 | ")\n", |
160 | | - "encoder = OneHotEncoder(sparse=False)\n", |
| 162 | + "encoder = OneHotEncoder(sparse_output=False)\n", |
161 | 163 | "# we encode as one-hot for TF model\n", |
162 | 164 | "Y = encoder.fit_transform(Y.reshape(-1, 1))\n", |
163 | 165 | "\n", |
|
209 | 211 | "def build_model(hp): # with hyper parameter ranges\n", |
210 | 212 | " model = keras.Sequential()\n", |
211 | 213 | " # input layer\n", |
212 | | - " model.add(keras.Input(shape=nx))\n", |
| 214 | + " model.add(keras.Input(shape=(nx, )))\n", |
213 | 215 | " # hidden layers\n", |
214 | 216 | " for layer in range(hp.Int(\"no_layers\", 1, 4)):\n", |
215 | 217 | " model.add(\n", |
|
260 | 262 | "model = build_model(kt.HyperParameters())\n", |
261 | 263 | "hptuner = kt.RandomSearch(\n", |
262 | 264 | " hypermodel=build_model,\n", |
263 | | - " objective=\"val_categorical_accuracy\", # check performance on val data!\n", |
| 265 | + "    objective=\"val_loss\",  # check performance on val data!\n", |
264 | 266 | " max_trials=max_trials,\n", |
265 | 267 | " executions_per_trial=executions_per_trial,\n", |
266 | 268 | " overwrite=True,\n", |
|
319 | 321 | "# we might check the best XX models in detail\n", |
320 | 322 | "# for didactical purpose we choose only the very best one, located in [0]:\n", |
321 | 323 | "model = hptuner.get_best_models(num_models=1)[0]\n", |
322 | | - "model.save(tf_logdir + \"/best_model\")" |
| 324 | + "model.save(tf_logdir + \"/best_model.keras\")" |
323 | 325 | ] |
324 | 326 | }, |
325 | 327 | { |
|
356 | 358 | "outputs": [], |
357 | 359 | "source": [ |
358 | 360 | "# load best model and reset weights\n", |
359 | | - "model = keras.models.load_model(tf_logdir + \"/best_model\")\n", |
| 361 | + "model = keras.models.load_model(tf_logdir + \"/best_model.keras\")\n", |
360 | 362 | "reset_weights(model) # start training from scratch\n", |
361 | 363 | "print(model.summary())" |
362 | 364 | ] |
|
391 | 393 | " callbacks=[earlystopping_cb, tensorboard_cb],\n", |
392 | 394 | " verbose=verbose,\n", |
393 | 395 | ")\n", |
394 | | - "model.save(tf_logdir + \"/trained_best_model\")\n", |
| 396 | + "model.save(tf_logdir + \"/trained_best_model.keras\")\n", |
395 | 397 | "print(model.summary())" |
396 | 398 | ] |
397 | 399 | }, |
|
484 | 486 | "name": "python", |
485 | 487 | "nbconvert_exporter": "python", |
486 | 488 | "pygments_lexer": "ipython3", |
487 | | - "version": "3.10.6" |
| 489 | + "version": "3.12.3" |
488 | 490 | } |
489 | 491 | }, |
490 | 492 | "nbformat": 4, |
|
0 commit comments