|
113 | 113 | " # create model\n", |
114 | 114 | " autoencoder = MinNDAE(config)\n", |
115 | 115 | " \n", |
116 | | - " # get custom loss func\n", |
117 | | - " loss_function = build_encode_dim_loss_function(encode_dim, regularization_factor=REG_FACTOR)\n", |
| 116 | + " # get custom loss func using decorator factory\n", |
| 117 | + " custom_loss = build_encode_dim_loss_function(encode_dim, regularization_factor=REG_FACTOR)()\n", |
118 | 118 | " \n", |
119 | 119 | " # select loss function\n", |
120 | | - " autoencoder.compile(optimizer=\"adam\", loss=loss_function)\n", |
| 120 | + " autoencoder.compile(optimizer=\"adam\", loss=custom_loss)\n", |
121 | 121 | "\n", |
122 | 122 | " # now return keras model\n", |
123 | 123 | " return autoencoder.model" |
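
For context, the trailing `()` added on the `custom_loss` line only makes sense if `build_encode_dim_loss_function` is a factory whose return value must itself be called to obtain the Keras loss. That function is not shown in this diff; the sketch below is one plausible shape for it, and the penalty form plus every internal name are assumptions for illustration only.

```python
# Hypothetical sketch: build_encode_dim_loss_function is not shown in this diff.
# The trailing () in the notebook call implies a factory that returns a
# zero-argument callable which, when invoked, yields the actual Keras loss.
# The penalty form and all names below are assumptions.
import tensorflow as tf

def build_encode_dim_loss_function(encode_dim, regularization_factor=0.01):
    def make_loss():
        def loss(y_true, y_pred):
            # base reconstruction error
            mse = tf.reduce_mean(tf.square(y_true - y_pred))
            # illustrative penalty that grows with the encoding dimension
            penalty = regularization_factor * tf.cast(encode_dim, tf.float32)
            return mse + penalty
        return loss
    return make_loss

# usage mirrors the notebook cell: factory call, then a second call for the loss
custom_loss = build_encode_dim_loss_function(32, regularization_factor=0.01)()
```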
|
143 | 143 | "source": [ |
144 | 144 | "# get hyperparam tools\n", |
145 | 145 | "from keras.callbacks import EarlyStopping\n", |
146 | | - "from keras_tuner import GridSearch\n", |
| 146 | + "from keras_tuner import RandomSearch\n", |
147 | 147 | "\n", |
148 | 148 | "# setup tuner\n", |
149 | | - "tuner = GridSearch(\n", |
| 149 | + "tuner = RandomSearch(\n", |
150 | 150 | " build_autoencoder,\n", |
151 | 151 | " objective=\"val_loss\",\n", |
152 | | - " max_trials=50,\n", |
| 152 | + " max_trials=100,\n", |
153 | 153 | " directory=\"autoencoder_tuning/minndae\",\n", |
154 | | - " project_name=f\"grid_search_encode_dim_{REG_FACTOR}_reg\",\n", |
| 154 | + " project_name=f\"random_search_encode_dim_{REG_FACTOR}_reg\",\n", |
155 | 155 | " seed=42,\n", |
156 | 156 | ")\n", |
157 | 157 | "\n", |
|
162 | 162 | "tuner.search_space_summary()\n", |
163 | 163 | "\n", |
164 | 164 | "# run the hyperparameter search\n", |
165 | | - "tuner.search(train_ds, epochs=10, validation_data=test_ds, callbacks=[stop_early])" |
| 165 | + "tuner.search(train_ds, epochs=3, validation_data=test_ds, callbacks=[stop_early])" |
166 | 166 | ] |
167 | 167 | }, |
168 | 168 | { |
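
Once `tuner.search` finishes, the best trial can be inspected with standard KerasTuner accessors. A minimal sketch, assuming the tuned hyperparameter is registered under the name `"encode_dim"` inside `build_autoencoder` (not shown in this diff):

```python
# Uses standard keras_tuner APIs; the "encode_dim" hyperparameter name is an
# assumption based on the project naming above.
best_hp = tuner.get_best_hyperparameters(num_trials=1)[0]
print("best encode_dim:", best_hp.get("encode_dim"))

# retrieve the best model found during the search and check it on held-out data
best_model = tuner.get_best_models(num_models=1)[0]
best_model.evaluate(test_ds)
```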
|