Skip to content

Commit a234e30

Browse files
authored
Merge pull request #227 from rizoudal/rizoudal
Refactor(NeuralNetwork): remove workers, batch_queue_size, multiprocessing parameters
2 parents 84902ee + 493e5e3 commit a234e30

27 files changed

+1640
-1767
lines changed

aucmedi/automl/block_pred.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -74,9 +74,6 @@ def block_predict(config):
7474
# Define neural network parameters
7575
nn_paras = {"n_labels": 1, # placeholder
7676
"channels": 1, # placeholder
77-
"workers": config["workers"],
78-
"batch_queue_size": 4,
79-
"multiprocessing": False,
8077
}
8178
# Select input shape for 3D
8279
if meta_training["three_dim"]:
@@ -123,7 +120,7 @@ def block_predict(config):
123120
standardize_mode=model.meta_standardize,
124121
**paras_datagen)
125122
# Load model
126-
path_model = os.path.join(config["path_modeldir"], "model.last.hdf5")
123+
path_model = os.path.join(config["path_modeldir"], "model.last.keras")
127124
model.load(path_model)
128125
# Start model inference
129126
preds = model.predict(prediction_generator=pred_gen)
@@ -142,7 +139,7 @@ def block_predict(config):
142139
**paras_datagen)
143140
# Load model
144141
path_model = os.path.join(config["path_modeldir"],
145-
"model.best_loss.hdf5")
142+
"model.best_loss.keras")
146143
model.load(path_model)
147144
# Start model inference via Augmenting
148145
preds = predict_augmenting(model, pred_gen)

aucmedi/automl/block_train.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ def block_train(config):
105105
callbacks = []
106106
if config["analysis"] == "standard":
107107
cb_loss = ModelCheckpoint(os.path.join(config["path_modeldir"],
108-
"model.best_loss.hdf5"),
108+
"model.best_loss.keras"),
109109
monitor="val_loss", verbose=1,
110110
save_best_only=True)
111111
callbacks.append(cb_loss)
@@ -136,12 +136,9 @@ def block_train(config):
136136
# Define neural network parameters
137137
nn_paras = {"n_labels": class_n,
138138
"channels": 3,
139-
"workers": config["workers"],
140-
"batch_queue_size": 4,
141139
"loss": loss,
142140
"metrics": [AUC(100)],
143141
"pretrained_weights": True,
144-
"multiprocessing": False,
145142
}
146143
# Select input shape for 3D
147144
if config["three_dim"] : nn_paras["input_shape"] = config["shape_3D"]
@@ -217,7 +214,7 @@ def block_train(config):
217214
# Start model training
218215
hist = model.train(training_generator=train_gen, **paras_train)
219216
# Store model
220-
path_model = os.path.join(config["path_modeldir"], "model.last.hdf5")
217+
path_model = os.path.join(config["path_modeldir"], "model.last.keras")
221218
model.dump(path_model)
222219
elif config["analysis"] == "standard":
223220
# Setup neural network
@@ -250,7 +247,7 @@ def block_train(config):
250247
validation_generator=val_gen,
251248
**paras_train)
252249
# Store model
253-
path_model = os.path.join(config["path_modeldir"], "model.last.hdf5")
250+
path_model = os.path.join(config["path_modeldir"], "model.last.keras")
254251
model.dump(path_model)
255252
else:
256253
# Sanity check of architecture config

aucmedi/data_processing/augmentation/aug_image.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222
# External libraries
2323
from albumentations import Compose
2424
import albumentations.augmentations as ai
25+
import cv2
2526
import warnings
2627
import numpy as np
2728
import random
@@ -291,11 +292,9 @@ def apply(self, image):
291292
aug_image = self.operator(image=image)["image"]
292293
# Perform padding & cropping if image shape changed
293294
if self.refine and aug_image.shape != org_shape:
294-
aug_image = ai.pad(aug_image, org_shape[0], org_shape[1])
295-
offset = (random.random(), random.random())
296-
aug_image = ai.random_crop(aug_image,
297-
org_shape[0], org_shape[1],
298-
offset[0], offset[1])
295+
aug_image = ai.pad(aug_image, org_shape[0], org_shape[1], border_mode=cv2.BORDER_REPLICATE,
296+
value=0)
297+
aug_image = ai.RandomCrop(height=org_shape[0], width=org_shape[1])(image=aug_image)["image"]
299298
# Perform clipping if image is out of grayscale/RGB encodings
300299
if self.refine and (np.min(aug_image) < 0 or np.max(aug_image) > 255):
301300
aug_image = np.clip(aug_image, a_min=0, a_max=255)

aucmedi/data_processing/data_generator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -301,7 +301,7 @@ def _get_batches_of_transformed_samples(self, index_array):
301301
# Stack images and optional metadata together into a batch
302302
input_stack = np.stack(batch_stack[0], axis=0)
303303
if self.metadata is not None:
304-
input_stack = [input_stack, self.metadata[index_array]]
304+
input_stack = (input_stack, self.metadata[index_array])
305305
batch = (input_stack, )
306306
# Stack classifications together into a batch if available
307307
if self.labels is not None:

aucmedi/ensemble/bagging.py

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,7 @@ def train(self, training_generator, epochs=20, iterations=None,
164164
# Extend Callback list
165165
cb_mc = ModelCheckpoint(os.path.join(self.cache_dir.name,
166166
"cv_" + str(i) + \
167-
".model.hdf5"),
167+
".model.keras"),
168168
monitor="val_loss", verbose=1,
169169
save_best_only=True, mode="min")
170170
cb_cl = CSVLogger(os.path.join(self.cache_dir.name,
@@ -186,9 +186,6 @@ def train(self, training_generator, epochs=20, iterations=None,
186186
"fcl_dropout": self.model_template.fcl_dropout,
187187
"meta_variables": self.model_template.meta_variables,
188188
"learning_rate": self.model_template.learning_rate,
189-
"batch_queue_size": self.model_template.batch_queue_size,
190-
"workers": self.model_template.workers,
191-
"multiprocessing": self.model_template.multiprocessing,
192189
}
193190

194191
# Gather DataGenerator parameters
@@ -309,7 +306,7 @@ def predict(self, prediction_generator, aggregate="mean",
309306
for i in range(self.k_fold):
310307
# Identify path to fitted model
311308
path_model = os.path.join(path_model_dir,
312-
"cv_" + str(i) + ".model.hdf5")
309+
"cv_" + str(i) + ".model.keras")
313310

314311
# Gather NeuralNetwork parameters
315312
model_paras = {
@@ -324,9 +321,6 @@ def predict(self, prediction_generator, aggregate="mean",
324321
"fcl_dropout": self.model_template.fcl_dropout,
325322
"meta_variables": self.model_template.meta_variables,
326323
"learning_rate": self.model_template.learning_rate,
327-
"batch_queue_size": self.model_template.batch_queue_size,
328-
"workers": self.model_template.workers,
329-
"multiprocessing": self.model_template.multiprocessing,
330324
}
331325

332326
# Start inference process for fold i
@@ -391,7 +385,7 @@ def load(self, directory_path):
391385
# Check model existence
392386
for i in range(self.k_fold):
393387
path_model = os.path.join(directory_path,
394-
"cv_" + str(i) + ".model.hdf5")
388+
"cv_" + str(i) + ".model.keras")
395389
if not os.path.exists(path_model):
396390
raise FileNotFoundError("Bagging model for fold " + str(i) + \
397391
" does not exist!", path_model)

aucmedi/ensemble/composite.py

Lines changed: 4 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -220,7 +220,7 @@ def train(self, training_generator, epochs=20, iterations=None,
220220
callbacks_model = callbacks.copy()
221221
# Extend Callback list
222222
path_model = os.path.join(self.cache_dir.name,
223-
"cv_" + str(i) + ".model.hdf5")
223+
"cv_" + str(i) + ".model.keras")
224224
cb_mc = ModelCheckpoint(path_model,
225225
monitor="val_loss", verbose=1,
226226
save_best_only=True, mode="min")
@@ -243,9 +243,6 @@ def train(self, training_generator, epochs=20, iterations=None,
243243
"fcl_dropout": self.model_list[i].fcl_dropout,
244244
"meta_variables": self.model_list[i].meta_variables,
245245
"learning_rate": self.model_list[i].learning_rate,
246-
"batch_queue_size": self.model_list[i].batch_queue_size,
247-
"workers": self.model_list[i].workers,
248-
"multiprocessing": self.model_list[i].multiprocessing,
249246
}
250247

251248
# Gather DataGenerator parameters
@@ -339,7 +336,7 @@ def train_metalearner(self, training_generator):
339336
for i in range(len(self.model_list)):
340337
# Load current model
341338
path_model = os.path.join(path_model_dir,
342-
"cv_" + str(i) + ".model.hdf5")
339+
"cv_" + str(i) + ".model.keras")
343340

344341
# Gather NeuralNetwork parameters
345342
model_paras = {
@@ -354,9 +351,6 @@ def train_metalearner(self, training_generator):
354351
"fcl_dropout": self.model_list[i].fcl_dropout,
355352
"meta_variables": self.model_list[i].meta_variables,
356353
"learning_rate": self.model_list[i].learning_rate,
357-
"batch_queue_size": self.model_list[i].batch_queue_size,
358-
"workers": self.model_list[i].workers,
359-
"multiprocessing": self.model_list[i].multiprocessing,
360354
}
361355

362356
# Gather DataGenerator parameters
@@ -453,7 +447,7 @@ def predict(self, prediction_generator, return_ensemble=False):
453447
# Sequentially iterate over model list
454448
for i in range(len(self.model_list)):
455449
path_model = os.path.join(path_model_dir,
456-
"cv_" + str(i) + ".model.hdf5")
450+
"cv_" + str(i) + ".model.keras")
457451

458452
# Gather NeuralNetwork parameters
459453
model_paras = {
@@ -468,9 +462,6 @@ def predict(self, prediction_generator, return_ensemble=False):
468462
"fcl_dropout": self.model_list[i].fcl_dropout,
469463
"meta_variables": self.model_list[i].meta_variables,
470464
"learning_rate": self.model_list[i].learning_rate,
471-
"batch_queue_size": self.model_list[i].batch_queue_size,
472-
"workers": self.model_list[i].workers,
473-
"multiprocessing": self.model_list[i].multiprocessing,
474465
}
475466

476467
# Gather DataGenerator parameters
@@ -563,7 +554,7 @@ def load(self, directory_path):
563554
# Check model existence
564555
for i in range(len(self.model_list)):
565556
path_model = os.path.join(directory_path,
566-
"cv_" + str(i) + ".model.hdf5")
557+
"cv_" + str(i) + ".model.keras")
567558
if not os.path.exists(path_model):
568559
raise FileNotFoundError("Composite model " + str(i) + \
569560
" does not exist!", path_model)

aucmedi/ensemble/stacking.py

Lines changed: 4 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -208,7 +208,7 @@ def train(self, training_generator, epochs=20, iterations=None,
208208
callbacks_model = callbacks.copy()
209209
# Extend Callback list
210210
path_model = os.path.join(self.cache_dir.name,
211-
"nn_" + str(i) + ".model.hdf5")
211+
"nn_" + str(i) + ".model.keras")
212212
cb_mc = ModelCheckpoint(path_model,
213213
monitor="val_loss", verbose=1,
214214
save_best_only=True, mode="min")
@@ -231,9 +231,6 @@ def train(self, training_generator, epochs=20, iterations=None,
231231
"fcl_dropout": self.model_list[i].fcl_dropout,
232232
"meta_variables": self.model_list[i].meta_variables,
233233
"learning_rate": self.model_list[i].learning_rate,
234-
"batch_queue_size": self.model_list[i].batch_queue_size,
235-
"workers": self.model_list[i].workers,
236-
"multiprocessing": self.model_list[i].multiprocessing,
237234
}
238235

239236
# Gather DataGenerator parameters
@@ -327,7 +324,7 @@ def train_metalearner(self, training_generator):
327324
for i in range(len(self.model_list)):
328325
# Load current model
329326
path_model = os.path.join(path_model_dir,
330-
"nn_" + str(i) + ".model.hdf5")
327+
"nn_" + str(i) + ".model.keras")
331328

332329
# Gather NeuralNetwork parameters
333330
model_paras = {
@@ -342,9 +339,6 @@ def train_metalearner(self, training_generator):
342339
"fcl_dropout": self.model_list[i].fcl_dropout,
343340
"meta_variables": self.model_list[i].meta_variables,
344341
"learning_rate": self.model_list[i].learning_rate,
345-
"batch_queue_size": self.model_list[i].batch_queue_size,
346-
"workers": self.model_list[i].workers,
347-
"multiprocessing": self.model_list[i].multiprocessing,
348342
}
349343

350344
# Gather DataGenerator parameters
@@ -440,7 +434,7 @@ def predict(self, prediction_generator, return_ensemble=False):
440434
# Sequentially iterate over model list
441435
for i in range(len(self.model_list)):
442436
path_model = os.path.join(path_model_dir,
443-
"nn_" + str(i) + ".model.hdf5")
437+
"nn_" + str(i) + ".model.keras")
444438

445439
# Gather NeuralNetwork parameters
446440
model_paras = {
@@ -455,9 +449,6 @@ def predict(self, prediction_generator, return_ensemble=False):
455449
"fcl_dropout": self.model_list[i].fcl_dropout,
456450
"meta_variables": self.model_list[i].meta_variables,
457451
"learning_rate": self.model_list[i].learning_rate,
458-
"batch_queue_size": self.model_list[i].batch_queue_size,
459-
"workers": self.model_list[i].workers,
460-
"multiprocessing": self.model_list[i].multiprocessing,
461452
}
462453

463454
# Gather DataGenerator parameters
@@ -550,7 +541,7 @@ def load(self, directory_path):
550541
# Check model existence
551542
for i in range(len(self.model_list)):
552543
path_model = os.path.join(directory_path,
553-
"nn_" + str(i) + ".model.hdf5")
544+
"nn_" + str(i) + ".model.keras")
554545
if not os.path.exists(path_model):
555546
raise FileNotFoundError("Stacking model " + str(i) + \
556547
" does not exist!", path_model)

aucmedi/neural_network/architectures/classifier.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ class Classifier:
7575
???+ example
7676
```python
7777
# Recommended way (automatic creation in NeuralNetwork)
78-
model = NeuralNetwork(n_labels=20, channels=3, batch_queue_size=1,
78+
model = NeuralNetwork(n_labels=20, channels=3,
7979
input_shape=(32, 32), activation_output="sigmoid",
8080
fcl_dropout=False)
8181

0 commit comments

Comments
 (0)