Skip to content

Commit cbd89d3

Browse files
committed
Merge branch 'sampling' of github.com:NNPDF/nnpdf into sampling
2 parents a4c4886 + 85243e8 commit cbd89d3

File tree

1 file changed

+5
-7
lines changed

1 file changed

+5
-7
lines changed

n3fit/src/n3fit/model_trainer.py

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1021,19 +1021,17 @@ def hyperparametrizable(self, params):
10211021
)
10221022

10231023
if self.mode_hyperopt or (not self.trials):
1024-
# Compile each of the models with the right parameters
1025-
for model in models.values():
1026-
model.compile(**params["optimizer"])
1024+
optimizer_params = params["optimizer"]
10271025
else:
1028-
# Proper way of doing this? Not sure how optimizer parameters should be treated
10291026
idx_hyperparamters = self.replicas[0] % self.trials["number_of_trials"]
10301027
optimizer_params = {}
10311028
optimizer_params["clipnorm"] = self.trials['clipnorm'][idx_hyperparamters]
10321029
optimizer_params["learning_rate"] = self.trials['learning_rate'][idx_hyperparamters]
10331030
optimizer_params["optimizer_name"] = self.trials['optimizer'][idx_hyperparamters]
1034-
for model in models.values():
1035-
model.compile(**optimizer_params)
1036-
1031+
1032+
# Compile each of the training/validation models with the same optimization parameters
1033+
for model in models.values():
1034+
model.compile(**optimizer_params)
10371035
self._train_and_fit(models["training"], stopping_object, epochs=epochs)
10381036

10391037
if self.mode_hyperopt:

0 commit comments

Comments (0)