
Commit 2e3137d

temporary way to account for 10 best trials
1 parent 66bf666 commit 2e3137d

1 file changed: 13 additions, 11 deletions


n3fit/src/n3fit/model_trainer.py

@@ -873,8 +873,9 @@ def hyperparametrizable(self, params):
             stopping_patience = params["stopping_patience"]
             stopping_epochs = int(epochs * stopping_patience)
         else:
-            epochs = int(hyperopt_params["epochs"][self.replicas[0]-1])
-            stopping_patience = hyperopt_params["stopping_patience"][self.replicas[0]-1]
+            idx_hyperparamters = self.replicas[0]%10
+            epochs = int(hyperopt_params["epochs"][idx_hyperparamters])
+            stopping_patience = hyperopt_params["stopping_patience"][idx_hyperparamters]
             stopping_epochs = int(epochs * stopping_patience)
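The hunk above replaces the per-replica `rep - 1` lookup with a modulo over the 10 stored best trials. A minimal, self-contained sketch of the resulting mapping (the replica range and the `n_best_trials` name are illustrative, not from the commit):

    # Sketch of the new indexing: each replica number is folded onto one of
    # the 10 stored best-trial entries via a modulo.
    n_best_trials = 10  # assumed length of each hyperopt_params list

    for rep in range(1, 21):  # hypothetical replica numbers 1..20
        idx_hyperparamters = rep % n_best_trials
        print(f"replica {rep} -> trial index {idx_hyperparamters}")

    # Note: replica 1 maps to index 1 and replica 10 to index 0, so the
    # alignment is shifted with respect to the previous rep - 1 lookup.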
@@ -934,17 +935,18 @@ def hyperparametrizable(self, params):
         else:
             # read hyperparameter values from hyperopt results
             for rep, seed in zip(self.replicas, self._nn_seeds):
-                activations = [hyperopt_params["activation_per_layer"][rep-1]] * (len(hyperopt_params["nodes_per_layer"][rep-1])-1)
+                idx_hyperparamters = rep%10
+                activations = [hyperopt_params["activation_per_layer"][idx_hyperparamters]] * (len(hyperopt_params["nodes_per_layer"][idx_hyperparamters])-1)
                 # last layer activation is always linear
                 activations.append('linear')

                 tmp = model_gen.ReplicaSettings(
                     seed=seed,
-                    nodes=hyperopt_params["nodes_per_layer"][rep-1],
+                    nodes=hyperopt_params["nodes_per_layer"][idx_hyperparamters],
                     activations=activations,
-                    initializer=hyperopt_params["initializer"][rep-1],
-                    architecture=hyperopt_params["layer_type"][rep-1],
-                    dropout_rate=hyperopt_params["dropout"][rep-1],
+                    initializer=hyperopt_params["initializer"][idx_hyperparamters],
+                    architecture=hyperopt_params["layer_type"][idx_hyperparamters],
+                    dropout_rate=hyperopt_params["dropout"][idx_hyperparamters],
                     regularizer=params.get("regularizer"),
                     regularizer_args=params.get("regularizer_args"),
                 )
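For the activations line in this hunk, a small stand-alone sketch (the architecture and activation values are made up) of how the per-layer list is built: one activation per layer except the last, which is always linear.

    # Illustrative only: assembling the activation list as the diff above does,
    # with invented trial values in place of hyperopt_params.
    nodes_per_layer = [25, 20, 8]   # hypothetical trial architecture
    activation = "tanh"             # hypothetical trial activation

    # one activation per layer except the last ...
    activations = [activation] * (len(nodes_per_layer) - 1)
    # ... which is always linear
    activations.append("linear")

    assert activations == ["tanh", "tanh", "linear"]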
@@ -1024,11 +1026,11 @@ def hyperparametrizable(self, params):
                 model.compile(**params["optimizer"])
         else:
             # Proper way of doing this? Not sure how optimizer parameters should be treated
+            idx_hyperparamters = self.replicas[0]%10
             optimizer_params = {}
-            optimizer_params["clipnorm"] = hyperopt_params['clipnorm'][self.replicas[0]-1]
-            optimizer_params["learning_rate"] = hyperopt_params['learning_rate'][self.replicas[0]-1]
-            optimizer_params["optimizer_name"] = hyperopt_params['optimizer'][self.replicas[0]-1]
-
+            optimizer_params["clipnorm"] = hyperopt_params['clipnorm'][idx_hyperparamters]
+            optimizer_params["learning_rate"] = hyperopt_params['learning_rate'][idx_hyperparamters]
+            optimizer_params["optimizer_name"] = hyperopt_params['optimizer'][idx_hyperparamters]
             for model in models.values():
                 model.compile(**optimizer_params)
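The last hunk selects the optimizer settings from the trial of the first replica only, so every model in `models` is compiled with the same configuration. A hedged stand-alone sketch of that selection (the dictionary contents and replica tuple are invented placeholders):

    # Placeholder data shaped like the stored best trials (values invented).
    hyperopt_params = {
        "clipnorm": [1.0] * 10,
        "learning_rate": [2.6e-3] * 10,
        "optimizer": ["Nadam"] * 10,
    }
    replicas = (11,)                       # hypothetical replica tuple
    idx_hyperparamters = replicas[0] % 10  # -> 1

    # One optimizer configuration, shared by every compiled model.
    optimizer_params = {
        "clipnorm": hyperopt_params["clipnorm"][idx_hyperparamters],
        "learning_rate": hyperopt_params["learning_rate"][idx_hyperparamters],
        "optimizer_name": hyperopt_params["optimizer"][idx_hyperparamters],
    }
    print(optimizer_params)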
