@@ -864,30 +864,23 @@ def hyperparametrizable(self, params):
864864 for key in self ._hyperkeys :
865865 log .info (" > > Testing %s = %s" , key , params [key ])
866866 params = self ._hyperopt_override (params )
867-
868- # if not doing hyperopt, read the input hyperopt file containing
869- # different samples
870- else :
871- with open (params ['hyperopt_res' ], 'r' ) as file :
872- hyperopt_params = json .load (file )
873- import pdb ; pdb .set_trace ()
874867 # Preprocess some hyperparameters
875- if self .mode_hyperopt :
868+ if self .mode_hyperopt or ( not self . trials ) :
876869 epochs = int (params ["epochs" ])
877870 stopping_patience = params ["stopping_patience" ]
878871 stopping_epochs = int (epochs * stopping_patience )
879872 else :
880- idx_hyperparamters = self .replicas [0 ]% 10
881- epochs = int (hyperopt_params ["epochs" ][idx_hyperparamters ])
882- stopping_patience = hyperopt_params ["stopping_patience" ][idx_hyperparamters ]
873+ idx_hyperparamters = self .replicas [0 ] % self . trials [ "number_of_trials" ]
874+ epochs = int (self . trials ["epochs" ][idx_hyperparamters ])
875+ stopping_patience = self . trials ["stopping_patience" ][idx_hyperparamters ]
883876 stopping_epochs = int (epochs * stopping_patience )
884877
885878
886879 # Fill the 3 dictionaries (training, validation, experimental) with the layers and losses
887880 # when k-folding, these are the same for all folds
888881 positivity_dict = params .get ("positivity" , {})
889- if not self .mode_hyperopt :
890- positivity_dict ['initial' ] = hyperopt_params ["initial" ][idx_hyperparamters ]
882+ if not self .mode_hyperopt and self . trials :
883+ positivity_dict ['initial' ] = self . trials ["initial" ][idx_hyperparamters ]
891884 integrability_dict = params .get ("integrability" , {})
892885 self ._generate_observables (
893886 positivity_dict .get ("multiplier" ),
@@ -926,7 +919,7 @@ def hyperparametrizable(self, params):
926919
927920 # Prepare the settings for all replica
928921 replicas_settings = []
929- if self .mode_hyperopt :
922+ if self .mode_hyperopt or ( not self . trials ) :
930923 for seed in self ._nn_seeds :
931924 tmp = model_gen .ReplicaSettings (
932925 seed = seed ,
@@ -942,18 +935,18 @@ def hyperparametrizable(self, params):
942935 else :
943936 # read hyperparameter values from hyperopt results
944937 for rep , seed in zip (self .replicas , self ._nn_seeds ):
945- idx_hyperparamters = rep % 10
946- activations = [hyperopt_params ["activation_per_layer" ][idx_hyperparamters ]] * (len (hyperopt_params ["nodes_per_layer" ][idx_hyperparamters ])- 1 )
938+ idx_hyperparamters = rep % self . trials [ "number_of_trials" ]
939+ activations = [self . trials ["activation_per_layer" ][idx_hyperparamters ]] * (len (self . trials ["nodes_per_layer" ][idx_hyperparamters ])- 1 )
947940 # last layer activation is always linear
948941 activations .append ('linear' )
949942
950943 tmp = model_gen .ReplicaSettings (
951944 seed = seed ,
952- nodes = hyperopt_params ["nodes_per_layer" ][idx_hyperparamters ],
945+ nodes = self . trials ["nodes_per_layer" ][idx_hyperparamters ],
953946 activations = activations ,
954- initializer = hyperopt_params ["initializer" ][idx_hyperparamters ],
955- architecture = hyperopt_params ["layer_type" ][idx_hyperparamters ],
956- dropout_rate = hyperopt_params ["dropout" ][idx_hyperparamters ],
947+ initializer = self . trials ["initializer" ][idx_hyperparamters ],
948+ architecture = self . trials ["layer_type" ][idx_hyperparamters ],
949+ dropout_rate = self . trials ["dropout" ][idx_hyperparamters ],
957950 regularizer = params .get ("regularizer" ),
958951 regularizer_args = params .get ("regularizer_args" ),
959952 )
@@ -1027,17 +1020,17 @@ def hyperparametrizable(self, params):
10271020 threshold_chi2 = threshold_chi2 ,
10281021 )
10291022
1030- if self .mode_hyperopt :
1023+ if self .mode_hyperopt or ( not self . trials ) :
10311024 # Compile each of the models with the right parameters
10321025 for model in models .values ():
10331026 model .compile (** params ["optimizer" ])
10341027 else :
10351028 # TODO(review): confirm the intended way to pass optimizer parameters when
10351028 # reading from stored trials; for now they are assembled manually below
1036- idx_hyperparamters = self .replicas [0 ]% 10
1029+ idx_hyperparamters = self .replicas [0 ] % self . trials [ "number_of_trials" ]
10371030 optimizer_params = {}
1038- optimizer_params ["clipnorm" ] = hyperopt_params ['clipnorm' ][idx_hyperparamters ]
1039- optimizer_params ["learning_rate" ] = hyperopt_params ['learning_rate' ][idx_hyperparamters ]
1040- optimizer_params ["optimizer_name" ] = hyperopt_params ['optimizer' ][idx_hyperparamters ]
1031+ optimizer_params ["clipnorm" ] = self . trials ['clipnorm' ][idx_hyperparamters ]
1032+ optimizer_params ["learning_rate" ] = self . trials ['learning_rate' ][idx_hyperparamters ]
1033+ optimizer_params ["optimizer_name" ] = self . trials ['optimizer' ][idx_hyperparamters ]
10411034 for model in models .values ():
10421035 model .compile (** optimizer_params )
10431036
0 commit comments