@@ -152,18 +152,18 @@ def hash_based_split(df, # Pandas dataframe
 
 
 meta_trial_number = 0  # In distributed training, set this to a random number
-activation = 'swish'
-predecessor_level_connection_affinity_factor_first = 0.506486683067576
-predecessor_level_connection_affinity_factor_main = 1.6466748663373876
-max_consecutive_lateral_connections = 35
-p_lateral_connection = 3.703218275217572
-num_lateral_connection_tries_per_unit = 12
-learning_rate = 0.02804912925494706
-epochs = 130
-batch_size = 78
+activation = 'relu'
+predecessor_level_connection_affinity_factor_first = 3.458608634090366
+predecessor_level_connection_affinity_factor_main = 3.020897950280901
+max_consecutive_lateral_connections = 29
+p_lateral_connection = 0.4724567916748979
+num_lateral_connection_tries_per_unit = 7
+learning_rate = 0.04143817317646551
+epochs = 85
+batch_size = 97
 maximum_levels = 4
 maximum_units_per_level = 3
-maximum_neurons_per_unit = 3
+maximum_neurons_per_unit = 5
 
 
 cerebros = \
@@ -176,11 +176,11 @@ def hash_based_split(df, # Pandas dataframe
     validation_split=0.0,
     direction='minimize',
     metric_to_rank_by='val_root_mean_squared_error',
-    minimum_levels=4,
+    minimum_levels=3,
     maximum_levels=maximum_levels,
     minimum_units_per_level=2,
     maximum_units_per_level=maximum_units_per_level,
-    minimum_neurons_per_unit=3,
+    minimum_neurons_per_unit=4,
     maximum_neurons_per_unit=maximum_neurons_per_unit,
     validation_data=(val_x, val_labels),
     activation=activation,
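
For context, a minimal sketch of how the retuned values above plug into the search call that this hunk edits. The hunks only show the keyword arguments; the SimpleCerebrosRandomSearch class name, its import path, and the run_random_search() entry point are assumptions based on the Cerebros project and do not appear in this diff:

# Minimal sketch, assuming the Cerebros SimpleCerebrosRandomSearch API.
# Only the keyword arguments shown in the diff above are confirmed by this file;
# the import path, class name, and run_random_search() are assumptions.
from cerebros.simplecerebrosrandomsearch.simple_cerebros_random_search \
    import SimpleCerebrosRandomSearch

cerebros = \
    SimpleCerebrosRandomSearch(
        validation_split=0.0,
        direction='minimize',                             # lower metric is better
        metric_to_rank_by='val_root_mean_squared_error',  # rank trials by val RMSE
        minimum_levels=3,
        maximum_levels=maximum_levels,
        minimum_units_per_level=2,
        maximum_units_per_level=maximum_units_per_level,
        minimum_neurons_per_unit=4,
        maximum_neurons_per_unit=maximum_neurons_per_unit,
        validation_data=(val_x, val_labels),
        activation=activation,
        learning_rate=learning_rate,
        epochs=epochs,
        batch_size=batch_size)

# Assumed entry point: runs the architecture search and returns the best
# value of metric_to_rank_by found across trials.
best_val_rmse = cerebros.run_random_search()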