@@ -92,8 +92,7 @@ bn* batch_normalization(int batch_size, int vector_input_dimension){
 * */
bn* batch_normalization_without_arrays(int batch_size, int vector_input_dimension){
    if(batch_size <= 0 || vector_input_dimension < 1){
-        fprintf(stderr,"Error: batch size <= 0 and vector_input:dimension < 1 are not admissible!\n");
-        exit(1);
+        return NULL;
    }


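With this change, batch_normalization_without_arrays signals invalid arguments by returning NULL instead of printing and exiting, so callers are now responsible for checking the return value. A minimal call-site sketch under that assumption; the wrapper name make_bn_layer and the header name llab.h are illustrative, not part of the library:

#include <stdio.h>
#include "llab.h"   /* assumed header that declares bn and batch_normalization_without_arrays */

/* hypothetical wrapper: propagate the failure instead of aborting the process */
bn* make_bn_layer(int batch_size, int vector_input_dimension){
    bn* b = batch_normalization_without_arrays(batch_size, vector_input_dimension);
    if(b == NULL){
        /* rejected input: batch_size <= 0 or vector_input_dimension < 1 */
        fprintf(stderr, "Error: invalid bn parameters (%d, %d)\n", batch_size, vector_input_dimension);
        return NULL;
    }
    return b;
}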
@@ -280,31 +279,31 @@ void save_bn(bn* b, int n){
        exit(1);
    }

-
+    convert_data(&b->batch_size, sizeof(int), 1);
    i = fwrite(&b->batch_size, sizeof(int), 1, fw);
-
+    convert_data(&b->batch_size, sizeof(int), 1);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred saving a bn layer\n");
        exit(1);
    }
-
+    convert_data(&b->vector_dim, sizeof(int), 1);
    i = fwrite(&b->vector_dim, sizeof(int), 1, fw);
-
+    convert_data(&b->vector_dim, sizeof(int), 1);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred saving a bn layer\n");
        exit(1);
    }
-
+    convert_data(b->gamma, sizeof(float), b->vector_dim);
    i = fwrite(b->gamma, sizeof(float)*(b->vector_dim), 1, fw);
-
+    convert_data(b->gamma, sizeof(float), b->vector_dim);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred saving a bn layer\n");
        exit(1);
    }

-
+    convert_data(b->beta, sizeof(float), b->vector_dim);
    i = fwrite(b->beta, sizeof(float)*(b->vector_dim), 1, fw);
-
+    convert_data(b->beta, sizeof(float), b->vector_dim);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred saving a bn layer\n");
        exit(1);
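Each fwrite above is now bracketed by two convert_data calls: the first puts the field into the on-disk byte order, the second restores the in-memory representation once the bytes have been written, so the layer stays usable after save_bn returns. For the gamma and beta arrays the call is written as convert_data(pointer, element size, element count), mirroring the scalar calls; this assumes convert_data byte-swaps n consecutive elements of the given size in place. A sketch of such a routine, under that assumption only (not the library's actual implementation):

#include <stddef.h>

/* illustrative in-place byte swap: reverse the byte order of each of the n
 * elements of `size` bytes starting at ptr (little-endian <-> big-endian) */
void convert_data_sketch(void* ptr, size_t size, size_t n){
    unsigned char* p = (unsigned char*)ptr;
    size_t i, j;
    for(i = 0; i < n; i++, p += size){
        for(j = 0; j < size/2; j++){
            unsigned char tmp = p[j];
            p[j] = p[size - 1 - j];
            p[size - 1 - j] = tmp;
        }
    }
}

The double call costs one extra pass over each field, but it avoids allocating a separate conversion buffer before writing.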
@@ -338,14 +337,14 @@ bn* load_bn(FILE* fr){
    float* beta;

    i = fread(&batch_size, sizeof(int), 1, fr);
-
+    convert_data(&batch_size, sizeof(int), 1);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred loading a bn layer\n");
        exit(1);
    }

    i = fread(&vector_dim, sizeof(int), 1, fr);
-
+    convert_data(&vector_dim, sizeof(int), 1);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred loading a bn layer\n");
        exit(1);
@@ -356,15 +355,15 @@ bn* load_bn(FILE* fr){
    beta = (float*)malloc(sizeof(float)*vector_dim);

    i = fread(gamma, sizeof(float)*vector_dim, 1, fr);
-
+    convert_data(gamma, sizeof(float), vector_dim);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred loading a bn layer\n");
        exit(1);
    }


    i = fread(beta, sizeof(float)*vector_dim, 1, fr);
-
+    convert_data(beta, sizeof(float), vector_dim);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred loading a bn layer\n");
        exit(1);
@@ -402,14 +401,14 @@ bn* load_bn_only_for_ff(FILE* fr){


    i = fread(&batch_size, sizeof(int), 1, fr);
-
+    convert_data(&batch_size, sizeof(int), 1);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred loading a bn layer\n");
        exit(1);
    }

    i = fread(&vector_dim, sizeof(int), 1, fr);
-
+    convert_data(&vector_dim, sizeof(int), 1);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred loading a bn layer\n");
        exit(1);
@@ -420,15 +419,15 @@ bn* load_bn_only_for_ff(FILE* fr){
    beta = (float*)malloc(sizeof(float)*vector_dim);

    i = fread(gamma, sizeof(float)*vector_dim, 1, fr);
-
+    convert_data(gamma, sizeof(float), vector_dim);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred loading a bn layer\n");
        exit(1);
    }


    i = fread(beta, sizeof(float)*vector_dim, 1, fr);
-
+    convert_data(beta, sizeof(float), vector_dim);
    if(i != 1){
        fprintf(stderr,"Error: an error occurred loading a bn layer\n");
        exit(1);
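On the load side a single convert_data call follows each fread, translating the stored byte order into the host's; no restoring call is needed because nothing has used the buffer yet. The array calls are again written as (pointer, element size, element count) to stay consistent with the scalar calls. A minimal sketch of this read-then-convert pattern with basic error handling; read_float_array is a hypothetical helper and llab.h an assumed header, as above:

#include <stdio.h>
#include <stdlib.h>
#include "llab.h"   /* assumed header that declares convert_data */

/* hypothetical helper: read n floats written in the on-disk byte order
 * and return them in host byte order, or NULL on failure */
float* read_float_array(FILE* fr, int n){
    float* v = (float*)malloc(sizeof(float)*n);
    if(v == NULL)
        return NULL;                       /* allocation failed */
    if(fread(v, sizeof(float)*n, 1, fr) != 1){
        free(v);                           /* short read: clean up */
        return NULL;
    }
    convert_data(v, sizeof(float), n);     /* convert once, after reading */
    return v;
}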