 #' - [Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift](https://arxiv.org/abs/1502.03167)
 #'
 #' @export
-layer_batch_normalization <- function(object, axis = -1L, momentum = 0.99, epsilon = 0.001, center = TRUE, scale = TRUE,
-                                      beta_initializer = "zeros", gamma_initializer = "ones",
-                                      moving_mean_initializer = "zeros", moving_variance_initializer = "ones",
-                                      beta_regularizer = NULL, gamma_regularizer = NULL,
-                                      beta_constraint = NULL, gamma_constraint = NULL,
-                                      input_shape = NULL, batch_input_shape = NULL, batch_size = NULL,
+layer_batch_normalization <- function(object, axis = -1L, momentum = 0.99, epsilon = 0.001, center = TRUE, scale = TRUE,
+                                      beta_initializer = "zeros", gamma_initializer = "ones",
+                                      moving_mean_initializer = "zeros", moving_variance_initializer = "ones",
+                                      beta_regularizer = NULL, gamma_regularizer = NULL, renorm = FALSE,
+                                      renorm_clipping = NULL, beta_constraint = NULL, gamma_constraint = NULL,
+                                      input_shape = NULL, batch_input_shape = NULL, batch_size = NULL,
                                       dtype = NULL, name = NULL, trainable = NULL, weights = NULL) {
   create_layer(keras$layers$BatchNormalization, object, list(
     axis = as.integer(axis),
@@ -57,6 +57,8 @@ layer_batch_normalization <- function(object, axis = -1L, momentum = 0.99, epsil
     gamma_regularizer = gamma_regularizer,
     beta_constraint = beta_constraint,
     gamma_constraint = gamma_constraint,
+    renorm = renorm,
+    renorm_clipping = renorm_clipping,
     input_shape = normalize_shape(input_shape),
     batch_input_shape = normalize_shape(batch_input_shape),
     batch_size = as_nullable_integer(batch_size),
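For reference, a minimal usage sketch of the new arguments (assuming the rstudio/keras package with this change and a working TensorFlow backend; the `rmax`/`rmin`/`dmax` keys mirror the `renorm_clipping` dictionary of the underlying Keras `BatchNormalization` layer):

```r
library(keras)

# Toy model exercising the new arguments: renorm = TRUE enables Batch
# Renormalization, and renorm_clipping (a named list) clips the renorm
# corrections r and d.
model <- keras_model_sequential() %>%
  layer_dense(units = 64, input_shape = 784) %>%
  layer_batch_normalization(
    renorm = TRUE,
    renorm_clipping = list(rmax = 3, rmin = 1/3, dmax = 5)
  ) %>%
  layer_activation("relu") %>%
  layer_dense(units = 10, activation = "softmax")
```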