
Commit ebfc313

arashwan authored and tensorflower-gardener committed
Internal change
PiperOrigin-RevId: 338094579
1 parent 4353954 commit ebfc313

File tree

4 files changed: +14 -4 lines changed


official/vision/beta/modeling/decoders/aspp.py

Lines changed: 4 additions & 0 deletions
@@ -31,6 +31,7 @@ def __init__(self,
                use_sync_bn=False,
                norm_momentum=0.99,
                norm_epsilon=0.001,
+               activation='relu',
                dropout_rate=0.0,
                kernel_initializer='VarianceScaling',
                kernel_regularizer=None,
@@ -46,6 +47,7 @@ def __init__(self,
       norm_momentum: `float` normalization omentum for the moving average.
       norm_epsilon: `float` small float added to variance to avoid dividing by
         zero.
+      activation: `str` activation to be used in ASPP.
       dropout_rate: `float` rate for dropout regularization.
       kernel_initializer: kernel_initializer for convolutional layers.
       kernel_regularizer: tf.keras.regularizers.Regularizer object for Conv2D.
@@ -61,6 +63,7 @@ def __init__(self,
         'use_sync_bn': use_sync_bn,
         'norm_momentum': norm_momentum,
         'norm_epsilon': norm_epsilon,
+        'activation': activation,
         'dropout_rate': dropout_rate,
         'kernel_initializer': kernel_initializer,
         'kernel_regularizer': kernel_regularizer,
@@ -74,6 +77,7 @@ def build(self, input_shape):
         use_sync_bn=self._config_dict['use_sync_bn'],
         batchnorm_momentum=self._config_dict['norm_momentum'],
         batchnorm_epsilon=self._config_dict['norm_epsilon'],
+        activation=self._config_dict['activation'],
         dropout=self._config_dict['dropout_rate'],
         kernel_initializer=self._config_dict['kernel_initializer'],
         kernel_regularizer=self._config_dict['kernel_regularizer'],
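
For context, a minimal sketch of how the new keyword could be used once this change lands. The constructor arguments other than `activation` (`level`, `dilation_rates`) are assumptions taken from the rest of the file and its test, not from this diff:

from official.vision.beta.modeling.decoders import aspp

# Build an ASPP decoder whose convolution blocks use 'swish' instead of the
# previously hard-coded 'relu'. `level` and `dilation_rates` are assumed to be
# the existing required arguments.
decoder = aspp.ASPP(level=4, dilation_rates=[6, 12, 18], activation='swish')
print(decoder.get_config()['activation'])  # expected: 'swish'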

official/vision/beta/modeling/decoders/aspp_test.py

Lines changed: 1 addition & 0 deletions
@@ -64,6 +64,7 @@ def test_serialize_deserialize(self):
         use_sync_bn=False,
         norm_momentum=0.99,
         norm_epsilon=0.001,
+        activation='relu',
         kernel_initializer='VarianceScaling',
         kernel_regularizer=None,
         interpolation='bilinear',
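
The new keyword also has to survive the get_config()/from_config() round trip that this test exercises. A hedged sketch of that check, mirroring the test's structure (the kwargs shown are an illustrative subset):

from official.vision.beta.modeling.decoders import aspp

# Round trip exercised by test_serialize_deserialize: the 'activation' entry
# must be preserved so the rebuilt layer is configured identically.
kwargs = dict(level=4, dilation_rates=[6, 12, 18], activation='relu')
network = aspp.ASPP(**kwargs)
new_network = aspp.ASPP.from_config(network.get_config())
assert network.get_config() == new_network.get_config()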

official/vision/beta/modeling/decoders/factory.py

Lines changed: 1 addition & 0 deletions
@@ -61,6 +61,7 @@ def build_decoder(input_specs,
         use_sync_bn=norm_activation_config.use_sync_bn,
         norm_momentum=norm_activation_config.norm_momentum,
         norm_epsilon=norm_activation_config.norm_epsilon,
+        activation=norm_activation_config.activation,
         kernel_regularizer=l2_regularizer)
   else:
     raise ValueError('Decoder {!r} not implement'.format(decoder_type))
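
With this line, the ASPP decoder's activation follows the model-wide norm_activation setting rather than silently defaulting to 'relu'. A hedged sketch of the configuration side; the config class names (decoders.Decoder, decoders.ASPP, common.NormActivation) are assumptions based on the attributes build_decoder reads above:

from official.vision.beta.configs import common, decoders

# Hypothetical config fragment: a model config exposing these two objects as
# model_config.decoder and model_config.norm_activation would now yield an
# ASPP decoder built with 'swish' activations via build_decoder(...).
decoder_cfg = decoders.Decoder(type='aspp', aspp=decoders.ASPP(level=4))
norm_activation_cfg = common.NormActivation(activation='swish')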

official/vision/keras_cv/layers/deeplab.py

Lines changed: 8 additions & 4 deletions
@@ -33,6 +33,7 @@ def __init__(
       use_sync_bn=False,
       batchnorm_momentum=0.99,
       batchnorm_epsilon=0.001,
+      activation='relu',
       dropout=0.5,
       kernel_initializer='glorot_uniform',
       kernel_regularizer=None,
@@ -48,6 +49,7 @@ def __init__(
         0.99.
       batchnorm_epsilon: A float for the epsilon value in BatchNorm. Defaults to
         0.001.
+      activation: A `str` for type of activation to be used. Defaults to 'relu'.
       dropout: A float for the dropout rate before output. Defaults to 0.5.
       kernel_initializer: Kernel initializer for conv layers. Defaults to
         `glorot_uniform`.
@@ -63,6 +65,7 @@ def __init__(
     self.use_sync_bn = use_sync_bn
     self.batchnorm_momentum = batchnorm_momentum
     self.batchnorm_epsilon = batchnorm_epsilon
+    self.activation = activation
     self.dropout = dropout
     self.kernel_initializer = tf.keras.initializers.get(kernel_initializer)
     self.kernel_regularizer = tf.keras.regularizers.get(kernel_regularizer)
@@ -96,7 +99,7 @@ def build(self, input_shape):
             axis=bn_axis,
             momentum=self.batchnorm_momentum,
             epsilon=self.batchnorm_epsilon),
-        tf.keras.layers.Activation('relu')
+        tf.keras.layers.Activation(self.activation)
     ])
     self.aspp_layers.append(conv_sequential)

@@ -109,7 +112,7 @@ def build(self, input_shape):
                 dilation_rate=dilation_rate, use_bias=False),
         bn_op(axis=bn_axis, momentum=self.batchnorm_momentum,
               epsilon=self.batchnorm_epsilon),
-        tf.keras.layers.Activation('relu')])
+        tf.keras.layers.Activation(self.activation)])
     self.aspp_layers.append(conv_sequential)

     pool_sequential = tf.keras.Sequential([
@@ -124,7 +127,7 @@ def build(self, input_shape):
             axis=bn_axis,
             momentum=self.batchnorm_momentum,
             epsilon=self.batchnorm_epsilon),
-        tf.keras.layers.Activation('relu'),
+        tf.keras.layers.Activation(self.activation),
         tf.keras.layers.experimental.preprocessing.Resizing(
             height, width, interpolation=self.interpolation)])
     self.aspp_layers.append(pool_sequential)
@@ -139,7 +142,7 @@ def build(self, input_shape):
             axis=bn_axis,
             momentum=self.batchnorm_momentum,
             epsilon=self.batchnorm_epsilon),
-        tf.keras.layers.Activation('relu'),
+        tf.keras.layers.Activation(self.activation),
         tf.keras.layers.Dropout(rate=self.dropout)])

   def call(self, inputs, training=None):
@@ -159,6 +162,7 @@ def get_config(self):
         'use_sync_bn': self.use_sync_bn,
         'batchnorm_momentum': self.batchnorm_momentum,
         'batchnorm_epsilon': self.batchnorm_epsilon,
+        'activation': self.activation,
         'dropout': self.dropout,
         'kernel_initializer': tf.keras.initializers.serialize(
             self.kernel_initializer),
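
Finally, a minimal sketch of the keras_cv layer itself with the new argument; the positional arguments output_channels and dilation_rates are assumptions taken from the rest of the file, not from this diff:

import tensorflow as tf
from official.vision.keras_cv.layers import deeplab

# Spatial pyramid pooling block whose BatchNorm -> activation stages now use
# 'swish' wherever the hard-coded 'relu' appeared before this change.
spp = deeplab.SpatialPyramidPooling(
    output_channels=256,
    dilation_rates=[6, 12, 18],
    activation='swish',
    dropout=0.1)

features = tf.random.normal([2, 32, 32, 64])  # NHWC feature map with static shape
outputs = spp(features)                       # projected to output_channels
print(outputs.shape)                          # expected: (2, 32, 32, 256)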
