@@ -33,6 +33,7 @@ def __init__(
       use_sync_bn=False,
       batchnorm_momentum=0.99,
       batchnorm_epsilon=0.001,
+      activation='relu',
       dropout=0.5,
       kernel_initializer='glorot_uniform',
       kernel_regularizer=None,
@@ -48,6 +49,7 @@ def __init__(
         0.99.
       batchnorm_epsilon: A float for the epsilon value in BatchNorm. Defaults to
         0.001.
+      activation: A `str` for type of activation to be used. Defaults to 'relu'.
       dropout: A float for the dropout rate before output. Defaults to 0.5.
       kernel_initializer: Kernel initializer for conv layers. Defaults to
         `glorot_uniform`.
@@ -63,6 +65,7 @@ def __init__(
     self.use_sync_bn = use_sync_bn
     self.batchnorm_momentum = batchnorm_momentum
     self.batchnorm_epsilon = batchnorm_epsilon
+    self.activation = activation
     self.dropout = dropout
     self.kernel_initializer = tf.keras.initializers.get(kernel_initializer)
     self.kernel_regularizer = tf.keras.regularizers.get(kernel_regularizer)
@@ -96,7 +99,7 @@ def build(self, input_shape):
             axis=bn_axis,
             momentum=self.batchnorm_momentum,
             epsilon=self.batchnorm_epsilon),
-        tf.keras.layers.Activation('relu')
+        tf.keras.layers.Activation(self.activation)
     ])
     self.aspp_layers.append(conv_sequential)

@@ -109,7 +112,7 @@ def build(self, input_shape):
             dilation_rate=dilation_rate, use_bias=False),
         bn_op(axis=bn_axis, momentum=self.batchnorm_momentum,
               epsilon=self.batchnorm_epsilon),
-        tf.keras.layers.Activation('relu')])
+        tf.keras.layers.Activation(self.activation)])
     self.aspp_layers.append(conv_sequential)

     pool_sequential = tf.keras.Sequential([
@@ -124,7 +127,7 @@ def build(self, input_shape):
             axis=bn_axis,
             momentum=self.batchnorm_momentum,
             epsilon=self.batchnorm_epsilon),
-        tf.keras.layers.Activation('relu'),
+        tf.keras.layers.Activation(self.activation),
         tf.keras.layers.experimental.preprocessing.Resizing(
             height, width, interpolation=self.interpolation)])
     self.aspp_layers.append(pool_sequential)
@@ -139,7 +142,7 @@ def build(self, input_shape):
             axis=bn_axis,
             momentum=self.batchnorm_momentum,
             epsilon=self.batchnorm_epsilon),
-        tf.keras.layers.Activation('relu'),
+        tf.keras.layers.Activation(self.activation),
         tf.keras.layers.Dropout(rate=self.dropout)])

   def call(self, inputs, training=None):
@@ -159,6 +162,7 @@ def get_config(self):
         'use_sync_bn': self.use_sync_bn,
         'batchnorm_momentum': self.batchnorm_momentum,
         'batchnorm_epsilon': self.batchnorm_epsilon,
+        'activation': self.activation,
         'dropout': self.dropout,
         'kernel_initializer': tf.keras.initializers.serialize(
             self.kernel_initializer),
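
Below is a minimal usage sketch of the new `activation` argument. The import path, the class name SpatialPyramidPooling, and the other constructor arguments shown are assumptions for illustration only (none of them appear in this diff); only the `activation` argument and the 'activation' key in get_config() come from the change above.

import tensorflow as tf

# Assumed import path and class name -- not shown in this diff.
from official.vision.beta.modeling.layers.deeplab import SpatialPyramidPooling

# Build the ASPP layer with a non-default activation. Any name that
# tf.keras.layers.Activation can resolve (e.g. 'relu', 'swish', 'gelu') should
# work, since the value is passed straight through to that layer.
aspp = SpatialPyramidPooling(
    output_channels=256,          # assumed pre-existing arguments
    dilation_rates=[6, 12, 18],
    activation='swish',           # new argument introduced by this change
    dropout=0.5)

# The new key is serialized as well: get_config() now includes 'activation',
# so a layer rebuilt from its config keeps the chosen activation.
config = aspp.get_config()
assert config['activation'] == 'swish'
rebuilt = SpatialPyramidPooling.from_config(config)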