@@ -23,6 +23,7 @@
 from ..param_attr import ParamAttr
 from ..initializer import Normal, Constant, NumpyArrayInitializer
 import numpy as np
+import logging

 __all__ = [
     'Conv2D', 'Conv3D', 'Pool2D', 'FC', 'BatchNorm', 'Embedding', 'GRUUnit',
@@ -1374,13 +1375,20 @@ def _build_once(self, input):
                 shape=param_shape,
                 dtype=self._dtype,
                 default_initializer=Constant(1.0))
+        else:
+            if self._param_attr:
+                logging.warning("param_attr is only available when scale is True")
+
         if self._shift:
             assert self._bias_attr is not False
             self._bias_w = self.create_parameter(
                 attr=self._bias_attr,
                 shape=param_shape,
                 dtype=self._dtype,
                 is_bias=True)
+        else:
+            if self._bias_attr:
+                logging.warning("bias_attr is only available when shift is True")

     def forward(self, input):
         inputs = dict()
@@ -1410,7 +1418,7 @@ def forward(self, input):
                 "begin_norm_axis": self._begin_norm_axis
             })

-        return self._helper.append_activation(layer_norm_out)
+        return self._helper.append_activation(layer_norm_out, act=self._act)


 class GRUUnit(layers.Layer):
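
LayerNorm changes: the constructor's `act` argument is now actually forwarded to `append_activation`, and a warning is logged when `param_attr`/`bias_attr` are supplied while `scale`/`shift` are disabled (the attrs are ignored in that case). A minimal sketch of the fixed behavior, assuming the 1.x `fluid.dygraph` API where layers take a `name_scope` as their first argument:

```python
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph import LayerNorm, to_variable

with fluid.dygraph.guard():
    x = to_variable(np.random.random((4, 32)).astype('float32'))
    # Before this change, act='relu' was silently dropped because
    # append_activation() was called without an act argument.
    ln = LayerNorm('layer_norm', act='relu')
    y = ln(x)  # now relu(layer_norm(x))
```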
@@ -1648,6 +1656,7 @@ class NCE(layers.Layer):
     def __init__(self,
                  name_scope,
                  num_total_classes,
+                 sample_weight=None,
                  param_attr=None,
                  bias_attr=None,
                  num_neg_samples=None,
@@ -1661,7 +1670,7 @@ def __init__(self,
         self._num_total_classes = num_total_classes

         self._inputs = dict()
-
+        self._inputs['SampleWeight'] = sample_weight if sample_weight is not None else []
         if sampler == "uniform":
             sampler = 0
         elif sampler == "log_uniform":
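
NCE now accepts an optional `sample_weight` at construction time and wires it through to the underlying `nce` op as the `SampleWeight` input; `None` becomes an empty list, the fluid convention for an optional op input that is not provided, in which case every example is weighted 1. A rough usage sketch (the shapes and the forward signature are assumptions based on the surrounding code, not verified against this exact revision):

```python
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph import NCE, to_variable

with fluid.dygraph.guard():
    emb = to_variable(np.random.random((4, 32)).astype('float32'))
    label = to_variable(np.random.randint(0, 100, (4, 1)).astype('int64'))
    # Hypothetical per-example weights, shape [batch_size, 1].
    sw = to_variable(np.ones((4, 1)).astype('float32'))
    nce = NCE('nce', num_total_classes=100, sample_weight=sw,
              num_neg_samples=5)
    cost = nce(emb, label)
```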
@@ -1939,17 +1948,17 @@ def _build_once(self, x, y):
             dtype=self._dtype,
             is_bias=False)

-        if self._bias_attr:
-            bias_size = [1, self._size]
-            bias = self.create_parameter(
-                attr=self._bias_attr,
-                shape=bias_size,
-                dtype=self._dtype,
-                is_bias=True)
-            self._inputs["Bias"] = bias
+        bias_size = [1, self._size]
+        self._bias_param = self.create_parameter(
+            attr=self._bias_attr,
+            shape=bias_size,
+            dtype=self._dtype,
+            is_bias=True)

     def forward(self, x, y):
         self._inputs = {"X": x, "Y": y, "Weight": self._w}
+        if self._bias_param:
+            self._inputs["Bias"] = self._bias_param
         if self._name is not None:
             out = self._helper.create_variable(
                 name=".".join([self.full_name(), self._name]),
@@ -1964,7 +1973,7 @@ def forward(self, x, y):
             outputs={"Out": out})

         # add activation
-        return self._helper.append_activation(out)
+        return self._helper.append_activation(out, act=self._act)


 class Conv2DTranspose(layers.Layer):
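
These two hunks fix the bilinear tensor product layer's bias handling: `forward` rebuilds `self._inputs` from scratch on every call, which used to drop the `Bias` entry that `_build_once` had registered. The bias parameter is now kept as `self._bias_param` and re-attached inside `forward`, and the `act` argument is forwarded as well. A minimal sketch, assuming the 1.x dygraph `BilinearTensorProduct(name_scope, size, ...)` signature:

```python
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph import BilinearTensorProduct, to_variable

with fluid.dygraph.guard():
    x = to_variable(np.random.random((3, 5)).astype('float32'))
    y = to_variable(np.random.random((3, 4)).astype('float32'))
    # Bias now actually reaches the op, and act is applied to the output.
    btp = BilinearTensorProduct('btp', size=6, act='sigmoid')
    out = btp(x, y)  # shape [3, 6]
```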
@@ -2099,6 +2108,7 @@ def __init__(self,
         assert param_attr is not False, "param_attr should not be False in conv2d_transpose."
         self._param_attr = param_attr
         self._bias_attr = bias_attr
+        self._act = act
         self._groups = groups
         self._num_filters = num_filters
         self._use_cudnn = use_cudnn
@@ -2162,6 +2172,12 @@ def _build_once(self, input):
         self._img_filter = self.create_parameter(
             dtype=input.dtype, shape=filter_shape, attr=self._param_attr)

+        self._bias_param = self.create_parameter(
+            attr=self._bias_attr,
+            shape=[self._num_filters],
+            dtype=self._dtype,
+            is_bias=True)
+
     def forward(self, input):
         pre_bias = self._helper.create_variable_for_type_inference(
             dtype=input.dtype)
@@ -2179,8 +2195,19 @@ def forward(self, input):
                 'use_cudnn': self._use_cudnn
             })

-        pre_act = self._helper.append_bias_op(pre_bias, dim_start=1, dim_end=2)
-        out = self._helper.append_activation(pre_act)
+        if self._bias_param is not None:
+            pre_act = self._helper.create_variable_for_type_inference(
+                dtype=self._dtype)
+            self._helper.append_op(
+                type='elementwise_add',
+                inputs={'X': [pre_bias],
+                        'Y': [self._bias_param]},
+                outputs={'Out': [pre_act]},
+                attrs={'axis': 1})
+        else:
+            pre_act = pre_bias
+
+        out = self._helper.append_activation(pre_act, act=self._act)
         return out

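Conv2DTranspose (and sequence_conv in the next hunks) drop `append_bias_op` in favor of an explicit bias parameter created once in `_build_once` plus an `elementwise_add` in `forward`. The `axis=1` attribute aligns the `[num_filters]` bias with the channel dimension of the NCHW output. In numpy terms, the add behaves roughly like this:

```python
import numpy as np

# pre_bias: NCHW output of conv2d_transpose; bias: one value per filter.
pre_bias = np.random.random((2, 3, 4, 4)).astype('float32')  # N, C, H, W
bias = np.random.random((3,)).astype('float32')              # C

# elementwise_add with axis=1 broadcasts Y starting at dimension 1 of X:
pre_act = pre_bias + bias.reshape((1, 3, 1, 1))
```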
@@ -2230,13 +2257,20 @@ def __init__(self,
         self._padding = padding
         self._bias_attr = bias_attr
         self._param_attr = param_attr
+        self._act = act

     def _build_once(self, input):
         self._dtype = self._helper.input_dtype(input)
         filter_shape = [self._filter_size * input.shape[1], self._num_filters]
         self._filter_param = self.create_parameter(
             attr=self._param_attr, shape=filter_shape, dtype=self._dtype)

+        self._bias_param = self.create_parameter(
+            attr=self._bias_attr,
+            shape=[self._num_filters],
+            dtype=self._dtype,
+            is_bias=True)
+
     def forward(self, input):
         pre_bias = self._helper.create_variable_for_type_inference(self._dtype)
         self._helper.append_op(
@@ -2251,8 +2285,20 @@ def forward(self, input):
                 'contextStart': -int(self._filter_size // 2),
                 'contextLength': self._filter_size
             })
-        pre_act = self._helper.append_bias_op(pre_bias)
-        return self._helper.append_activation(pre_act)
+
+        if self._bias_param is not None:
+            pre_act = self._helper.create_variable_for_type_inference(
+                dtype=self._dtype)
+            self._helper.append_op(
+                type='elementwise_add',
+                inputs={'X': [pre_bias],
+                        'Y': [self._bias_param]},
+                outputs={'Out': [pre_act]},
+                attrs={'axis': 1})
+        else:
+            pre_act = pre_bias
+
+        return self._helper.append_activation(pre_act, act=self._act)


 class RowConv(layers.Layer):
@@ -2614,6 +2660,7 @@ def forward(self, nodes_vector, edge_set):
             out = self.create_variable(
                 name=self._name, dtype=self._dtype, persistable=False)
         else:
+
             out = self._helper.create_variable_for_type_inference(
                 dtype=self._dtype)