5
5
6
6
# Public layer-construction API exported by this module.
__all__ = [
    'fc',
    'data',
    'cross_entropy',
    'conv2d',
    'pool2d',
    'embedding',
    'concat',
    'StaticRNN',
    'cast',
    'sequence_conv',
    'sequence_pool',
]
10
10
11
11
@@ -165,6 +165,18 @@ def func(**kwargs):
165
165
_create_op_func_ ('reshape' )
166
166
167
167
168
def cast(x, data_type, program=None):
    """Cast the variable `x` to `data_type`.

    Appends a single `cast` op whose attributes record both the source
    dtype (read from `x.data_type`) and the requested target dtype, and
    returns a new temporary variable holding the casted result.

    NOTE(review): `program` is never referenced directly in this body; it
    is forwarded to LayerHelper via **locals() -- presumably it selects
    which program the op is appended to. Confirm against LayerHelper.

    NOTE(review): a second, identical `cast` definition appears later in
    this file; at import time the later definition shadows this one.
    The duplicate should be removed.
    """
    # LayerHelper captures ALL current locals as kwargs, so the set of
    # local names defined before this call is part of the behavior.
    helper = LayerHelper('cast', **locals())
    # The output temp variable already carries the requested dtype.
    out = helper.create_tmp_variable(dtype=data_type)
    helper.append_op(
        type='cast',
        inputs={'X': [x]},
        outputs={'Out': [out]},
        attrs={'in_data_type': x.data_type,
               'out_data_type': out.data_type})
    return out
168
180
def cast (x , data_type , program = None ):
169
181
helper = LayerHelper ('cast' , ** locals ())
170
182
out = helper .create_tmp_variable (dtype = data_type )
@@ -220,6 +232,46 @@ def square_error_cost(input, label, **kwargs):
220
232
return square_out
221
233
222
234
235
def sequence_conv(input,
                  num_filters,
                  name=None,
                  filter_size=3,
                  act=None,
                  stride=1,
                  padding=None,
                  bias_attr=None,
                  param_attr=None,
                  program=None,
                  init_program=None):
    """Sequence convolution layer.

    Creates a learnable filter of shape [num_filters, filter_size],
    appends a `sequence_conv` op over `input`, then applies a bias op
    and the activation `act` to the op's output.

    The op is emitted with a fixed context window: context_start is
    hard-coded to 0 and context_length equals `filter_size`; `stride`
    becomes the op's context_stride attribute.

    NOTE(review): `name` and `padding` are never referenced in this
    body other than being swept into LayerHelper via **locals();
    likewise `bias_attr`/`param_attr` are presumably consumed by the
    helper (param_attr via helper.param_attr below) -- confirm against
    LayerHelper.
    """
    # FIXME(dzh) : want to unify the argument of python layer
    # function. So we ignore some unnecessary attributes.
    # such as, padding_trainable, context_start.

    # LayerHelper captures ALL current locals as kwargs, so the set of
    # local names defined before this call is part of the behavior.
    helper = LayerHelper('sequence_conv', **locals())
    dtype = helper.input_dtype()

    # Learnable filter parameter; `filter` intentionally mirrors the
    # op's input name even though it shadows the builtin.
    filter_shape = [num_filters, filter_size]
    filter = helper.create_parameter(
        attr=helper.param_attr, shape=filter_shape, dtype=dtype)
    pre_bias = helper.create_tmp_variable(dtype)

    helper.append_op(
        type='sequence_conv',
        inputs={
            'X': [input],
            'Filter': filter,
        },
        outputs={"Out": pre_bias},
        attrs={
            'context_stride': stride,
            'context_start': 0,
            'context_length': filter_size
        })

    # Bias is added after the conv op, then the activation is applied.
    pre_act = helper.append_bias_op(pre_bias)
    return helper.append_activation(pre_act)
223
275
def conv2d (input ,
224
276
num_filters ,
225
277
name = None ,
@@ -272,6 +324,35 @@ def conv2d(input,
272
324
return helper .append_activation (pre_act )
273
325
274
326
327
def sequence_pool(input,
                  pool_size,
                  pool_type,
                  pool_stride=1,
                  pool_padding=0,
                  global_pooling=False,
                  program=None,
                  init_program=None):
    """Sequence pooling layer.

    Appends a `sequence_pool` op over `input` using `pool_type` as the
    pooling strategy and returns the pooled temporary variable.

    Args:
        input: sequence variable to pool over.
        pool_type: one of "max", "avg", "sqrt", "last", "first".

    Raises:
        ValueError: if `pool_type` is not one of the supported strategies.

    NOTE(review): `pool_size`, `pool_stride`, `pool_padding` and
    `global_pooling` are never referenced in this body other than being
    swept into LayerHelper via **locals() (see FIXME below); they appear
    to exist only to mirror pool2d's signature.
    """
    # FIXME(dzh) : want to unify the argument of python layer
    # function. So we ignore some unnecessary attributes

    ENUM_POOL_TYPE = set(["max", "avg", "sqrt", "last", "first"])
    if pool_type not in ENUM_POOL_TYPE:
        # BUG FIX: the format string and its arguments were previously
        # passed to ValueError as separate positional args, so the
        # message was never interpolated.  Apply %-formatting explicitly,
        # and sort the choices so the message is deterministic (set
        # iteration order is not).
        raise ValueError("Unknown pool_type: '%s'. It can only be %s." %
                         (str(pool_type), " ".join(sorted(ENUM_POOL_TYPE))))

    # LayerHelper captures ALL current locals as kwargs, so the set of
    # local names defined before this call is part of the behavior.
    helper = LayerHelper('sequence_pool', **locals())
    dtype = helper.input_dtype()
    pool_out = helper.create_tmp_variable(dtype)

    # The op takes the pooling strategy name directly as its attribute.
    helper.append_op(
        type="sequence_pool",
        inputs={"X": [input]},
        outputs={"Out": pool_out},
        attrs={"strategy": pool_type})

    return pool_out
275
356
def pool2d (input ,
276
357
pool_size ,
277
358
pool_type ,
@@ -291,7 +372,7 @@ def pool2d(input,
291
372
if isinstance (pool_padding , int ):
292
373
pool_padding = [pool_padding , pool_padding ]
293
374
294
- helper = LayerHelper ('conv2d ' , ** locals ())
375
+ helper = LayerHelper ('pool2d ' , ** locals ())
295
376
dtype = helper .input_dtype ()
296
377
pool_out = helper .create_tmp_variable (dtype )
297
378
0 commit comments