@@ -66,6 +66,128 @@ def forward(self, inputs):
        return x


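+# An RNN cell wired together from raw framework ops (mul, elementwise_add,
+# tanh activation, softmax, reduce_sum) via LayerHelper, so the same graph
+# definition can be built in both imperative and static modes.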
+class SimpleRNNCell(fluid.imperative.Layer):
+    def __init__(self, step_input_size, hidden_size, output_size, param_attr):
+        super(SimpleRNNCell, self).__init__()
+        self.step_input_size = step_input_size
+        self.hidden_size = hidden_size
+        self.output_size = output_size
+        self._dtype = core.VarDesc.VarType.FP32
+        from paddle.fluid.layer_helper import LayerHelper
+        self._helper = LayerHelper(
+            'SimpleRNNCell', act="tanh", param_attr=param_attr)
+
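+    # Parameters are created lazily, the first time the cell is called.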
+    def _build_once(self, inputs, pre_hidden):
+        i2h_param_shape = [self.step_input_size, self.hidden_size]
+        h2h_param_shape = [self.hidden_size, self.hidden_size]
+        # [hidden_size, output_size] so mul(hidden, W_h2o) yields [1, output_size]
+        h2o_param_shape = [self.hidden_size, self.output_size]
+        self._i2h_w = self._helper.create_parameter(
+            attr=self._helper.param_attr,
+            shape=i2h_param_shape,
+            dtype=self._dtype,
+            is_bias=False)
+        self._h2h_w = self._helper.create_parameter(
+            attr=self._helper.param_attr,
+            shape=h2h_param_shape,
+            dtype=self._dtype,
+            is_bias=False)
+        self._h2o_w = self._helper.create_parameter(
+            attr=self._helper.param_attr,
+            shape=h2o_param_shape,
+            dtype=self._dtype,
+            is_bias=False)
+
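+    # One step: hidden = tanh(input * W_i2h + pre_hidden * W_h2h); the cell
+    # returns sum(softmax(hidden * W_h2o)) as a scalar, plus the new hidden.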
+    def forward(self, input, pre_hidden):
+        tmp_i2h = self._helper.create_variable_for_type_inference(self._dtype)
+        tmp_h2h = self._helper.create_variable_for_type_inference(self._dtype)
+        hidden = self._helper.create_variable_for_type_inference(self._dtype)
+        out = self._helper.create_variable_for_type_inference(self._dtype)
+        softmax_out = self._helper.create_variable_for_type_inference(
+            self._dtype)
+        reduce_out = self._helper.create_variable_for_type_inference(
+            self._dtype)
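+        # input-to-hidden and hidden-to-hidden projections, added together
+        # and passed through the helper's tanh activation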
+        self._helper.append_op(
+            type="mul",
+            inputs={"X": input,
+                    "Y": self._i2h_w},
+            outputs={"Out": tmp_i2h},
+            attrs={"x_num_col_dims": 1,
+                   "y_num_col_dims": 1})
+
+        self._helper.append_op(
+            type="mul",
+            inputs={"X": pre_hidden,
+                    "Y": self._h2h_w},
+            outputs={"Out": tmp_h2h},
+            attrs={"x_num_col_dims": 1,
+                   "y_num_col_dims": 1})
+
+        self._helper.append_op(
+            type="elementwise_add",
+            inputs={'X': tmp_h2h,
+                    'Y': tmp_i2h},
+            outputs={'Out': hidden},
+            attrs={'axis': -1,
+                   'use_mkldnn': False})
+        hidden = self._helper.append_activation(hidden)
+
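+        # project the new hidden state to the output space and normalize it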
+        self._helper.append_op(
+            type="mul",
+            inputs={"X": hidden,
+                    "Y": self._h2o_w},
+            outputs={"Out": out},
+            attrs={"x_num_col_dims": 1,
+                   "y_num_col_dims": 1})
+
+        self._helper.append_op(
+            type="softmax",
+            inputs={"X": out},
+            outputs={"Out": softmax_out},
+            attrs={"use_cudnn": False})
+
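+        # collapse softmax_out to a single scalar so backward() has one
+        # starting point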
+        self._helper.append_op(
+            type='reduce_sum',
+            inputs={'X': softmax_out},
+            outputs={'Out': reduce_out},
+            attrs={'dim': None,
+                   'keep_dim': False,
+                   'reduce_all': True})
+
+        return reduce_out, hidden
+
+
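+# Unrolls SimpleRNNCell over a fixed-length sequence, threading the hidden
+# state from one step into the next.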
+class SimpleRNN(fluid.imperative.Layer):
+    def __init__(self):
+        super(SimpleRNN, self).__init__()
+        self.seq_len = 4
+        self._cell = SimpleRNNCell(
+            3,
+            3,
+            3,
+            fluid.ParamAttr(initializer=fluid.initializer.Constant(value=0.1)))
+
+    def forward(self, inputs):
+        outs = list()
+        pre_hiddens = list()
+
+        init_hidden = fluid.layers.tensor.create_parameter(
+            attr=fluid.ParamAttr(
+                initializer=fluid.initializer.Constant(value=0.1)),
+            shape=[1, 3],
+            dtype='float32',
+            is_bias=False)
+        pre_hidden = init_hidden
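+        # slice one timestep at a time out of the [1, 4, 3] input and feed
+        # it, with the previous hidden state, through the cell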
+        for i in range(self.seq_len):
+            input = fluid.layers.slice(
+                inputs, axes=[1], starts=[i], ends=[i + 1])
+            input = fluid.layers.reshape(input, shape=[1, 3])
+            out_softmax, pre_hidden = self._cell(input, pre_hidden)
+            outs.append(out_softmax)
+            pre_hiddens.append(pre_hidden)
+
+        return outs, pre_hiddens
+
+
class TestImperative(unittest.TestCase):
    def test_sum_op(self):
        x = np.ones([2, 2], np.float32)
@@ -211,6 +333,41 @@ def test_mlp(self):
        self.assertTrue(np.allclose(dy_out, static_out))
        self.assertTrue(np.allclose(dy_grad, static_grad))

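+    # Run SimpleRNN once imperatively and once as a static graph, then check
+    # that the forward output and the three weight gradients agree.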
+    def test_rnn(self):
+        np_inp = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0],
+                           [10.0, 11.0, 12.0]])
+        np_inp = np_inp.reshape((1, 4, 3))
+        np_inp = np_inp.astype(np.float32)
+        with fluid.imperative.guard():
+            var_inp = fluid.imperative.base.to_variable(np_inp)
+            var_inp = fluid.layers.reshape(var_inp, shape=[1, 4, 3])
+            simple_rnn = SimpleRNN()
+            outs, pre_hiddens = simple_rnn(var_inp)
+            dy_out = outs[3]._numpy()
+            outs[3]._backward()
+            dy_grad_h2o = simple_rnn._cell._h2o_w._gradient()
+            dy_grad_h2h = simple_rnn._cell._h2h_w._gradient()
+            dy_grad_i2h = simple_rnn._cell._i2h_w._gradient()
+
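+        # build the equivalent static program and fetch the matching
+        # output and parameter gradients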
+        with new_program_scope():
+            inp = fluid.layers.data(
+                name="inp", shape=[1, 4, 3], append_batch_size=False)
+            simple_rnn = SimpleRNN()
+            outs, pre_hiddens = simple_rnn(inp)
+            param_grads = fluid.backward.append_backward(outs[3])
+            exe = fluid.Executor(fluid.CPUPlace())
+            exe.run(fluid.default_startup_program())
+            static_out, static_grad_h2o, static_grad_h2h, static_grad_i2h = exe.run(
+                feed={inp.name: np_inp},
+                fetch_list=[
+                    outs[3].name, param_grads[0][1].name,
+                    param_grads[1][1].name, param_grads[2][1].name
+                ])
+        self.assertTrue(np.allclose(dy_out, static_out))
+        self.assertTrue(np.allclose(dy_grad_h2o, static_grad_h2o))
+        self.assertTrue(np.allclose(dy_grad_h2h, static_grad_h2h))
+        self.assertTrue(np.allclose(dy_grad_i2h, static_grad_i2h))
+

if __name__ == '__main__':
    unittest.main()