@@ -88,9 +88,10 @@ template <typename Place, typename T>
 class ContextProjectFunctor {
  public:
   void operator()(const platform::DeviceContext& context, const LoDTensor& in,
-                  const Tensor& padding_data, Tensor& col,
-                  bool padding_trainable, int context_start, int context_length,
-                  int context_stride, int up_pad, int down_pad) {
+                  const Tensor& padding_data, bool padding_trainable,
+                  const int context_start, const int context_length,
+                  const int context_stride, const int up_pad,
+                  const int down_pad, Tensor* col) {
     auto lod_level_0 = in.lod()[0];

     math::Im2ColFunctor<math::ColFormat::kOCF, Place, float> im2col_ocf;
@@ -109,8 +110,8 @@ class ContextProjectFunctor {
                             : static_cast<int>(lod_level_0[i]);
       input_row_end = static_cast<int>(lod_level_0[i + 1]);

-      Tensor out_t = col.Slice(static_cast<int>(lod_level_0[i]),
-                               static_cast<int>(lod_level_0[i + 1]));
+      Tensor out_t = col->Slice(static_cast<int>(lod_level_0[i]),
+                                static_cast<int>(lod_level_0[i + 1]));

       sequence_height = static_cast<int>(out_t.dims()[0]);

@@ -133,8 +134,8 @@ class ContextProjectFunctor {
     }
     if (padding_trainable) {
       for (int i = 0; i < static_cast<int>(lod_level_0.size()) - 1; ++i) {
-        Tensor out_t = col.Slice(static_cast<int>(lod_level_0[i]),
-                                 static_cast<int>(lod_level_0[i + 1]));
+        Tensor out_t = col->Slice(static_cast<int>(lod_level_0[i]),
+                                  static_cast<int>(lod_level_0[i + 1]));

         sequence_height = static_cast<int>(out_t.dims()[0]);

@@ -197,10 +198,11 @@ class ContextProjectFunctor {
 template <typename Place, typename T>
 class ContextProjectGradFunctor {
  public:
-  void operator()(const platform::DeviceContext& context, LoDTensor& in,
-                  Tensor& padding_data, Tensor& col, bool padding_trainable,
-                  int context_start, int context_length, int context_stride,
-                  int up_pad, int down_pad, bool input_grad, bool pad_grad) {
+  void operator()(const platform::DeviceContext& context, const LoDTensor& in,
+                  bool padding_trainable, const int context_start,
+                  const int context_length, const int context_stride,
+                  const int up_pad, const int down_pad, bool pad_grad,
+                  bool input_grad, Tensor* padding_data, Tensor* col) {
     auto lod_level_0 = in.lod()[0];

     math::Col2ImFunctor<math::ColFormat::kOCF, Place, float> col2im_ocf;
@@ -220,8 +222,8 @@ class ContextProjectGradFunctor {
                             : static_cast<int>(lod_level_0[i]);
       input_row_end = static_cast<int>(lod_level_0[i + 1]);

-      Tensor out_t = col.Slice(static_cast<int>(lod_level_0[i]),
-                               static_cast<int>(lod_level_0[i + 1]));
+      Tensor out_t = col->Slice(static_cast<int>(lod_level_0[i]),
+                                static_cast<int>(lod_level_0[i + 1]));

       sequence_height = static_cast<int>(out_t.dims()[0]);

@@ -247,8 +249,8 @@ class ContextProjectGradFunctor {
     if (pad_grad) {
       if (padding_trainable) {
         for (int i = 0; i < static_cast<int>(lod_level_0.size()) - 1; ++i) {
-          Tensor out_t = col.Slice(static_cast<int>(lod_level_0[i]),
-                                   static_cast<int>(lod_level_0[i + 1]));
+          Tensor out_t = col->Slice(static_cast<int>(lod_level_0[i]),
+                                    static_cast<int>(lod_level_0[i + 1]));

           sequence_height = static_cast<int>(out_t.dims()[0]);
           out_t.Resize({sequence_height * context_length, sequence_width});
@@ -262,7 +264,7 @@ class ContextProjectGradFunctor {
                   k + context_length < up_pad ? context_length : up_pad - k;
               Tensor out_t_sub = out_t.Slice(k * context_length,
                                              k * context_length + padding_size);
-              Tensor w_sub = padding_data.Slice(k, k + padding_size);
+              Tensor w_sub = padding_data->Slice(k, k + padding_size);
               auto out_t_sub_e = EigenMatrix<T>::From(out_t_sub);
               auto w_sub_e = EigenMatrix<T>::From(w_sub);
               w_sub_e.device(*context.GetEigenDevice<Place>()) =
@@ -295,7 +297,7 @@ class ContextProjectGradFunctor {
               Tensor out_t_sub = out_t.Slice(
                   (down_pad_begin_row + t) * context_length - padding_size,
                   (down_pad_begin_row + t) * context_length);
-              Tensor w_sub = padding_data.Slice(
+              Tensor w_sub = padding_data->Slice(
                   up_pad + padding_idx, up_pad + padding_idx + padding_size);
               auto out_t_sub_e = EigenMatrix<T>::From(out_t_sub);
               auto w_sub_e = EigenMatrix<T>::From(w_sub);
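For reference, a minimal sketch of how a call site adapts to the refactored signatures above. The variable names (dev_ctx, in, padding_data, col, the flags) and the functor instance names are illustrative assumptions, not taken from this diff; only the argument order follows the new operator() declarations.

    // Forward projection: inputs come first; the output column buffer is now
    // passed last as a Tensor* instead of a mutable reference.
    math::ContextProjectFunctor<Place, T> seq_project_functor;
    seq_project_functor(dev_ctx, in, padding_data, padding_trainable,
                        context_start, context_length, context_stride, up_pad,
                        down_pad, &col);

    // Backward projection: the pad_grad/input_grad flags select which
    // gradients to compute, and the mutable padding and column tensors move
    // to the end as pointers.
    math::ContextProjectGradFunctor<Place, T> seq_project_grad_functor;
    seq_project_grad_functor(dev_ctx, in, padding_trainable, context_start,
                             context_length, context_stride, up_pad, down_pad,
                             pad_grad, input_grad, &padding_data, &col);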