@@ -105,7 +105,10 @@ class RNN(Layer):
Similar to the DynamicRNN in TL 1.x.
If `sequence_length` is provided in the RNN's forward pass and both `return_last_output` and `return_last_state`
- are set as `True`, the forward function will automatically ignore the paddings.
+ are set as `True`, the forward function will automatically ignore the paddings. Note that if `return_last_output`
+ is set as `False`, the synced sequence outputs will still include the outputs that correspond to padding,
+ but users are free to select which slice of the outputs to use in subsequent steps.
+
The `sequence_length` should be a list of integers which indicates the length of each sequence.
It is recommended to
`tl.layers.retrieve_seq_length_op3 <https://tensorlayer.readthedocs.io/en/latest/modules/layers.html#compute-sequence-length-3>`__
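
A rough usage sketch of the dynamic behavior described above (hypothetical shapes and values; the constructor arguments `cell` and `in_channels` are assumptions based on the TL 2.x layer API, not taken from this patch):

    # Hypothetical sketch: dynamic RNN forwarding with per-sample sequence lengths.
    import numpy as np
    import tensorflow as tf
    import tensorlayer as tl

    # Batch of 2 zero-padded sequences: [batch, max_time, features].
    data = np.zeros([2, 4, 3], dtype=np.float32)
    data[0, :2, :] = 1.0  # real length 2
    data[1, :3, :] = 1.0  # real length 3

    # The helper recommended above; yields [2, 3] for this batch.
    seq_len = tl.layers.retrieve_seq_length_op3(data)

    rnn = tl.layers.RNN(
        cell=tf.keras.layers.SimpleRNNCell(units=5),
        return_last_output=True, return_last_state=True, in_channels=3
    )
    # With both flags True, padding is ignored and each sequence's output
    # at its real last step is returned.
    outputs = rnn(data, sequence_length=seq_len)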
@@ -244,16 +247,15 @@ def forward(self, inputs, sequence_length=None, initial_state=None, **kwargs):
                        "but got an actual length of a sequence %d" % i
                    )

-            sequence_length = [i - 1 for i in sequence_length]
+            sequence_length = [i - 1 if i >= 1 else 0 for i in sequence_length]

         # set warning
-        if (not self.return_last_state or not self.return_last_output) and sequence_length is not None:
-            warnings.warn(
-                'return_last_output is set as %s ' % self.return_last_output +
-                'and return_last_state is set as %s. ' % self.return_last_state +
-                'When sequence_length is provided, both are recommended to set as True. ' +
-                'Otherwise, padding will be considered while RNN is forwarding.'
-            )
+        # if (not self.return_last_output) and sequence_length is not None:
+        #     warnings.warn(
+        #         'return_last_output is set as %s ' % self.return_last_output +
+        #         'When sequence_length is provided, it is recommended to be set as True. ' +
+        #         'Otherwise, padding will be considered while the RNN is forwarding.'
+        #     )

        # return the last output, iterating each seq including padding ones. No need to store output during each
        # time step.
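
For clarity on the list-comprehension change above: it converts 1-based sequence lengths into 0-based indices of each sequence's last real time step, and the new guard clamps empty sequences to index 0 instead of the out-of-range -1. A minimal illustration with made-up lengths:

    sequence_length = [3, 1, 0]  # made-up lengths
    last_step = [i - 1 if i >= 1 else 0 for i in sequence_length]
    print(last_step)  # [2, 0, 0]; a length of 0 no longer maps to -1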
@@ -274,6 +276,7 @@ def forward(self, inputs, sequence_length=None, initial_state=None, **kwargs):
            self.cell.reset_recurrent_dropout_mask()

        # recurrent computation
+        # FIXME: if sequence_length is provided (dynamic rnn), only iterate max(sequence_length) times.
        for time_step in range(total_steps):

            cell_output, states = self.cell.call(inputs[:, time_step, :], states, training=self.is_train)
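
One way the FIXME could be resolved, sketched under the assumption that `total_steps` currently equals the padded length `inputs.shape[1]`; note that by this point in `forward` the list has already been shifted to 0-based last indices, hence the `+ 1`:

    # Sketch (assumed variable names): iterate only through the longest real sequence.
    if sequence_length is not None:
        total_steps = max(sequence_length) + 1  # sequence_length holds last indices here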
@@ -758,6 +761,7 @@ def forward(self, inputs, fw_initial_state=None, bw_initial_state=None, **kwargs
        return outputs


+ '''
class ConvRNNCell(object):
"""Abstract object representing an Convolutional RNN Cell."""
@@ -1071,6 +1075,8 @@ def __init__(
        self._add_layers(self.outputs)
        self._add_params(rnn_variables)

+ '''
+
# @tf.function
def retrieve_seq_length_op(data):
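
The body of `retrieve_seq_length_op` lies outside this diff; as a rough sketch of what such an op typically computes (assuming zero-padding and a `[batch, time, features]` layout), not necessarily the actual implementation:

    import tensorflow as tf

    def retrieve_seq_length_sketch(data):
        # A time step counts as real if any of its features is non-zero.
        used = tf.reduce_any(tf.not_equal(data, 0.0), axis=2)  # [batch, time] bools
        return tf.reduce_sum(tf.cast(used, tf.int32), axis=1)  # [batch] lengths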