@@ -105,7 +105,10 @@ class RNN(Layer):
     Similar to the DynamicRNN in TL 1.x.
 
     If the `sequence_length` is provided in RNN's forwarding and both `return_last_output` and `return_last_state`
-    are set as `True`, the forward function will automatically ignore the paddings.
+    are set as `True`, the forward function will automatically ignore the paddings. Note that if `return_last_output`
+    is set as `False`, the synced sequence outputs will still include the outputs that correspond to the paddings,
+    but users are free to select which slices of the outputs to use in subsequent steps.
+
     The `sequence_length` should be a list of integers which indicates the length of each sequence.
     It is recommended to use
     `tl.layers.retrieve_seq_length_op3 <https://tensorlayer.readthedocs.io/en/latest/modules/layers.html#compute-sequence-length-3>`__
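A minimal usage sketch of the behavior the docstring describes, assuming the TensorLayer 2.x API with a `tf.keras` cell; the sizes and the `in_channels`/return-value ordering below are illustrative assumptions, not taken from this diff:

```python
import tensorflow as tf
import tensorlayer as tl

batch_size, num_steps, n_features, hidden_size = 2, 5, 3, 8

# Zero-padded batch; the two sequences have true lengths 5 and 3.
ni = tl.layers.Input([batch_size, num_steps, n_features])
rnn = tl.layers.RNN(
    cell=tf.keras.layers.LSTMCell(units=hidden_size),
    in_channels=n_features,
    return_last_output=True,  # with sequence_length, the "last" output is taken
    return_last_state=True,   # at each sequence's true length, skipping padding
)
last_out, last_state = rnn(ni, sequence_length=[5, 3])
print(last_out.shape)  # (2, 8): one output vector per sequence

# With return_last_output=False, the synced outputs still cover the padded
# steps, e.g. outputs[1, 3:, :] corresponds to padding and can be sliced away.
```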
@@ -244,16 +247,15 @@ def forward(self, inputs, sequence_length=None, initial_state=None, **kwargs):
244247 "but got an actual length of a sequence %d" % i
245248 )
246249
247- sequence_length = [i - 1 for i in sequence_length ]
250+ sequence_length = [i - 1 if i >= 1 else 0 for i in sequence_length ]
248251
249252 # set warning
250- if (not self .return_last_state or not self .return_last_output ) and sequence_length is not None :
251- warnings .warn (
252- 'return_last_output is set as %s ' % self .return_last_output +
253- 'and return_last_state is set as %s. ' % self .return_last_state +
254- 'When sequence_length is provided, both are recommended to set as True. ' +
255- 'Otherwise, padding will be considered while RNN is forwarding.'
256- )
253+ # if (not self.return_last_output) and sequence_length is not None:
254+ # warnings.warn(
255+ # 'return_last_output is set as %s ' % self.return_last_output +
256+ # 'When sequence_length is provided, it is recommended to set as True. ' +
257+ # 'Otherwise, padding will be considered while RNN is forwarding.'
258+ # )
257259
258260 # return the last output, iterating each seq including padding ones. No need to store output during each
259261 # time step.
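The changed comprehension converts each 1-based length into the 0-based index of that sequence's last valid step, now clamping zero-length sequences to 0 rather than producing -1. A standalone illustration of the clamp, with a hypothetical helper name:

```python
def last_step_indices(sequence_length):
    # 1-based lengths -> 0-based indices of each sequence's last valid step;
    # empty sequences are clamped to index 0 instead of the negative index -1.
    return [i - 1 if i >= 1 else 0 for i in sequence_length]

print(last_step_indices([4, 2, 0]))  # [3, 1, 0]
print([i - 1 for i in [4, 2, 0]])    # [3, 1, -1]  <- old behavior for length 0
```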
@@ -274,6 +276,7 @@ def forward(self, inputs, sequence_length=None, initial_state=None, **kwargs):
         self.cell.reset_recurrent_dropout_mask()
 
         # recurrent computation
+        # FIXME: if sequence_length is provided (dynamic rnn), only iterate max(sequence_length) times.
         for time_step in range(total_steps):
 
             cell_output, states = self.cell.call(inputs[:, time_step, :], states, training=self.is_train)
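A standalone sketch of the early-stopping idea the FIXME describes, under the assumption that `sequence_length` has already been converted to 0-based last-step indices as above; the function and the stand-in for `cell.call` are hypothetical:

```python
import numpy as np

def run_steps(inputs, last_indices=None):
    total_steps = inputs.shape[1]
    # When lengths are known, every step past the longest sequence is padding,
    # so iterating max(last_indices) + 1 steps is enough.
    steps = total_steps if last_indices is None else max(last_indices) + 1
    outputs = []
    for t in range(steps):
        outputs.append(inputs[:, t, :])  # stand-in for self.cell.call(...)
    return outputs

batch = np.zeros((2, 10, 3), dtype=np.float32)
print(len(run_steps(batch, last_indices=[3, 1])))  # 4 iterations instead of 10
```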
@@ -758,6 +761,7 @@ def forward(self, inputs, fw_initial_state=None, bw_initial_state=None, **kwargs
         return outputs
 
 
+'''
 class ConvRNNCell(object):
     """Abstract object representing a Convolutional RNN Cell."""
 
@@ -1071,6 +1075,8 @@ def __init__(
         self._add_layers(self.outputs)
         self._add_params(rnn_variables)
 
+'''
+
 
 # @tf.function
 def retrieve_seq_length_op(data):
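For context, a sketch of the computation an op like `retrieve_seq_length_op` typically performs on zero-padded `[batch, steps, features]` data; this assumes all-zero feature vectors mark padding, as in TL 1.x, and is not taken from the body of this diff:

```python
import tensorflow as tf

data = tf.constant([
    [[1., 2.], [3., 4.], [0., 0.]],  # true length 2 (last step is all zeros)
    [[5., 6.], [7., 8.], [9., 0.]],  # true length 3 (any non-zero feature counts)
])
used = tf.sign(tf.reduce_max(tf.abs(data), axis=2))       # 1.0 where a step carries signal
lengths = tf.cast(tf.reduce_sum(used, axis=1), tf.int32)  # used steps per sequence
print(lengths.numpy())  # [2 3]
```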