
Commit 01f89ab

fix bug in seq2seq
1 parent 19a7fcb commit 01f89ab

File tree

1 file changed (+5, −0)


tensorlayer/layers/recurrent.py

Lines changed: 5 additions & 0 deletions
@@ -1428,6 +1428,8 @@ class Seq2Seq(Layer):
         The arguments for the cell initializer.
     n_hidden : int
         The number of hidden units in the layer.
+    initializer : initializer
+        The initializer for the parameters.
     encode_sequence_length : tensor
         For encoder sequence length, see :class:`DynamicRNNLayer` .
     decode_sequence_length : tensor
@@ -1519,6 +1521,7 @@ def __init__(
             cell_fn,  #tf.nn.rnn_cell.LSTMCell,
             cell_init_args={'state_is_tuple': True},
             n_hidden=256,
+            initializer=tf.random_uniform_initializer(-0.1, 0.1),
             encode_sequence_length=None,
             decode_sequence_length=None,
             initial_state_encode=None,
@@ -1547,6 +1550,7 @@ def __init__(
                 cell_fn=cell_fn,
                 cell_init_args=cell_init_args,
                 n_hidden=n_hidden,
+                initializer=initializer,
                 initial_state=initial_state_encode,
                 dropout=dropout,
                 n_layer=n_layer,
@@ -1561,6 +1565,7 @@ def __init__(
                 cell_fn=cell_fn,
                 cell_init_args=cell_init_args,
                 n_hidden=n_hidden,
+                initializer=initializer,
                 initial_state=(network_encode.final_state if initial_state_decode is None else initial_state_decode),
                 dropout=dropout,
                 n_layer=n_layer,
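
For reference, a minimal, hypothetical usage sketch of where the new initializer argument plugs in. It assumes the TF 1.x-era TensorLayer API (EmbeddingInputlayer, retrieve_seq_length_op2, Seq2Seq); the placeholder names, vocabulary size, and embedding size below are illustrative assumptions and not part of this commit.

import tensorflow as tf
import tensorlayer as tl

# Token-id placeholders (batch_size x max_seq_len); names and shapes are illustrative.
encode_seqs = tf.placeholder(dtype=tf.int64, shape=[None, None], name="encode_seqs")
decode_seqs = tf.placeholder(dtype=tf.int64, shape=[None, None], name="decode_seqs")

# Embedding layers for encoder and decoder inputs (vocabulary/embedding sizes are assumptions).
net_encode = tl.layers.EmbeddingInputlayer(inputs=encode_seqs, vocabulary_size=10000, embedding_size=200, name="embed_encode")
net_decode = tl.layers.EmbeddingInputlayer(inputs=decode_seqs, vocabulary_size=10000, embedding_size=200, name="embed_decode")

net_seq2seq = tl.layers.Seq2Seq(
    net_encode, net_decode,
    cell_fn=tf.nn.rnn_cell.LSTMCell,
    n_hidden=256,
    # New in this commit: Seq2Seq now accepts an initializer and forwards it
    # to both the encoder and decoder DynamicRNNLayer.
    initializer=tf.random_uniform_initializer(-0.1, 0.1),
    encode_sequence_length=tl.layers.retrieve_seq_length_op2(encode_seqs),
    decode_sequence_length=tl.layers.retrieve_seq_length_op2(decode_seqs),
    initial_state_encode=None,
    dropout=None,
    n_layer=1,
    return_seq_2d=True,
    name="seq2seq")

The value shown, tf.random_uniform_initializer(-0.1, 0.1), matches the new default in the signature added above.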
