Commit cc7869e

Fix decoding sample (#752)
* fix decoding sample
* fix
1 parent 29d56ee commit cc7869e

3 files changed: +4 −4 lines

paddlenlp/ops/faster_transformer/sample/decoding_sample.py
Lines changed: 2 additions & 1 deletion

@@ -57,7 +57,8 @@ def do_predict(args):
         src_vocab_size=args.src_vocab_size,
         trg_vocab_size=args.trg_vocab_size,
         max_length=args.max_length + 1,
-        n_layer=args.n_layer,
+        num_encoder_layers=args.n_layer,
+        num_decoder_layers=args.n_layer,
         n_head=args.n_head,
         d_model=args.d_model,
         d_inner_hid=args.d_inner_hid,

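For readers following the first hunk: the sample no longer passes a single n_layer keyword; it forwards args.n_layer to both the encoder and decoder depths. Below is a minimal sketch of the updated call, assuming the sample constructs paddlenlp.ops.FasterTransformer and that the compiled FasterTransformer custom ops are available; all concrete values and the extra dropout/weight_sharing arguments are illustrative assumptions, not repository code.

# Minimal sketch of the fixed construction; values are assumptions.
from paddlenlp.ops import FasterTransformer

model = FasterTransformer(
    src_vocab_size=30000,
    trg_vocab_size=30000,
    max_length=256 + 1,
    num_encoder_layers=6,   # previously a single n_layer argument
    num_decoder_layers=6,   # decoder depth is now passed explicitly
    n_head=8,
    d_model=512,
    d_inner_hid=2048,
    dropout=0.1,            # assumed; not shown in the diff
    weight_sharing=True)    # assumed; not shown in the diff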
paddlenlp/ops/faster_transformer/transformer/decoding.py
Lines changed: 2 additions & 2 deletions

@@ -196,7 +196,7 @@ def __init__(self,
                  word_embedding,
                  positional_embedding,
                  linear,
-                 n_layer,
+                 num_decoder_layers,
                  n_head,
                  d_model,
                  bos_id=0,
@@ -376,7 +376,7 @@ def forward(self, enc_output, memory_seq_lens):
             self.linear_weight, self.linear_bias, self.pos_emb,
             self._decoding_strategy, self._beam_size, self._topk, self._topp,
             self._n_head,
-            int(self._d_model / self._n_head), self._n_layer, self._bos_id,
+            int(self._d_model / self._n_head), self._num_decoder_layers, self._bos_id,
             self._eos_id, self._max_out_len, self._beam_search_diversity_rate)
 
         ids = finalize(

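The decoding.py change renames both the constructor parameter and the attribute read in forward, where the per-head size handed to the custom decoding op is derived from d_model and n_head. A small worked example of that arithmetic, using illustrative values (the real numbers come from the model config):

# Illustrative values only, not taken from the repository.
d_model, n_head, num_decoder_layers = 512, 8, 6
head_dim = int(d_model / n_head)       # 64: the per-head size passed to the op
print(head_dim, num_decoder_layers)    # the op now receives the decoder depth
                                       # under the renamed attribute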
paddlenlp/ops/faster_transformer/transformer/faster_transformer.py
Lines changed: 0 additions & 1 deletion

@@ -89,7 +89,6 @@ def __init__(self,
             word_embedding=self.trg_word_embedding.word_embedding,
             positional_embedding=self.trg_pos_embedding.pos_encoder,
             linear=self.decoding_linear,
-            num_encoder_layers=num_encoder_layers,
             num_decoder_layers=num_decoder_layers,
             n_head=n_head,
             d_model=d_model,

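Taken together with the decoding.py change, faster_transformer.py stops forwarding num_encoder_layers to InferTransformerDecoding, whose signature now declares only num_decoder_layers. A generic, non-PaddleNLP sketch of why the stale keyword likely broke the sample:

# Generic illustration: a keyword a signature does not declare raises a
# TypeError, which is the failure the removed line would likely cause.
class Decoding:
    def __init__(self, num_decoder_layers, n_head, d_model):
        self.num_decoder_layers = num_decoder_layers

Decoding(num_decoder_layers=6, n_head=8, d_model=512)  # OK
try:
    Decoding(num_encoder_layers=6, num_decoder_layers=6, n_head=8, d_model=512)
except TypeError as err:
    print(err)  # unexpected keyword argument 'num_encoder_layers'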