1 parent d3f34ee commit c25f83c
examples/language/gpt/hybridparallelism/finetune.py
@@ -243,7 +243,12 @@ def main():
    # ====================================
    # gpt2 pretrained model
    # ====================================
-   cfg = AutoConfig.from_pretrained(model_name, num_labels=data_builder.num_labels)
+   cfg = AutoConfig.from_pretrained(
+       model_name,
+       num_labels=data_builder.num_labels,
+       pad_token=data_builder.tokenizer.pad_token,
+       pad_token_id=data_builder.tokenizer.pad_token_id,
+   )

    if model_name == "gpt2":
        model = GPT2ForSequenceClassification.from_pretrained(model_name, config=cfg).cuda()
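For context, the change forwards the tokenizer's pad token into the model config. The sketch below (not part of the commit) illustrates why that matters: GPT-2 ships without a padding token, and GPT2ForSequenceClassification relies on config.pad_token_id to locate the last non-padding token of each padded sequence. The EOS-as-pad choice and num_labels=2 are assumptions for illustration; the example's data_builder may configure both differently.

# Hedged sketch; assumptions: EOS reused as pad token, num_labels=2.
import torch
from transformers import AutoConfig, AutoTokenizer, GPT2ForSequenceClassification

model_name = "gpt2"

tokenizer = AutoTokenizer.from_pretrained(model_name)
if tokenizer.pad_token is None:
    # GPT-2 has no native pad token; reusing EOS is a common workaround.
    tokenizer.pad_token = tokenizer.eos_token

# Mirror the commit: pass the pad token into the config so the
# classification head can ignore padding positions.
cfg = AutoConfig.from_pretrained(
    model_name,
    num_labels=2,
    pad_token=tokenizer.pad_token,
    pad_token_id=tokenizer.pad_token_id,
)
model = GPT2ForSequenceClassification.from_pretrained(model_name, config=cfg)

# Without pad_token_id in the config, a padded batch of size > 1 raises an error.
batch = tokenizer(
    ["a short sentence", "a noticeably longer example sentence"],
    padding=True,
    return_tensors="pt",
)
with torch.no_grad():
    logits = model(**batch).logits  # shape: (2, num_labels)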