
Commit 4d736b5

fix ernie tiny v2 model config (#4049)
1 parent: c6f6fee

File tree

1 file changed: +7 -7 lines
paddlenlp/transformers/ernie/modeling.py

Lines changed: 7 additions & 7 deletions
@@ -821,7 +821,7 @@ class ErniePretrainedModel(PretrainedModel):
             "num_hidden_layers": 12,
             "task_type_vocab_size": 3,
             "type_vocab_size": 4,
-            "use_task_id": True,
+            "use_task_id": False,
             "vocab_size": 40000,
         },
         "ernie-3.0-tiny-medium-v2": {
@@ -836,7 +836,7 @@ class ErniePretrainedModel(PretrainedModel):
             "num_hidden_layers": 6,
             "task_type_vocab_size": 16,
             "type_vocab_size": 4,
-            "use_task_id": True,
+            "use_task_id": False,
             "vocab_size": 40000,
         },
         "ernie-3.0-tiny-mini-v2": {
@@ -851,7 +851,7 @@ class ErniePretrainedModel(PretrainedModel):
             "num_hidden_layers": 6,
             "task_type_vocab_size": 16,
             "type_vocab_size": 4,
-            "use_task_id": True,
+            "use_task_id": False,
             "vocab_size": 40000,
         },
         "ernie-3.0-tiny-micro-v2": {
@@ -866,7 +866,7 @@ class ErniePretrainedModel(PretrainedModel):
             "num_hidden_layers": 4,
             "task_type_vocab_size": 16,
             "type_vocab_size": 4,
-            "use_task_id": True,
+            "use_task_id": False,
             "vocab_size": 40000,
         },
         "ernie-3.0-tiny-nano-v2": {
@@ -881,22 +881,22 @@ class ErniePretrainedModel(PretrainedModel):
             "num_hidden_layers": 4,
             "task_type_vocab_size": 16,
             "type_vocab_size": 4,
-            "use_task_id": True,
+            "use_task_id": False,
             "vocab_size": 40000,
         },
         "ernie-3.0-tiny-pico-v2": {
             "attention_probs_dropout_prob": 0.1,
             "hidden_act": "gelu",
             "hidden_dropout_prob": 0.1,
             "hidden_size": 128,
-            "intermediate_size": 1248,
+            "intermediate_size": 512,
             "initializer_range": 0.02,
             "max_position_embeddings": 2048,
             "num_attention_heads": 2,
             "num_hidden_layers": 3,
             "task_type_vocab_size": 16,
             "type_vocab_size": 4,
-            "use_task_id": True,
+            "use_task_id": False,
             "vocab_size": 40000,
         },
     }
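
For reference, a minimal sketch (not part of the commit) of how the corrected entries can be checked after applying this change. The dict keys and model names come from the diff above; the import path is the standard PaddleNLP one, and this assumes a build that includes this commit.

# Minimal sketch: the edited values are plain class-level dict entries on the
# pretrained model class, so the fix can be verified without downloading weights.
from paddlenlp.transformers import ErnieModel  # subclass of ErniePretrainedModel

cfg = ErnieModel.pretrained_init_configuration["ernie-3.0-tiny-pico-v2"]
print(cfg["use_task_id"])        # False after this fix (was True)
print(cfg["intermediate_size"])  # 512 after this fix (was 1248)

# ErnieModel.from_pretrained("ernie-3.0-tiny-pico-v2") would then build the
# model with these corrected values (assuming the weights are published under
# that name).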
