File tree Expand file tree Collapse file tree 3 files changed +30
-1
lines changed
paddlenlp/transformers/ernie_gram Expand file tree Collapse file tree 3 files changed +30
-1
lines changed Original file line number Diff line number Diff line change @@ -329,6 +329,11 @@ Transformer预训练模型汇总
329
329
| ERNIE-GRAM _ |``ernie-gram-zh`` | Chinese | 12-layer, 768-hidden, |
330
330
| | | | 12-heads, 108M parameters. |
331
331
| | | | Trained on Chinese text. |
332
+ + +----------------------------------------------------------------------------------+--------------+-----------------------------------------+
333
+ | |``ernie-gram-zh-finetuned-dureader-robust`` | Chinese | 12-layer, 768-hidden, |
334
+ | | | | 12-heads, 108M parameters. |
335
+ | | | | Trained on Chinese text. |
336
+ | | | | Then fine-tuned on DuReader-robust |
332
337
+--------------------+----------------------------------------------------------------------------------+--------------+-----------------------------------------+
333
338
| GPT_ |``gpt-cpm-large-cn`` | Chinese | 32-layer, 2560-hidden, |
334
339
| | | | 32-heads, 2.6B parameters. |
Original file line number Diff line number Diff line change @@ -98,12 +98,27 @@ class ErnieGramPretrainedModel(PretrainedModel):
98
98
"type_vocab_size" : 2 ,
99
99
"vocab_size" : 18018
100
100
},
101
+ "ernie-gram-zh-finetuned-dureader-robust" : {
102
+ "attention_probs_dropout_prob" : 0.1 ,
103
+ "emb_size" : 768 ,
104
+ "hidden_act" : "gelu" ,
105
+ "hidden_dropout_prob" : 0.1 ,
106
+ "hidden_size" : 768 ,
107
+ "initializer_range" : 0.02 ,
108
+ "max_position_embeddings" : 512 ,
109
+ "num_attention_heads" : 12 ,
110
+ "num_hidden_layers" : 12 ,
111
+ "type_vocab_size" : 2 ,
112
+ "vocab_size" : 18018
113
+ },
101
114
}
102
115
resource_files_names = {"model_state" : "model_state.pdparams" }
103
116
pretrained_resource_files_map = {
104
117
"model_state" : {
105
118
"ernie-gram-zh" :
106
119
"https://bj.bcebos.com/paddlenlp/models/transformers/ernie_gram_zh/ernie_gram_zh.pdparams" ,
120
+ "ernie-gram-zh-finetuned-dureader-robust" :
121
+ "https://bj.bcebos.com/paddlenlp/models/transformers/ernie-gram-zh-finetuned-dureader-robust/model_state.pdparams" ,
107
122
},
108
123
}
109
124
base_model_prefix = "ernie_gram"
Original file line number Diff line number Diff line change @@ -77,9 +77,18 @@ class ErnieGramTokenizer(ErnieTokenizer):
77
77
"vocab_file" : {
78
78
"ernie-gram-zh" :
79
79
"https://bj.bcebos.com/paddlenlp/models/transformers/ernie_gram_zh/vocab.txt" ,
80
+ "ernie-gram-zh-finetuned-dureader-robust" :
81
+ "https://bj.bcebos.com/paddlenlp/models/transformers/ernie_gram_zh/vocab.txt" ,
80
82
}
81
83
}
82
- pretrained_init_configuration = {"ernie-gram-zh" : {"do_lower_case" : True }, }
84
+ pretrained_init_configuration = {
85
+ "ernie-gram-zh" : {
86
+ "do_lower_case" : True
87
+ },
88
+ "ernie-gram-zh-finetuned-dureader-robust" : {
89
+ "do_lower_case" : True
90
+ },
91
+ }
83
92
84
93
def __init__ (self ,
85
94
vocab_file ,
You can’t perform that action at this time.
0 commit comments