@@ -1198,59 +1198,4 @@ gpt-3() {
    bash ${nlp_dir}/scripts/regression/ci_gpt-3.sh
    print_info $? `ls -lt ${log_path} | grep gpt | head -n 1 | awk '{print $9}'`
}
-llama (){
-    cd ${nlp_dir}/examples/language_model/llama/
-    # lora tuning
-    python -u -m paddle.distributed.fleet.launch finetune_generation.py \
-        --output_dir ./checkpoints/ \
-        --per_device_train_batch_size 2 \
-        --gradient_accumulation_steps 2 \
-        --per_device_eval_batch_size 4 \
-        --model_name_or_path "__internal_testing__/micro-random-llama" \
-        --task_name squad \
-        --warmup_steps 30 \
-        --logging_steps 1 \
-        --max_steps 1 \
-        --save_steps 1 \
-        --evaluation_strategy epoch \
-        --save_strategy epoch \
-        --src_length 1024 \
-        --tgt_length 1024 \
-        --fp16 \
-        --fp16_opt_level O2 \
-        --do_train \
-        --disable_tqdm True \
-        --load_best_model_at_end True \
-        --metric_for_best_model accuracy \
-        --eval_with_do_generation False \
-        --recompute \
-        --save_total_limit 1 \
-        --overwrite_output_dir >${log_path}/llama_finetune 2>&1
-    print_info $? llama_finetune
-}
-bloom (){
-    cd ${nlp_dir}/examples/language_model/bloom
-    python -m paddle.distributed.launch finetune_generation.py \
-        --model_name_or_path bigscience/bloom-560m \
-        --task_name_or_path "dureader_qg" \
-        --output_dir ./checkpoints/bloom-560m \
-        --per_device_train_batch_size 2 \
-        --gradient_accumulation_steps 2 \
-        --per_device_eval_batch_size 4 \
-        --logging_steps 1 \
-        --max_steps 1 \
-        --save_steps 1 \
-        --evaluation_strategy epoch \
-        --save_strategy epoch \
-        --tensor_parallel_degree 2 \
-        --recompute \
-        --save_total_limit 1 \
-        --scale_loss 32768 \
-        --overwrite_output_dir
-}
-refactor_training_loop (){
-    llama
-    gpt
-    transformers
-}
$1
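Note: the bare `$1` at the end of the script dispatches to whichever function above is named by the script's first argument, so CI can run a single case per invocation, e.g. `bash <this_script>.sh gpt-3` (invocation sketch; the wrapper script's own name is not visible in this hunk).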