-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtotal_eval.sh
More file actions
21 lines (16 loc) · 1.32 KB
/
total_eval.sh
File metadata and controls
21 lines (16 loc) · 1.32 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
#!/usr/bin/env bash
# Evaluate a merged checkpoint on the gsm8k_cot task with lm-eval,
# launched via `accelerate` across 8 GPUs (CUDA devices 0-7).
# Requires: miniconda env "lm-eval" with lm_eval + accelerate installed.
set -euo pipefail

# Route all Hugging Face downloads through the mirror endpoint.
export HF_ENDPOINT=https://hf-mirror.com

# Activate the conda environment that provides lm_eval / accelerate.
# Under `set -e`, a bad miniconda path aborts here instead of launching
# the 8-GPU job in the wrong environment.
source /path/to/miniconda3/bin/activate
conda activate lm-eval

# Alternative model_args for the two-layer low-rank-attention checkpoint
# (non-merged arch; kept for reference):
# args='{"pretrained":"/path/to/LISA/low-rank-attention/LLaMA-Factory-0.9.0/our_trained_models/1.24.llama2-13b-newarch_2layer256_relu-applyrope-huber1-ce0.75_mse0.25-zero2-LA7,8,9,10,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39-rank800-3e4-4wupdate-bsz128-merge_v2_head_7b-nodetach/checkpoint-31000","special_training_attn_layers":[7,8,9,10,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39],"hidden_size_low_rank":800,"attn_type":"two_layers"}'

# JSON passed straight to lm_eval's --model_args (merged/exported model).
args='{"pretrained":"/path/to/LISA/low-rank-attention/LLaMA-Factory-0.9.0/our_exported_models/2.2.merge_v2_head_7b.Llama-2-7b-hf.DS17-5,6,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31.lora-ovgud-42/checkpoint-30000"}'

# Fail loudly if the metrics directory is missing rather than launching
# from the wrong working directory.
cd /path/to/LISA/low-rank-attention/evaluate-main/metrics || exit 1

# Launch the evaluation. "$args" is quoted (SC2086): the JSON currently
# has no whitespace, but quoting keeps it a single argument even if a
# checkpoint path ever contains spaces.
CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 accelerate launch --main_process_port 29501 -m lm_eval --model hf \
    --model_args "$args" \
    --tasks gsm8k_cot \
    --batch_size 1
# NOTE(review): the original script ended with a bare `wait`, but the
# accelerate launch above runs in the foreground, so there are no
# background jobs to reap; it has been removed as a no-op.

# Layer-selection lists tried in earlier experiments (kept for reference):
# 5,6,12,15,17,18,19,20,21,22,23,24,25,26,27,28,29,30
# 5,6,9,11,13,15,17,18,20,21,23,24,25,26,27,28,29,30,31
# 4,6,8,10,12,14,16,18,20,22,23,24,25,26,27,28,29,30
# 4,5,7,8,10,11,13,14,16,17,19,20,22,23,24,25,26,27,28,29,30
# 4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30