3232 "meetkai/functionary-medium-v3.2" ,
3333 "Qwen/Qwen2-7B-Instruct" ,
3434 "Qwen/Qwen2-VL-7B-Instruct" ,
35- "Qwen/Qwen2.5-7B-Instruct" , # "Qwen/Qwen2.5-72B-Instruct", "Qwen/Qwen2.5-Coder-7B-Instruct",
36- "Qwen/Qwen2.5-Math-7B-Instruct" , # "Qwen/Qwen2.5-Math-72B-Instruct",
35+ "Qwen/Qwen2.5-7B-Instruct" ,
36+ "Qwen/Qwen2.5-Math-7B-Instruct" ,
37+ "microsoft/Phi-3-mini-4k-instruct" ,
38+ "microsoft/Phi-3-small-8k-instruct" ,
39+ "microsoft/Phi-3-medium-4k-instruct" ,
3740 "microsoft/Phi-3.5-mini-instruct" ,
38-
41+ "indischepartij/MiniCPM-3B-OpenHermes-2.5-v2" ,
42+ "teknium/OpenHermes-2.5-Mistral-7B" ,
43+ "TheBloke/FusionNet_34Bx2_MoE-AWQ" ,
44+ "bofenghuang/vigogne-2-70b-chat" ,
45+ "mlabonne/AlphaMonarch-7B" ,
46+ "OrionStarAI/Orion-14B-Chat" ,
47+ "openchat/openchat-3.5-0106" ,
48+ "deepseek-ai/deepseek-coder-33b-instruct" ,
49+ "abacusai/Fewshot-Metamath-OrcaVicuna-Mistral" ,
50+ "CohereForAI/c4ai-command-r-plus" ,
51+ "THUDM/chatglm3-6b" ,
52+ "derek33125/project-angel-chatglm4" ,
53+ "deepseek-ai/DeepSeek-Coder-V2-Instruct" ,
54+ "deepseek-ai/DeepSeek-Coder-V2-Lite-Instruct" ,
55+ "deepseek-ai/DeepSeek-V2.5" ,
56+
57+ # Needs debugging:
58+ # "eachadea/vicuna-13b-1.1",
59+ # "microsoft/Phi-3-vision-instruct",
60+
3961 # Gated models:
4062 "meta-llama/Meta-Llama-3.1-8B-Instruct" ,
63+ "google/gemma-7b-it" ,
4164 "google/gemma-2-2b-it" ,
65+ "mistralai/Mistral-7B-Instruct-v0.2" ,
4266 "mistralai/Mixtral-8x7B-Instruct-v0.1" ,
4367]
4468
@@ -52,15 +76,15 @@ def strftime_now(format):
     return datetime.now().strftime(format)
 
 def handle_chat_template(model_id, variant, template_src):
-    print(f"# {model_id} @ {variant}")
+    print(f"# {model_id} @ {variant}", flush=True)
     model_name = model_id.replace("/", "-")
     base_name = f'{model_name}-{variant}' if variant else model_name
     template_file = f'tests/chat/templates/{base_name}.jinja'
     print(f'template_file: {template_file}')
     with open(template_file, 'w') as f:
         f.write(template_src)
 
-    print(f"- {template_file}")
+    print(f"- {template_file}", flush=True)
 
     env = jinja2.Environment(
         trim_blocks=True,
@@ -91,7 +115,7 @@ def handle_chat_template(model_id, variant, template_src):
             continue
 
         output_file = f'tests/chat/goldens/{base_name}-{context_name}.txt'
-        print(f"- {output_file}")
+        print(f"- {output_file}", flush=True)
         try:
             output = template.render(**context)
         except:
@@ -103,7 +127,7 @@ def handle_chat_template(model_id, variant, template_src):
             try:
                 output = template.render(**context)
             except Exception as e:
-                print(f" ERROR: {e}")
+                print(f" ERROR: {e}", flush=True)
                 output = f"ERROR: {e}"
 
         with open(output_file, 'w') as f:
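
For reference, a minimal sketch of the rendering step this diff exercises, under stated assumptions: the template file already exists on disk (its name follows the script's model_id.replace("/", "-") scheme), the context dict is hand-written here rather than loaded from a JSON context file as the script does, and lstrip_blocks plus the strftime_now global registration are assumptions not visible in these hunks.

# sketch.py -- assumed standalone example, not the script's actual entry point
from datetime import datetime
import jinja2


def strftime_now(format):
    # Helper exposed to templates that want the current date/time (as in the diff).
    return datetime.now().strftime(format)


env = jinja2.Environment(
    trim_blocks=True,      # shown in the diff
    lstrip_blocks=True)    # assumption: not visible in these hunks
env.globals['strftime_now'] = strftime_now  # assumption: the diff only defines the helper

# Hypothetical template path following the base_name naming scheme above.
with open('tests/chat/templates/Qwen-Qwen2.5-7B-Instruct.jinja') as f:
    template = env.from_string(f.read())

# Hand-written example context; the real contexts also cover tools, system prompts, etc.
context = {
    "messages": [
        {"role": "user", "content": "What time is it?"},
    ],
    "add_generation_prompt": True,
    "bos_token": "<s>",
    "eos_token": "</s>",
}
print(template.render(**context))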