@@ -1681,6 +1681,31 @@ def get_default_conv_template(self, model_path: str) -> Conversation:
         return get_conv_template("llama2-chinese")


+class Lamma2ChineseAlpacaAdapter(BaseModelAdapter):
+    """The model adapter for ymcui/Chinese-LLaMA-Alpaca sft"""
+
+    def match(self, model_path: str):
+        return "chinese-alpaca" in model_path.lower()
+
+    def load_model(self, model_path: str, from_pretrained_kwargs: dict):
+        revision = from_pretrained_kwargs.get("revision", "main")
+        tokenizer = AutoTokenizer.from_pretrained(
+            model_path,
+            trust_remote_code=True,
+            revision=revision,
+        )
+        model = AutoModelForCausalLM.from_pretrained(
+            model_path,
+            trust_remote_code=True,
+            low_cpu_mem_usage=True,
+            **from_pretrained_kwargs,
+        )
+        return model, tokenizer
+
+    def get_default_conv_template(self, model_path: str) -> Conversation:
+        return get_conv_template("llama2-chinese")
+
+
 class VigogneAdapter(BaseModelAdapter):
     """The model adapter for vigogne (e.g., bofenghuang/vigogne-2-7b-chat)"""

@@ -1895,6 +1920,7 @@ def get_default_conv_template(self, model_path: str) -> Conversation:
 register_model_adapter(BGEAdapter)
 register_model_adapter(E5Adapter)
 register_model_adapter(Lamma2ChineseAdapter)
+register_model_adapter(Lamma2ChineseAlpacaAdapter)
 register_model_adapter(VigogneAdapter)
 register_model_adapter(OpenLLaMaOpenInstructAdapter)
 register_model_adapter(ReaLMAdapter)
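
For context, the sketch below illustrates the adapter-registry pattern this diff plugs into: `register_model_adapter` appends an adapter instance to a list, and the first adapter whose `match()` returns True is used to pick the conversation template. It is a minimal, self-contained illustration, not FastChat's actual implementation; the registry and lookup function names mirror FastChat's, but treat their exact signatures here as assumptions.

```python
# Minimal sketch of the adapter-registry pattern (assumed names/signatures).
# Real adapters also load the model and tokenizer; this only shows matching.

model_adapters = []  # filled by register_model_adapter(...)


class BaseModelAdapter:
    """Fallback adapter; matches any model path."""

    def match(self, model_path: str):
        return True

    def get_default_conv_template(self, model_path: str) -> str:
        return "one_shot"


class Lamma2ChineseAlpacaAdapter(BaseModelAdapter):
    """Adapter for ymcui/Chinese-LLaMA-Alpaca checkpoints."""

    def match(self, model_path: str):
        return "chinese-alpaca" in model_path.lower()

    def get_default_conv_template(self, model_path: str) -> str:
        return "llama2-chinese"


def register_model_adapter(cls):
    # Registration order matters: the first adapter whose match() is True wins.
    model_adapters.append(cls())


def get_model_adapter(model_path: str) -> BaseModelAdapter:
    for adapter in model_adapters:
        if adapter.match(model_path):
            return adapter
    raise ValueError(f"No adapter found for {model_path}")


register_model_adapter(Lamma2ChineseAlpacaAdapter)
register_model_adapter(BaseModelAdapter)  # keep the catch-all last

adapter = get_model_adapter("models/chinese-alpaca-2-7b")
print(adapter.get_default_conv_template("models/chinese-alpaca-2-7b"))  # llama2-chinese
```

With this change registered, any model path containing "chinese-alpaca" resolves to the new adapter and therefore to the `llama2-chinese` conversation template.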