diff --git a/agentrun/model/__model_service_async_template.py b/agentrun/model/__model_service_async_template.py
index 673abc5..e94331d 100644
--- a/agentrun/model/__model_service_async_template.py
+++ b/agentrun/model/__model_service_async_template.py
@@ -208,6 +208,15 @@ def model_info(self, config: Optional[Config] = None) -> BaseInfo:
         assert self.provider_settings is not None
         assert self.provider_settings.base_url is not None
 
+        api_key = self.provider_settings.api_key or ""
+        if not api_key and self.credential_name:
+            from agentrun.credential import Credential
+
+            credential = Credential.get_by_name(
+                self.credential_name, config=cfg
+            )
+            api_key = credential.credential_secret or ""
+
         default_model = (
             self.provider_settings.model_names[0]
             if self.provider_settings.model_names is not None
@@ -216,7 +225,7 @@ def model_info(self, config: Optional[Config] = None) -> BaseInfo:
         )
 
         return BaseInfo(
-            api_key=self.provider_settings.api_key or "",
+            api_key=api_key,
             base_url=self.provider_settings.base_url,
             model=default_model,
             headers=cfg.get_headers(),
diff --git a/agentrun/model/model_service.py b/agentrun/model/model_service.py
index 4acb38b..24f9cce 100644
--- a/agentrun/model/model_service.py
+++ b/agentrun/model/model_service.py
@@ -379,6 +379,15 @@ def model_info(self, config: Optional[Config] = None) -> BaseInfo:
         assert self.provider_settings is not None
         assert self.provider_settings.base_url is not None
 
+        api_key = self.provider_settings.api_key or ""
+        if not api_key and self.credential_name:
+            from agentrun.credential import Credential
+
+            credential = Credential.get_by_name(
+                self.credential_name, config=cfg
+            )
+            api_key = credential.credential_secret or ""
+
         default_model = (
             self.provider_settings.model_names[0]
             if self.provider_settings.model_names is not None
@@ -387,7 +396,7 @@ def model_info(self, config: Optional[Config] = None) -> BaseInfo:
         )
 
         return BaseInfo(
-            api_key=self.provider_settings.api_key or "",
+            api_key=api_key,
             base_url=self.provider_settings.base_url,
             model=default_model,
             headers=cfg.get_headers(),
diff --git a/tests/e2e/__test_model_async_template.py b/tests/e2e/__test_model_async_template.py
index 37354ce..4f862d3 100644
--- a/tests/e2e/__test_model_async_template.py
+++ b/tests/e2e/__test_model_async_template.py
@@ -39,7 +39,6 @@
 )
 from agentrun.utils.config import Config
 from agentrun.utils.exception import (
-    ClientError,
     ResourceAlreadyExistError,
     ResourceNotExistError,
 )
@@ -264,6 +263,64 @@ async def test_model_service_invoke_async(self, model_service_name: str):
 
         ms.delete()
 
+    async def test_model_service_with_credential_async(
+        self, model_service_name: str
+    ):
+        # Create Credential
+        from agentrun.credential import (
+            Credential,
+            CredentialConfig,
+            CredentialCreateInput,
+        )
+
+        cr = await Credential.create_async(
+            CredentialCreateInput(
+                credential_name=f"{model_service_name}-credential",
+                enabled=True,
+                credential_config=CredentialConfig.outbound_llm_api_key(
+                    api_key=api_key,
+                    provider="openai",
+                ),
+            )
+        )
+
+        # Create model service
+        ms = await ModelService.create_async(
+            ModelServiceCreateInput(
+                model_service_name=model_service_name,
+                description="原始描述",
+                model_type=ModelType.LLM,
+                provider="openai",
+                credential_name=cr.credential_name,
+                provider_settings=ProviderSettings(
+                    base_url=base_url,
+                    model_names=model_names,
+                ),
+            )
+        )
+        ms.wait_until_ready_or_failed()
+
+        result = ms.completions(
+            messages=[
+                {
+                    "role": "system",
+                    "content": "你是一个回音壁,会原封不动返回用户的输入",
+                },
+                {"role": "user", "content": "你好!"},
+                {"role": "assistant", "content": "你好!"},
+                {"role": "user", "content": "今天天气怎么样?"},
+            ],
+            stream=False,
+        )
+        assert isinstance(result, ModelResponse)
+        assert (
+            pydash.get(result, "choices[0].message.content")
+            == "今天天气怎么样?"
+        )
+
+        await ms.delete_async()
+        await cr.delete_async()
+
 
 class TestModelProxy:
     """ModelProxy 模块 E2E 测试"""
diff --git a/tests/e2e/test_model.py b/tests/e2e/test_model.py
index d1c73f2..01e5738 100644
--- a/tests/e2e/test_model.py
+++ b/tests/e2e/test_model.py
@@ -49,7 +49,6 @@
 )
 from agentrun.utils.config import Config
 from agentrun.utils.exception import (
-    ClientError,
     ResourceAlreadyExistError,
     ResourceNotExistError,
 )
@@ -474,6 +473,120 @@ def test_model_service_invoke(self, model_service_name: str):
 
         ms.delete()
 
+    async def test_model_service_with_credential_async(
+        self, model_service_name: str
+    ):
+        # Create Credential
+        from agentrun.credential import (
+            Credential,
+            CredentialConfig,
+            CredentialCreateInput,
+        )
+
+        cr = await Credential.create_async(
+            CredentialCreateInput(
+                credential_name=f"{model_service_name}-credential",
+                enabled=True,
+                credential_config=CredentialConfig.outbound_llm_api_key(
+                    api_key=api_key,
+                    provider="openai",
+                ),
+            )
+        )
+
+        # Create model service
+        ms = await ModelService.create_async(
+            ModelServiceCreateInput(
+                model_service_name=model_service_name,
+                description="原始描述",
+                model_type=ModelType.LLM,
+                provider="openai",
+                credential_name=cr.credential_name,
+                provider_settings=ProviderSettings(
+                    base_url=base_url,
+                    model_names=model_names,
+                ),
+            )
+        )
+        ms.wait_until_ready_or_failed()
+
+        result = ms.completions(
+            messages=[
+                {
+                    "role": "system",
+                    "content": "你是一个回音壁,会原封不动返回用户的输入",
+                },
+                {"role": "user", "content": "你好!"},
+                {"role": "assistant", "content": "你好!"},
+                {"role": "user", "content": "今天天气怎么样?"},
+            ],
+            stream=False,
+        )
+        assert isinstance(result, ModelResponse)
+        assert (
+            pydash.get(result, "choices[0].message.content")
+            == "今天天气怎么样?"
+        )
+
+        await ms.delete_async()
+        await cr.delete_async()
+
+    def test_model_service_with_credential(self, model_service_name: str):
+        # Create Credential
+        from agentrun.credential import (
+            Credential,
+            CredentialConfig,
+            CredentialCreateInput,
+        )
+
+        cr = Credential.create(
+            CredentialCreateInput(
+                credential_name=f"{model_service_name}-credential",
+                enabled=True,
+                credential_config=CredentialConfig.outbound_llm_api_key(
+                    api_key=api_key,
+                    provider="openai",
+                ),
+            )
+        )
+
+        # Create model service
+        ms = ModelService.create(
+            ModelServiceCreateInput(
+                model_service_name=model_service_name,
+                description="原始描述",
+                model_type=ModelType.LLM,
+                provider="openai",
+                credential_name=cr.credential_name,
+                provider_settings=ProviderSettings(
+                    base_url=base_url,
+                    model_names=model_names,
+                ),
+            )
+        )
+        ms.wait_until_ready_or_failed()
+
+        result = ms.completions(
+            messages=[
+                {
+                    "role": "system",
+                    "content": "你是一个回音壁,会原封不动返回用户的输入",
+                },
+                {"role": "user", "content": "你好!"},
+                {"role": "assistant", "content": "你好!"},
+                {"role": "user", "content": "今天天气怎么样?"},
+            ],
+            stream=False,
+        )
+        assert isinstance(result, ModelResponse)
+        assert (
+            pydash.get(result, "choices[0].message.content")
+            == "今天天气怎么样?"
+        )
+
+        ms.delete()
+        cr.delete()
+
 
 class TestModelProxy:
     """ModelProxy 模块 E2E 测试"""