diff --git a/pr_agent/algo/ai_handlers/litellm_ai_handler.py b/pr_agent/algo/ai_handlers/litellm_ai_handler.py
index f51dcc8285..044e6fbdc8 100644
--- a/pr_agent/algo/ai_handlers/litellm_ai_handler.py
+++ b/pr_agent/algo/ai_handlers/litellm_ai_handler.py
@@ -405,9 +405,11 @@ async def chat_completion(self, model: str, system: str, user: str, temperature:
         if get_settings().config.verbosity_level >= 2:
             get_logger().info(f"\nSystem prompt:\n{system}")
             get_logger().info(f"\nUser prompt:\n{user}")
-
-        kwargs["api_key"] = litellm.api_key
-
+
+        # Support for Ollama Cloud
+        if model.startswith('ollama') and get_settings().get("OLLAMA.API_KEY", None):
+            kwargs["api_key"] = litellm.api_key
+
         # Get completion with automatic streaming detection
         resp, finish_reason, response_obj = await self._get_completion(**kwargs)
 