diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_client.py b/python/packages/azure-ai/agent_framework_azure_ai/_client.py
index 2e8edc7e47..0c7b9c9782 100644
--- a/python/packages/azure-ai/agent_framework_azure_ai/_client.py
+++ b/python/packages/azure-ai/agent_framework_azure_ai/_client.py
@@ -421,6 +421,13 @@ async def _prepare_options(
         return run_options
 
+    @override
+    def _check_model_presence(self, run_options: dict[str, Any]) -> None:
+        if not run_options.get("model"):
+            if not self.model_id:
+                raise ValueError("model_deployment_name must be a non-empty string")
+            run_options["model"] = self.model_id
+
     def _transform_input_for_azure_ai(self, input_items: list[dict[str, Any]]) -> list[dict[str, Any]]:
         """Transform input items to match Azure AI Projects expected schema.
diff --git a/python/packages/core/agent_framework/_agents.py b/python/packages/core/agent_framework/_agents.py
index aadd1be40a..f260b1a6d3 100644
--- a/python/packages/core/agent_framework/_agents.py
+++ b/python/packages/core/agent_framework/_agents.py
@@ -834,7 +834,7 @@ async def run(
         """
         input_messages = self._normalize_messages(messages)
         thread, run_chat_options, thread_messages = await self._prepare_thread_and_messages(
-            thread=thread, input_messages=input_messages
+            thread=thread, input_messages=input_messages, **kwargs
         )
         normalized_tools: list[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] = (  # type:ignore[reportUnknownVariableType]
             [] if tools is None else tools if isinstance(tools, list) else [tools]
diff --git a/python/packages/core/agent_framework/_tools.py b/python/packages/core/agent_framework/_tools.py
index 07b11811f3..d72e9bb6dc 100644
--- a/python/packages/core/agent_framework/_tools.py
+++ b/python/packages/core/agent_framework/_tools.py
@@ -783,8 +783,10 @@ async def invoke(
             else:
                 logger.info(f"Function {self.name} succeeded.")
             if OBSERVABILITY_SETTINGS.SENSITIVE_DATA_ENABLED:  # type: ignore[name-defined]
+                from ._types import prepare_function_call_results
+
                 try:
-                    json_result = json.dumps(result)
+                    json_result = prepare_function_call_results(result)
                 except (TypeError, OverflowError):
                     span.set_attribute(OtelAttr.TOOL_RESULT, "")
                     logger.debug("Function result: ")
diff --git a/python/packages/core/agent_framework/azure/_responses_client.py b/python/packages/core/agent_framework/azure/_responses_client.py
index 3f6140eeeb..c967a6d1b8 100644
--- a/python/packages/core/agent_framework/azure/_responses_client.py
+++ b/python/packages/core/agent_framework/azure/_responses_client.py
@@ -1,5 +1,6 @@
 # Copyright (c) Microsoft. All rights reserved.
 
+import sys
 from collections.abc import Mapping
 from typing import Any, TypeVar
 from urllib.parse import urljoin
@@ -18,6 +19,11 @@
     AzureOpenAISettings,
 )
 
+if sys.version_info >= (3, 12):
+    from typing import override  # type: ignore  # pragma: no cover
+else:
+    from typing_extensions import override  # type: ignore[import]  # pragma: no cover
+
 TAzureOpenAIResponsesClient = TypeVar("TAzureOpenAIResponsesClient", bound="AzureOpenAIResponsesClient")
@@ -144,3 +150,10 @@ def __init__(
             client=async_client,
             instruction_role=instruction_role,
         )
+
+    @override
+    def _check_model_presence(self, run_options: dict[str, Any]) -> None:
+        if not run_options.get("model"):
+            if not self.model_id:
+                raise ValueError("deployment_name must be a non-empty string")
+            run_options["model"] = self.model_id
diff --git a/python/packages/core/agent_framework/openai/_responses_client.py b/python/packages/core/agent_framework/openai/_responses_client.py
index 579452ef62..08c17c2619 100644
--- a/python/packages/core/agent_framework/openai/_responses_client.py
+++ b/python/packages/core/agent_framework/openai/_responses_client.py
@@ -401,10 +401,7 @@ async def _prepare_options(
         run_options["input"] = request_input
 
         # model id
-        if not run_options.get("model"):
-            if not self.model_id:
-                raise ValueError("model_id must be a non-empty string")
-            run_options["model"] = self.model_id
+        self._check_model_presence(run_options)
 
         # translations between ChatOptions and Responses API
         translations = {
@@ -466,6 +463,16 @@
         return run_options
 
+    def _check_model_presence(self, run_options: dict[str, Any]) -> None:
+        """Check if the 'model' param is present, and if not raise an error.
+
+        Since the Azure AI clients use a different param for this, this method is overridden in those clients.
+        """
+        if not run_options.get("model"):
+            if not self.model_id:
+                raise ValueError("model_id must be a non-empty string")
+            run_options["model"] = self.model_id
+
     def _get_current_conversation_id(self, chat_options: ChatOptions, **kwargs: Any) -> str | None:
         """Get the current conversation ID from chat options or kwargs."""
         return chat_options.conversation_id or kwargs.get("conversation_id")
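Note for reviewers: below is a minimal, self-contained sketch of the template-method pattern this change introduces, using simplified stand-in classes rather than the real clients in openai/_responses_client.py, azure/_responses_client.py, or agent_framework_azure_ai/_client.py. The base client resolves the model from the run options or its configured model_id; subclasses override only so the error names the parameter their users actually configure (e.g. deployment_name / model_deployment_name). All names and values in the sketch are illustrative assumptions.

from typing import Any


class _BaseClientSketch:
    """Illustrative stand-in for the base OpenAI Responses client (not the real class)."""

    def __init__(self, model_id: str | None = None) -> None:
        self.model_id = model_id

    def _check_model_presence(self, run_options: dict[str, Any]) -> None:
        # Fall back to the client's configured model; the error names the
        # parameter that users of this client actually set.
        if not run_options.get("model"):
            if not self.model_id:
                raise ValueError("model_id must be a non-empty string")
            run_options["model"] = self.model_id


class _AzureClientSketch(_BaseClientSketch):
    """Illustrative stand-in for an Azure-flavored subclass (not the real class)."""

    def _check_model_presence(self, run_options: dict[str, Any]) -> None:
        # Identical fallback logic; only the error message changes so it
        # matches the Azure-side configuration name ("deployment_name").
        if not run_options.get("model"):
            if not self.model_id:
                raise ValueError("deployment_name must be a non-empty string")
            run_options["model"] = self.model_id


# Usage sketch: _prepare_options() calls self._check_model_presence(run_options)
# and gets the subclass-appropriate behavior and error message for free.
opts: dict[str, Any] = {}
_AzureClientSketch(model_id="my-deployment")._check_model_presence(opts)  # hypothetical deployment name
assert opts["model"] == "my-deployment"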