diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py index 2b987cdcf3f5..07b41e3d8da1 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py @@ -5,9 +5,10 @@ from typing import TYPE_CHECKING, Optional, Any -from .agent_framework import AgentFrameworkCBAgent -from .tool_client import ToolClient -from ._version import VERSION +from azure.ai.agentserver.agentframework.agent_framework import AgentFrameworkCBAgent +from azure.ai.agentserver.agentframework.tool_client import ToolClient +from azure.ai.agentserver.agentframework._version import VERSION +from azure.ai.agentserver.core.application._package_metadata import PackageMetadata, set_current_app if TYPE_CHECKING: # pragma: no cover from azure.core.credentials_async import AsyncTokenCredential @@ -22,3 +23,5 @@ def from_agent_framework(agent, __all__ = ["from_agent_framework", "ToolClient"] __version__ = VERSION + +set_current_app(PackageMetadata.from_dist("azure-ai-agentserver-agentframework")) \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py index 233436ac84ea..b55c4aec3960 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py @@ -12,7 +12,7 @@ from agent_framework.azure import AzureAIClient # pylint: disable=no-name-in-module from opentelemetry import trace -from azure.ai.agentserver.core.client.tools import OAuthConsentRequiredError +from ..core.tools._exceptions import OAuthConsentRequiredError from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent from azure.ai.agentserver.core.constants import Constants as AdapterConstants from azure.ai.agentserver.core.logger import APPINSIGHT_CONNSTR_ENV_NAME, get_logger diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/tool_client.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/tool_client.py index 8b7142f0862a..4db103577cbf 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/tool_client.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/tool_client.py @@ -9,7 +9,7 @@ from pydantic import Field, create_model from azure.ai.agentserver.core.logger import get_logger if TYPE_CHECKING: - from azure.ai.agentserver.core.client.tools.aio import AzureAIToolClient, FoundryTool + from azure.ai.agentserver.core.tools import FoundryToolClient, ResolvedFoundryTool logger = get_logger() @@ -51,7 +51,7 @@ class ToolClient: :meta private: """ - def __init__(self, tool_client: "AzureAIToolClient") -> None: + def __init__(self, tool_client: "FoundryToolClient") -> None: """Initialize the ToolClient. :param tool_client: The AzureAIToolClient instance to use for tool operations. 
@@ -92,7 +92,7 @@ async def list_tools(self) -> List[AIFunction]: return self._aifunction_cache - def _convert_to_agent_framework_tool(self, azure_tool: "FoundryTool") -> AIFunction: + def _convert_to_agent_framework_tool(self, azure_tool: "ResolvedFoundryTool") -> AIFunction: """Convert an AzureAITool to an Agent Framework AI Function :param azure_tool: The AzureAITool to convert. diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/__init__.py similarity index 73% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/__init__.py rename to sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/__init__.py index fdf8caba9ef5..28077537d94b 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/__init__.py @@ -2,4 +2,4 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_builder.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_builder.py new file mode 100644 index 000000000000..c09c253ab09f --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_builder.py @@ -0,0 +1,5 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +class AgentServerBuilder: + pass diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_configuration.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_configuration.py new file mode 100644 index 000000000000..fe05dae18a67 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_configuration.py @@ -0,0 +1,42 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from dataclasses import dataclass, field + +from azure.core.credentials_async import AsyncTokenCredential + + +@dataclass(frozen=True) +class HttpServerConfiguration: + """Resolved configuration for the HTTP server. + + :ivar str host: The host address the server listens on. Defaults to '0.0.0.0'. + :ivar int port: The port number the server listens on. Defaults to 8088. + """ + + host: str = "0.0.0.0" + port: int = 8088 + + +class ToolsConfiguration: + """Resolved configuration for the Tools subsystem. + + :ivar int catalog_cache_ttl: The time-to-live (TTL) for the tool catalog cache in seconds. + Defaults to 600 seconds (10 minutes). + :ivar int catalog_cache_max_size: The maximum size of the tool catalog cache. + Defaults to 1024 entries. 
+ """ + + catalog_cache_ttl: int = 600 + catalog_cache_max_size: int = 1024 + + +@dataclass(frozen=True, kw_only=True) +class AgentServerConfiguration: + """Resolved configuration for the Agent Server application.""" + + agent_name: str = "$default" + project_endpoint: str + credential: AsyncTokenCredential + http: HttpServerConfiguration = field(default_factory=HttpServerConfiguration) + tools: ToolsConfiguration = field(default_factory=ToolsConfiguration) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_options.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_options.py new file mode 100644 index 000000000000..cb4e8bde0bfd --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_options.py @@ -0,0 +1,44 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from typing import Literal, NotRequired, TypedDict, Union + +from azure.core.credentials import TokenCredential +from azure.core.credentials_async import AsyncTokenCredential + + +class AgentServerOptions(TypedDict): + """Configuration options for the Agent Server. + + Attributes: + project_endpoint (str, optional): The endpoint URL for the project. Defaults to current project. + credential (Union[AsyncTokenCredential, TokenCredential], optional): The credential used for authentication. + Defaults to current project's managed identity. + """ + project_endpoint: NotRequired[str] + credential: NotRequired[Union[AsyncTokenCredential, TokenCredential]] + http: NotRequired["HttpServerOptions"] + toos: NotRequired["ToolsOptions"] + + +class HttpServerOptions(TypedDict): + """Configuration options for the HTTP server. + + Attributes: + host (str, optional): The host address the server listens on. + """ + host: NotRequired[Literal["127.0.0.1", "localhost", "0.0.0.0"]] + + +class ToolsOptions(TypedDict): + """Configuration options for the Tools subsystem. + + Attributes: + catalog_cache_ttl (int, optional): The time-to-live (TTL) for the tool catalog cache in seconds. + Defaults to 600 seconds (10 minutes). + catalog_cache_max_size (int, optional): The maximum size of the tool catalog cache. + Defaults to 1024 entries. + """ + catalog_cache_ttl: NotRequired[int] + catalog_cache_max_size: NotRequired[int] + diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_package_metadata.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_package_metadata.py new file mode 100644 index 000000000000..36ff9313a6a2 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_package_metadata.py @@ -0,0 +1,50 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# ---------------------------------------------------------
+from __future__ import annotations
+
+import platform
+from dataclasses import dataclass
+from importlib.metadata import Distribution, PackageNotFoundError
+
+
+@dataclass(frozen=True)
+class PackageMetadata:
+    name: str
+    version: str
+    python_version: str
+    platform: str
+
+    @staticmethod
+    def from_dist(dist_name: str):
+        try:
+            ver = Distribution.from_name(dist_name).version
+        except PackageNotFoundError:
+            ver = ""
+
+        return PackageMetadata(
+            name=dist_name,
+            version=ver,
+            python_version=platform.python_version(),
+            platform=platform.platform(),
+        )
+
+    def as_user_agent(self, component: str | None = None) -> str:
+        component_part = f"{component} " if component else ""
+        return (f"{self.name}/{self.version} "
+                f"Python {self.python_version} "
+                f"{component_part}({self.platform})")
+
+
+_default = PackageMetadata.from_dist("azure-ai-agentserver-core")
+_app: PackageMetadata = _default
+
+
+def set_current_app(app: PackageMetadata) -> None:
+    global _app
+    _app = app
+
+
+def get_current_app() -> PackageMetadata:
+    global _app
+    return _app
diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/__init__.py
deleted file mode 100644
index 3800740fb464..000000000000
--- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# ---------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# ---------------------------------------------------------
-
-from ._client import AzureAIToolClient, FoundryTool
-from ._exceptions import OAuthConsentRequiredError, MCPToolApprovalRequiredError
-
-__all__ = [
-    "AzureAIToolClient",
-    "FoundryTool",
-    "OAuthConsentRequiredError",
-    "MCPToolApprovalRequiredError",
-]
diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_client.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_client.py
deleted file mode 100644
index ee56a4d44a94..000000000000
--- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_client.py
+++ /dev/null
@@ -1,195 +0,0 @@
-# ---------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# ---------------------------------------------------------
-# pylint: disable=protected-access
-from typing import Any, List, Mapping, Union
-from azure.core import PipelineClient
-from azure.core.pipeline import policies
-from azure.core.credentials import TokenCredential
-from azure.core.tracing.decorator import distributed_trace
-
-from ._configuration import AzureAIToolClientConfiguration
-from .operations._operations import MCPToolsOperations, RemoteToolsOperations
-from ._utils._model_base import InvocationPayloadBuilder
-from ._model_base import FoundryTool, ToolSource
-class AzureAIToolClient:
-    """Synchronous client for aggregating tools from Azure AI MCP and Tools APIs.
-
-    This client provides access to tools from both MCP (Model Context Protocol) servers
-    and Azure AI Tools API endpoints, enabling unified tool discovery and invocation.
-
-    :param str endpoint:
-        The fully qualified endpoint for the Azure AI Agents service.
-        Example: "https://.api.azureml.ms"
-    :param credential:
-        Credential for authenticating requests to the service.
- Use credentials from azure-identity like DefaultAzureCredential. - :type credential: ~azure.core.credentials.TokenCredential - :keyword str agent_name: - Name of the agent to use for tool operations. Default is "$default". - :keyword List[Mapping[str, Any]] tools: - List of tool configurations defining which tools to include. - :keyword Mapping[str, Any] user: - User information for tool invocations (object_id, tenant_id). - :keyword str api_version: - API version to use when communicating with the service. - Default is the latest supported version. - :keyword transport: - Custom transport implementation. Default is RequestsTransport. - :paramtype transport: ~azure.core.pipeline.transport.HttpTransport - - """ - - def __init__( - self, - endpoint: str, - credential: "TokenCredential", - **kwargs: Any, - ) -> None: - """Initialize the synchronous Azure AI Tool Client. - - :param str endpoint: The service endpoint URL. - :param credential: Credentials for authenticating requests. - :type credential: ~azure.core.credentials.TokenCredential - :keyword kwargs: Additional keyword arguments for client configuration. - """ - self._config = AzureAIToolClientConfiguration( - endpoint, - credential, - **kwargs, - ) - - _policies = kwargs.pop("policies", None) - if _policies is None: - _policies = [ - policies.RequestIdPolicy(**kwargs), - self._config.headers_policy, - self._config.user_agent_policy, - self._config.proxy_policy, - policies.ContentDecodePolicy(**kwargs), - self._config.redirect_policy, - self._config.retry_policy, - self._config.authentication_policy, - self._config.custom_hook_policy, - self._config.logging_policy, - policies.DistributedTracingPolicy(**kwargs), - policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, - self._config.http_logging_policy, - ] - self._client: PipelineClient = PipelineClient(base_url=endpoint, policies=_policies, **kwargs) - - # Initialize specialized clients with client and config - self._mcp_tools = MCPToolsOperations(client=self._client, config=self._config) - self._remote_tools = RemoteToolsOperations(client=self._client, config=self._config) - - def list_tools(self) -> List[FoundryTool]: - """List all available tools from configured sources. - - Retrieves tools from both MCP servers and Azure AI Tools API endpoints, - returning them as FoundryTool instances ready for invocation. - :return: List of available tools from all configured sources. - :rtype: List[~AzureAITool] - :raises ~exceptions.OAuthConsentRequiredError: - Raised when the service requires user OAuth consent. - :raises ~exceptions.MCPToolApprovalRequiredError: - Raised when tool access requires human approval. - :raises ~azure.core.exceptions.HttpResponseError: - Raised for HTTP communication failures. 
- - """ - - existing_names: set[str] = set() - - tools: List[FoundryTool] = [] - - # Fetch MCP tools - if ( - self._config.tool_config._named_mcp_tools - and len(self._config.tool_config._named_mcp_tools) > 0 - ): - mcp_tools = self._mcp_tools.list_tools(existing_names) - tools.extend(mcp_tools) - - # Fetch Tools API tools - if ( - self._config.tool_config._remote_tools - and len(self._config.tool_config._remote_tools) > 0 - ): - tools_api_tools = self._remote_tools.resolve_tools(existing_names) - tools.extend(tools_api_tools) - - for tool in tools: - # Capture tool in a closure to avoid shadowing issues - def make_invoker(captured_tool): - return lambda *args, **kwargs: self.invoke_tool(captured_tool, *args, **kwargs) - tool.invoker = make_invoker(tool) - return tools - - @distributed_trace - def invoke_tool( - self, - tool: Union[str, FoundryTool], - *args: Any, - **kwargs: Any, - ) -> Any: - """Invoke a tool by instance, name, or descriptor. - - :param tool: Tool to invoke, specified as an AzureAITool instance, - tool name string, or FoundryTool. - :type tool: Union[str, ~FoundryTool] - :param args: Positional arguments to pass to the tool. - :type args: Any - :return: The result of invoking the tool. - :rtype: Any - """ - descriptor = self._resolve_tool_descriptor(tool) - payload = InvocationPayloadBuilder.build_payload(args, kwargs, configuration={}) - return self._invoke_tool(descriptor, payload, **kwargs) - - def _resolve_tool_descriptor( - self, tool: Union[str, FoundryTool] - ) -> FoundryTool: - """Resolve a tool reference to a descriptor. - - :param tool: Tool to resolve, either a FoundryTool instance or a string name/key. - :type tool: Union[str, FoundryTool] - :return: The resolved FoundryTool descriptor. - :rtype: FoundryTool - """ - if isinstance(tool, FoundryTool): - return tool - if isinstance(tool, str): - # Fetch all tools and find matching descriptor - descriptors = self.list_tools() - for descriptor in descriptors: - if tool in (descriptor.name, descriptor.key): - return descriptor - raise KeyError(f"Unknown tool: {tool}") - raise TypeError("Tool must be an AzureAITool, FoundryTool, or registered name/key") - - def _invoke_tool(self, descriptor: FoundryTool, arguments: Mapping[str, Any], **kwargs: Any) -> Any: - """Invoke a tool descriptor. - - :param descriptor: The tool descriptor to invoke. - :type descriptor: FoundryTool - :param arguments: Arguments to pass to the tool. - :type arguments: Mapping[str, Any] - :return: The result of the tool invocation. 
- :rtype: Any - """ - if descriptor.source is ToolSource.MCP_TOOLS: - return self._mcp_tools.invoke_tool(descriptor, arguments, **kwargs) - if descriptor.source is ToolSource.REMOTE_TOOLS: - return self._remote_tools.invoke_tool(descriptor, arguments, **kwargs) - raise ValueError(f"Unsupported tool source: {descriptor.source}") - - def close(self) -> None: - self._client.close() - - def __enter__(self) -> "AzureAIToolClient": - self._client.__enter__() - return self - - def __exit__(self, *exc_details: Any) -> None: - self._client.__exit__(*exc_details) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_configuration.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_configuration.py deleted file mode 100644 index 71cbdebec911..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_configuration.py +++ /dev/null @@ -1,85 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- - -from typing import Any, List, Optional, TYPE_CHECKING - -from azure.core.pipeline import policies -from ._utils._model_base import ToolConfigurationParser, UserInfo, ToolDefinition - -if TYPE_CHECKING: - from azure.core.credentials import TokenCredential - -class AzureAIToolClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for Azure AI Tool Client. - - Manages authentication, endpoint configuration, and policy settings for the - Azure AI Tool Client. This class is used internally by the client and should - not typically be instantiated directly. - - :param str endpoint: - Fully qualified endpoint for the Azure AI Agents service. - :param credential: - Azure TokenCredential for authentication. - :type credential: ~azure.core.credentials.TokenCredential - :keyword str api_version: - API version to use. Default is the latest supported version. - :keyword List[str] credential_scopes: - OAuth2 scopes for token requests. Default is ["https://ai.azure.com/.default"]. - :keyword str agent_name: - Name of the agent. Default is "$default". - :keyword List[Mapping[str, Any]] tools: - List of tool configurations. - :keyword Mapping[str, Any] user: - User information for tool invocations. - """ - - def __init__( - self, - endpoint: str, - credential: "TokenCredential", - **kwargs: Any, - ) -> None: - """Initialize the configuration. - - :param str endpoint: The service endpoint URL. - :param credential: Credentials for authenticating requests. - :type credential: ~azure.core.credentials.TokenCredential - :keyword kwargs: Additional configuration options. 
- """ - api_version: str = kwargs.pop("api_version", "2025-05-15-preview") - - self.endpoint = endpoint - self.credential = credential - self.api_version = api_version - self.credential_scopes = kwargs.pop("credential_scopes", ["https://ai.azure.com/.default"]) - - # Tool configuration - self.agent_name: str = kwargs.pop("agent_name", "$default") - self.tools: Optional[List[ToolDefinition]] = kwargs.pop("tools", None) - self.user: Optional[UserInfo] = kwargs.pop("user", None) - - # Initialize tool configuration parser - self.tool_config = ToolConfigurationParser(self.tools) - - self._configure(**kwargs) - - # Warn about unused kwargs - if kwargs: - import warnings - warnings.warn(f"Unused configuration parameters: {list(kwargs.keys())}", UserWarning) - - def _configure(self, **kwargs: Any) -> None: - self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) - self.authentication_policy = kwargs.get("authentication_policy") - if self.credential and not self.authentication_policy: - self.authentication_policy = policies.BearerTokenCredentialPolicy( - self.credential, *self.credential_scopes, **kwargs - ) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_exceptions.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_exceptions.py deleted file mode 100644 index 41515592d698..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_exceptions.py +++ /dev/null @@ -1,52 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- - -from typing import Any, Mapping, Optional - - -class OAuthConsentRequiredError(RuntimeError): - """Raised when the service requires end-user OAuth consent. - - This exception is raised when a tool or service operation requires explicit - OAuth consent from the end user before the operation can proceed. - - :ivar str message: Human-readable guidance returned by the service. - :ivar str consent_url: Link that the end user must visit to provide consent. - :ivar dict payload: Full response payload from the service. - - :param str message: Human-readable guidance returned by the service. - :param str consent_url: Link that the end user must visit to provide the required consent. - :param dict payload: Full response payload supplied by the service. - """ - - def __init__(self, message: str, consent_url: Optional[str], payload: Mapping[str, Any]): - super().__init__(message) - self.message = message - self.consent_url = consent_url - self.payload = dict(payload) - - -class MCPToolApprovalRequiredError(RuntimeError): - """Raised when an MCP tool invocation needs human approval. 
- - This exception is raised when an MCP (Model Context Protocol) tool requires - explicit human approval before the invocation can proceed, typically for - security or compliance reasons. - - :ivar str message: Human-readable guidance returned by the service. - :ivar dict approval_arguments: - Arguments that must be approved or amended before continuing. - :ivar dict payload: Full response payload from the service. - - :param str message: Human-readable guidance returned by the service. - :param dict approval_arguments: - Arguments that must be approved or amended before continuing. - :param dict payload: Full response payload supplied by the service. - """ - - def __init__(self, message: str, approval_arguments: Mapping[str, Any], payload: Mapping[str, Any]): - super().__init__(message) - self.message = message - self.approval_arguments = dict(approval_arguments) - self.payload = dict(payload) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_model_base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_model_base.py deleted file mode 100644 index 7e20b20edeb0..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_model_base.py +++ /dev/null @@ -1,174 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- - -from enum import Enum -import json - -from typing import Any, Awaitable, Callable, Mapping, Optional -from dataclasses import dataclass -import asyncio # pylint: disable=do-not-import-asyncio -import inspect -from azure.core import CaseInsensitiveEnumMeta - -class ToolSource(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Identifies the origin of a tool. - - Specifies whether a tool comes from an MCP (Model Context Protocol) server - or from the Azure AI Tools API (remote tools). - """ - - MCP_TOOLS = "mcp_tools" - REMOTE_TOOLS = "remote_tools" - -class ToolDefinition: - """Definition of a tool including its parameters. - - :ivar str type: JSON schema type (e.g., "mcp", "a2", other tools). - """ - - def __init__(self, type: str, **kwargs: Any) -> None: - """Initialize ToolDefinition with type and any additional properties. - - :param str type: JSON schema type (e.g., "mcp", "a2", other tools). - :param kwargs: Any additional properties to set on the tool definition. - """ - self.type = type - # Store all additional properties as attributes - for key, value in kwargs.items(): - setattr(self, key, value) - - def __repr__(self) -> str: - """Return a detailed string representation of the ToolDefinition. - - :return: JSON string representation of the ToolDefinition. - :rtype: str - """ - return json.dumps(self.__dict__, default=str) - - def __str__(self) -> str: - """Return a human-readable string representation. - - :return: JSON string representation of the ToolDefinition. - :rtype: str - """ - return json.dumps(self.__dict__, default=str) - -@dataclass -class FoundryTool: - """Lightweight description of a tool that can be invoked. - - Represents metadata and configuration for a single tool, including its - name, description, input schema, and source information. - - :ivar str key: Unique identifier for this tool. - :ivar str name: Display name of the tool. - :ivar str description: Human-readable description of what the tool does. - :ivar ~ToolSource source: - Origin of the tool (MCP_TOOLS or REMOTE_TOOLS). 
- :ivar dict metadata: Raw metadata from the API response. - :ivar dict input_schema: - JSON schema describing the tool's input parameters, or None. - :ivar ToolDefinition tool_definition: - Optional tool definition object, or None. - """ - - key: str - name: str - description: str - source: ToolSource - metadata: Mapping[str, Any] - input_schema: Optional[Mapping[str, Any]] = None - tool_definition: Optional[ToolDefinition] = None - invoker: Optional[Callable[..., Awaitable[Any]]] = None - - def invoke(self, *args: Any, **kwargs: Any) -> Any: - """Invoke the tool synchronously. - - :param args: Positional arguments to pass to the tool. - :type args: Any - :return: The result from the tool invocation. - :rtype: Any - """ - - if not self.invoker: - raise NotImplementedError("No invoker function defined for this tool.") - if inspect.iscoroutinefunction(self.invoker): - # If the invoker is async, check if we're already in an event loop - try: - asyncio.get_running_loop() - # We're in a running loop, can't use asyncio.run() - raise RuntimeError( - "Cannot call invoke() on an async tool from within an async context. " - "Use 'await tool.ainvoke(...)' or 'await tool(...)' instead." - ) - except RuntimeError as e: - if "no running event loop" in str(e).lower(): - # No running loop, safe to use asyncio.run() - return asyncio.run(self.invoker(*args, **kwargs)) - # Re-raise our custom error - raise - else: - return self.invoker(*args, **kwargs) - - async def ainvoke(self, *args: Any, **kwargs: Any) -> Any: - """Invoke the tool asynchronously. - - :param args: Positional arguments to pass to the tool. - :type args: Any - :return: The result from the tool invocation. - :rtype: Any - """ - - if not self.invoker: - raise NotImplementedError("No invoker function defined for this tool.") - if inspect.iscoroutinefunction(self.invoker): - return await self.invoker(*args, **kwargs) - - result = self.invoker(*args, **kwargs) - # If the result is awaitable (e.g., a coroutine), await it - if inspect.iscoroutine(result) or hasattr(result, '__await__'): - return await result - return result - - def __call__(self, *args: Any, **kwargs: Any) -> Any: - - # Check if the invoker is async - if self.invoker and inspect.iscoroutinefunction(self.invoker): - # Return coroutine for async context - return self.ainvoke(*args, **kwargs) - - # Use sync invoke - return self.invoke(*args, **kwargs) - - -class UserInfo: - """Represents user information. - - :ivar str objectId: User's object identifier. - :ivar str tenantId: Tenant identifier. - """ - - def __init__(self, objectId: str, tenantId: str, **kwargs: Any) -> None: - """Initialize UserInfo with user details. - - :param str objectId: User's object identifier. - :param str tenantId: Tenant identifier. - :param kwargs: Any additional properties to set on the user. - """ - self.objectId = objectId - self.tenantId = tenantId - # Store all additional properties as attributes - for key, value in kwargs.items(): - setattr(self, key, value) - - def to_dict(self) -> dict: - """Convert to dictionary for JSON serialization. - - :return: Dictionary containing objectId and tenantId. 
- :rtype: dict - """ - return { - "objectId": self.objectId, - "tenantId": self.tenantId - } diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_utils/_model_base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_utils/_model_base.py deleted file mode 100644 index e06ef576264e..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_utils/_model_base.py +++ /dev/null @@ -1,796 +0,0 @@ - -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -# mypy: ignore-errors - -from dataclasses import dataclass, asdict, is_dataclass -from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Set, Tuple - -from .._model_base import ToolDefinition, FoundryTool, ToolSource, UserInfo - - - -class ToolDescriptorBuilder: - """Builds FoundryTool objects from raw tool data.""" - - @staticmethod - def build_descriptors( - raw_tools: Iterable[Mapping[str, Any]], - source: ToolSource, - existing_names: Set[str], - ) -> List[FoundryTool]: - """Build tool descriptors from raw tool data. - - Parameters - ---------- - raw_tools : Iterable[Mapping[str, Any]] - Raw tool data from API (can be dicts or dataclass objects) - source : ToolSource - Source of the tools - existing_names : Set[str] - Set of existing tool names to avoid conflicts - - Returns - ------- - List[FoundryTool] - List of built tool descriptors - """ - descriptors: List[FoundryTool] = [] - for raw in raw_tools: - # Convert dataclass objects to dictionaries - if is_dataclass(raw) and not isinstance(raw, type): - raw = asdict(raw) - - name, description = ToolMetadataExtractor.extract_name_description(raw) - if not name: - continue - - key = ToolMetadataExtractor.derive_tool_key(raw, source) - description = description or "" - resolved_name = NameResolver.ensure_unique_name(name, existing_names) - - descriptor = FoundryTool( - key=key, - name=resolved_name, - description=description, - source=source, - metadata=dict(raw), - input_schema=ToolMetadataExtractor.extract_input_schema(raw), - tool_definition= raw.get("tool_definition") - ) - descriptors.append(descriptor) - existing_names.add(resolved_name) - - return descriptors - - -class ToolMetadataExtractor: - """Extracts metadata from raw tool data.""" - - @staticmethod - def extract_name_description(raw: Mapping[str, Any]) -> Tuple[Optional[str], Optional[str]]: - """Extract name and description from raw tool data. - - Parameters - ---------- - raw : Mapping[str, Any] - Raw tool data - - Returns - ------- - Tuple[Optional[str], Optional[str]] - Tuple of (name, description) - """ - name = ( - raw.get("name") - or raw.get("id") - or raw.get("tool_name") - or raw.get("definition", {}).get("name") - or raw.get("tool", {}).get("name") - ) - description = ( - raw.get("description") - or raw.get("long_description") - or raw.get("definition", {}).get("description") - or raw.get("tool", {}).get("description") - ) - return name, description - - @staticmethod - def derive_tool_key(raw: Mapping[str, Any], source: ToolSource) -> str: - """Derive unique key for a tool. 
- - Parameters - ---------- - raw : Mapping[str, Any] - Raw tool data - source : ToolSource - Source of the tool - - Returns - ------- - str - Unique tool key - """ - for candidate in (raw.get("id"), raw.get("name"), raw.get("tool_name")): - if candidate: - return f"{source.value}:{candidate}" - return f"{source.value}:{id(raw)}" - - @staticmethod - def extract_input_schema(raw: Mapping[str, Any]) -> Optional[Mapping[str, Any]]: - """Extract input schema from raw tool data. - - Parameters - ---------- - raw : Mapping[str, Any] - Raw tool data - - Returns - ------- - Optional[Mapping[str, Any]] - Input schema if found - """ - for key in ("input_schema", "inputSchema", "schema", "parameters"): - if key in raw and isinstance(raw[key], Mapping): - return raw[key] - nested = raw.get("definition") or raw.get("tool") - if isinstance(nested, Mapping): - return ToolMetadataExtractor.extract_input_schema(nested) - return None - - @staticmethod - def extract_metadata_schema(raw: Mapping[str, Any]) -> Optional[Mapping[str, Any]]: - """Extract input schema from raw tool data. - - Parameters - ---------- - raw : Mapping[str, Any] - Raw tool data - - Returns - ------- - Optional[Mapping[str, Any]] - _metadata if found - """ - for key in ("_meta", "metadata", "meta"): - if key in raw and isinstance(raw[key], Mapping): - return raw[key] - return None - - -class NameResolver: - """Resolves tool names to ensure uniqueness.""" - - @staticmethod - def ensure_unique_name(proposed_name: str, existing_names: Set[str]) -> str: - """Ensure a tool name is unique. - - Parameters - ---------- - proposed_name : str - Proposed tool name - existing_names : Set[str] - Set of existing tool names - - Returns - ------- - str - Unique tool name - """ - if proposed_name not in existing_names: - return proposed_name - - suffix = 1 - while True: - candidate = f"{proposed_name}_{suffix}" - if candidate not in existing_names: - return candidate - suffix += 1 - - -class MetadataMapper: - """Maps tool metadata from _meta schema to tool configuration.""" - - # Default key mapping: meta_schema_key -> output_key - # Note: When used with key_overrides, the direction is reversed internally - # to support tool_def_key -> meta_schema_key mapping - DEFAULT_KEY_MAPPING = { - "imagegen_model_deployment_name": "model_deployment_name", - "model_deployment_name": "model", - "deployment_name": "model", - } - - @staticmethod - def extract_metadata_config( - tool_metadata: Mapping[str, Any], - tool_definition: Optional[Mapping[str, Any]] = None, - key_overrides: Optional[Mapping[str, str]] = None, - ) -> Dict[str, Any]: - """Extract metadata configuration from _meta schema and tool definition. - - This method extracts properties defined in the _meta schema and attempts - to find matching values in the tool definition. Key overrides allow mapping - from tool definition property names to _meta schema property names. - - Parameters - ---------- - tool_metadata : Mapping[str, Any] - The _meta schema containing property definitions - tool_definition : Optional[Mapping[str, Any]] - The tool definition containing actual values - key_overrides : Optional[Mapping[str, str]] - Mapping from tool definition keys to _meta schema keys. - Format: {"tool_def_key": "meta_schema_key"} - Example: {"model": "imagegen_model_deployment_name"} - - Returns - ------- - Dict[str, Any] - Dictionary with mapped metadata configuration - - Examples - -------- - >>> meta_schema = { - ... "properties": { - ... "quality": {"type": "string", "default": "auto"}, - ... 
"model_deployment_name": {"type": "string"} - ... } - ... } - >>> tool_def = {"quality": "high", "model": "gpt-4"} - >>> overrides = {"model": "model_deployment_name"} # tool_def -> meta - >>> MetadataMapper.extract_metadata_config(meta_schema, tool_def, overrides) - {'quality': 'high', 'model_deployment_name': 'gpt-4'} - """ - result: Dict[str, Any] = {} - - # Build reverse mapping: tool_definition_key -> meta_property_name - # Start with default mappings (also reversed) - reverse_default_mapping = {v: k for k, v in MetadataMapper.DEFAULT_KEY_MAPPING.items()} - - # Add user overrides (these are already tool_def -> meta format) - tool_to_meta_mapping = dict(reverse_default_mapping) - if key_overrides: - tool_to_meta_mapping.update(key_overrides) - - # Extract properties from _meta schema - properties = tool_metadata.get("properties", {}) - if not isinstance(properties, Mapping): - return result - - for meta_prop_name, prop_schema in properties.items(): - if not isinstance(prop_schema, Mapping): - continue - - is_required = meta_prop_name in tool_metadata.get("required", []) - - # Try to find value in tool definition - value = None - value_from_definition = False - - if tool_definition: - # First check if tool definition has this exact key - if meta_prop_name in tool_definition: - value = tool_definition[meta_prop_name] - value_from_definition = True - else: - # Check if any tool definition key maps to this meta property - for tool_key, mapped_meta_key in tool_to_meta_mapping.items(): - if mapped_meta_key == meta_prop_name and tool_key in tool_definition: - value = tool_definition[tool_key] - value_from_definition = True - break - - # If no value from definition, check for default (only use if required) - if value is None and is_required and "default" in prop_schema: - value = prop_schema["default"] - - # Only add if: - # 1. Value is from tool definition, OR - # 2. Value is required and has a default - if value is not None and (value_from_definition or is_required): - result[meta_prop_name] = value - - return result - - @staticmethod - def prepare_metadata_dict( - tool_metadata_raw: Mapping[str, Any], - tool_definition: Optional[Mapping[str, Any]] = None, - key_overrides: Optional[Mapping[str, str]] = None, - ) -> Dict[str, Any]: - """Prepare a _meta dictionary from tool metadata and definition. - - This is a convenience method that extracts the _meta schema from - raw tool metadata and maps it to configuration values. - - Parameters - ---------- - tool_metadata_raw : Mapping[str, Any] - Raw tool metadata containing _meta or similar fields - tool_definition : Optional[Mapping[str, Any]] - The tool definition containing actual values - key_overrides : Optional[Mapping[str, str]] - Mapping from tool definition keys to _meta schema keys. - Format: {"tool_def_key": "meta_schema_key"} - - Returns - ------- - Dict[str, Any] - Dictionary with mapped metadata configuration - """ - # Extract _meta schema using existing utility - meta_schema = ToolMetadataExtractor.extract_metadata_schema(tool_metadata_raw) - if not meta_schema: - return {} - - return MetadataMapper.extract_metadata_config( - meta_schema, - tool_definition, - key_overrides - ) - - -class InvocationPayloadBuilder: - """Builds invocation payloads for tool calls.""" - - @staticmethod - def build_payload( - args: Tuple[Any, ...], - kwargs: Dict[str, Any], - configuration: Dict[str, Any], - ) -> Dict[str, Any]: - """Build invocation payload from args and kwargs. - - Parameters - ---------- - args : Tuple[Any, ...] 
- Positional arguments - kwargs : Dict[str, Any] - Keyword arguments - configuration : Dict[str, Any] - Tool configuration defaults - - Returns - ------- - Dict[str, Any] - Complete invocation payload - """ - user_arguments = InvocationPayloadBuilder._normalize_input(args, kwargs) - merged = dict(configuration) - merged.update(user_arguments) - return merged - - @staticmethod - def _normalize_input( - args: Tuple[Any, ...], - kwargs: Dict[str, Any] - ) -> Dict[str, Any]: - """Normalize invocation input to a dictionary. - - Parameters - ---------- - args : Tuple[Any, ...] - Positional arguments - kwargs : Dict[str, Any] - Keyword arguments - - Returns - ------- - Dict[str, Any] - Normalized input dictionary - - Raises - ------ - ValueError - If mixing positional and keyword arguments or providing multiple positional args - """ - if args and kwargs: - raise ValueError("Mixing positional and keyword arguments is not supported") - - if args: - if len(args) > 1: - raise ValueError("Multiple positional arguments are not supported") - candidate = next(iter(args)) - if candidate is None: - return {} - if isinstance(candidate, Mapping): - return dict(candidate) - return {"input": candidate} - - if kwargs: - return dict(kwargs) - - return {} - - -@dataclass -class ToolProperty: - """Represents a single property/parameter in a tool's schema. - - :ivar str type: JSON schema type (e.g., "string", "object", "array"). - :ivar Optional[str] description: Human-readable description of the property. - :ivar Optional[Mapping[str, Any]] properties: Nested properties for object types. - :ivar Any default: Default value for the property. - :ivar List[str] required: List of required nested properties. - """ - - type: str - description: Optional[str] = None - properties: Optional[Mapping[str, Any]] = None - default: Any = None - required: Optional[List[str]] = None - -@dataclass -class ToolParameters: - """Represents the parameters schema for a tool. - - :ivar str type: JSON schema type, typically "object". - :ivar Mapping[str, ToolProperty] properties: Dictionary of parameter properties. - :ivar List[str] required: List of required parameter names. - """ - - type: str - properties: Mapping[str, ToolProperty] - required: Optional[List[str]] = None - -@dataclass -class ToolManifest: - """Represents a tool manifest with metadata and parameters. - - :ivar str name: Unique name of the tool. - :ivar str description: Detailed description of the tool's functionality. - :ivar ToolParameters parameters: Schema defining the tool's input parameters. - """ - - name: str - description: str - parameters: ToolParameters - -@dataclass -class RemoteServer: - """Represents remote server configuration for a tool. - - :ivar str projectConnectionId: Identifier for the project connection. - :ivar str protocol: Communication protocol (e.g., "mcp"). - """ - - projectConnectionId: str - protocol: str - - def to_dict(self) -> Dict[str, Any]: - """Convert to dictionary for JSON serialization.""" - return { - "projectConnectionId": self.projectConnectionId, - "protocol": self.protocol - } - -@dataclass -class EnrichedToolEntry(ToolManifest): - """Enriched tool representation with input schema. - - :ivar str name: Name of the tool. - :ivar str description: Description of the tool. 
- """ - remoteServer: RemoteServer - projectConnectionId: str - protocol: str - inputSchema: Optional[Mapping[str, Any]] = None - tool_definition: Optional[ToolDefinition] = None - -@dataclass -class ToolEntry: - """Represents a single tool entry in the API response. - - :ivar RemoteServer remoteServer: Configuration for the remote server. - :ivar List[ToolManifest] manifest: List of tool manifests provided by this entry. - """ - - remoteServer: RemoteServer - manifest: List[ToolManifest] - -@dataclass -class ToolsResponse: - """Root response model for the tools API. - - :ivar List[ToolEntry] tools: List of tool entries from the API. - """ - - tools: List[ToolEntry] - enriched_tools: List[EnrichedToolEntry] - - @classmethod - def from_dict(cls, data: Mapping[str, Any], tool_definitions: List[ToolDefinition]) -> "ToolsResponse": - """Create a ToolsResponse from a dictionary. - - :param Mapping[str, Any] data: Dictionary representation of the API response. - :return: Parsed ToolsResponse instance. - :rtype: ToolsResponse - """ - tool_defintions_map = {f"{td.type.lower()}_{td.project_connection_id.lower()}": td for td in tool_definitions} - - def tool_definition_lookup(remote_server: RemoteServer) -> Optional[ToolDefinition]: - return tool_defintions_map.get(f"{remote_server.protocol.lower()}_{remote_server.projectConnectionId.lower()}") - - - tools = [] - flattend_tools = [] - for tool_data in data.get("tools", []): - remote_server = RemoteServer( - projectConnectionId=tool_data["remoteServer"]["projectConnectionId"], - protocol=tool_data["remoteServer"]["protocol"] - ) - - manifests = [] - for manifest_data in tool_data.get("manifest", []): - params_data = manifest_data.get("parameters", {}) - properties = {} - - for prop_name, prop_data in params_data.get("properties", {}).items(): - properties[prop_name] = ToolProperty( - type=prop_data.get("type"), - description=prop_data.get("description"), - properties=prop_data.get("properties"), - default=prop_data.get("default"), - required=prop_data.get("required") - ) - - parameters = ToolParameters( - type=params_data.get("type", "object"), - properties=properties, - required=params_data.get("required") - ) - manifest = ToolManifest( - name=manifest_data["name"], - description=manifest_data["description"], - parameters=parameters - ) - manifests.append(manifest) - tool_definition = tool_definition_lookup(remote_server) - flattend_tools.append(EnrichedToolEntry( - projectConnectionId=remote_server.projectConnectionId, - protocol=remote_server.protocol, - name=manifest.name, - description=manifest.description, - parameters=parameters, - remoteServer=remote_server, - inputSchema=parameters, - tool_definition=tool_definition - )) - - tools.append(ToolEntry( - remoteServer=remote_server, - manifest=manifests - )) - - return cls(tools=tools, enriched_tools=flattend_tools) - -class ResolveToolsRequest: - """Represents a request containing remote servers and user information. - - :ivar List[RemoteServer] remoteservers: List of remote server configurations. - :ivar UserInfo user: User information. - """ - - def __init__(self, remoteservers: List[RemoteServer], user: UserInfo) -> None: - """Initialize RemoteServersRequest with servers and user info. - - :param List[RemoteServer] remoteservers: List of remote server configurations. - :param UserInfo user: User information. 
- """ - self.remoteservers = remoteservers - self.user: UserInfo = user - - def to_dict(self) -> Dict[str, Any]: - """Convert to dictionary for JSON serialization.""" - result = { - "remoteservers": [rs.to_dict() for rs in self.remoteservers] - } - if self.user: - # Handle both UserInfo objects and dictionaries - if isinstance(self.user, dict): - # Validate required fields for dict - if self.user.get("objectId") and self.user.get("tenantId"): - result["user"] = { - "objectId": self.user["objectId"], - "tenantId": self.user["tenantId"] - } - elif hasattr(self.user, "objectId") and hasattr(self.user, "tenantId"): - # UserInfo object - if self.user.objectId and self.user.tenantId: - result["user"] = { - "objectId": self.user.objectId, - "tenantId": self.user.tenantId - } - return result - - -class ToolConfigurationParser: - """Parses and processes tool configuration. - - This class handles parsing and categorizing tool configurations into - remote tools (MCP/A2A) and named MCP tools. - - :param List[Mapping[str, Any]] tools_config: - List of tool configurations to parse. Can be None. - """ - - def __init__(self, tools_definitions: Optional[List[Any]] = None): - """Initialize the parser. - - :param tools_definitions: List of tool configurations (can be dicts or ToolDefinition objects), or None. - :type tools_definitions: Optional[List[Any]] - """ - # Convert dictionaries to ToolDefinition objects if needed - self._tools_definitions = [] - for tool_def in (tools_definitions or []): - if isinstance(tool_def, dict): - # Convert dict to ToolDefinition - tool_type = tool_def.get("type") - if tool_type: - self._tools_definitions.append(ToolDefinition(type=tool_type, **{k: v for k, v in tool_def.items() if k != "type"})) - elif isinstance(tool_def, ToolDefinition): - self._tools_definitions.append(tool_def) - - self._remote_tools: List[ToolDefinition] = [] - self._named_mcp_tools: List[ToolDefinition] = [] - self._parse_tools_config() - - def _parse_tools_config(self) -> None: - """Parse tools configuration into categorized lists. - - Separates tool configurations into remote tools (MCP/A2A types) and - named MCP tools based on the 'type' field in each configuration. - """ - for tool_definition in self._tools_definitions: - tool_type = tool_definition.type.lower() - if tool_type in ["mcp", "a2a"]: - self._remote_tools.append(tool_definition) - else: - self._named_mcp_tools.append(tool_definition) - -def to_remote_server(tool_definition: ToolDefinition) -> RemoteServer: - """Convert ToolDefinition to RemoteServer. - - :param ToolDefinition tool_definition: - Tool definition to convert. - :return: Converted RemoteServer instance. - :rtype: RemoteServer - """ - return RemoteServer( - projectConnectionId=tool_definition.project_connection_id, - protocol=tool_definition.type.lower() - ) - - -@dataclass -class MCPToolSchema: - """Represents the input schema for an MCP tool. - - :ivar str type: JSON schema type, typically "object". - :ivar Mapping[str, Any] properties: Dictionary of parameter properties. - :ivar List[str] required: List of required parameter names. - """ - - type: str - properties: Mapping[str, Any] - required: Optional[List[str]] = None - - -@dataclass -class MCPToolMetadata: - """Represents the _meta field for an MCP tool. - - :ivar str type: JSON schema type, typically "object". - :ivar Mapping[str, Any] properties: Dictionary of metadata properties. - :ivar List[str] required: List of required metadata parameter names. 
- """ - - type: str - properties: Mapping[str, Any] - required: Optional[List[str]] = None - - -@dataclass -class MCPTool: - """Represents a single MCP tool from the tools/list response. - - :ivar str name: Unique name of the tool. - :ivar str title: Display title of the tool. - :ivar str description: Detailed description of the tool's functionality. - :ivar MCPToolSchema inputSchema: Schema defining the tool's input parameters. - :ivar Optional[MCPToolMetadata] _meta: Optional metadata schema for the tool. - """ - - name: str - title: str - description: str - inputSchema: MCPToolSchema - _meta: Optional[MCPToolMetadata] = None - -@dataclass -class EnrichedMCPTool(MCPTool): - """Represents an enriched MCP tool with additional metadata. - - :ivar ToolDefinition tool_definition: Associated tool definition. - """ - tool_definition: Optional[ToolDefinition] = None - -@dataclass -class MCPToolsListResult: - """Represents the result field of an MCP tools/list response. - - :ivar List[MCPTool] tools: List of available MCP tools. - """ - - tools: List[MCPTool] - - -@dataclass -class MCPToolsListResponse: - """Root response model for the MCP tools/list JSON-RPC response. - - :ivar str jsonrpc: JSON-RPC protocol version (e.g., "2.0"). - :ivar int id: Request identifier. - :ivar MCPToolsListResult result: Result containing the list of tools. - """ - - jsonrpc: str - id: int - result: MCPToolsListResult - - @classmethod - def from_dict(cls, data: Mapping[str, Any], tool_definitions: List[ToolDefinition]) -> "MCPToolsListResponse": - """Create an MCPToolsListResponse from a dictionary. - - :param Mapping[str, Any] data: Dictionary representation of the JSON-RPC response. - :return: Parsed MCPToolsListResponse instance. - :rtype: MCPToolsListResponse - """ - result_data = data.get("result", {}) - tools_list = [] - tool_definitions_map = {f"{td.type.lower()}": td for td in tool_definitions} - filter_tools = len(tool_definitions_map) > 0 - for tool_data in result_data.get("tools", []): - - if filter_tools and tool_data["name"].lower() not in tool_definitions_map: - continue - # Parse inputSchema - input_schema_data = tool_data.get("inputSchema", {}) - input_schema = MCPToolSchema( - type=input_schema_data.get("type", "object"), - properties=input_schema_data.get("properties", {}), - required=input_schema_data.get("required") - ) - - # Parse _meta if present - meta = None - meta_data = tool_data.get("_meta") - if meta_data: - meta = MCPToolMetadata( - type=meta_data.get("type", "object"), - properties=meta_data.get("properties", {}), - required=meta_data.get("required") - ) - - # Create MCPTool - mcp_tool = EnrichedMCPTool( - name=tool_data["name"], - title=tool_data.get("title", tool_data["name"]), - description=tool_data.get("description", ""), - inputSchema=input_schema, - _meta=meta, - tool_definition=tool_definitions_map.get(tool_data["name"].lower()) - ) - - tools_list.append(mcp_tool) - - # Create result - result = MCPToolsListResult(tools=tools_list) - - return cls( - jsonrpc=data.get("jsonrpc", "2.0"), - id=data.get("id", 0), - result=result - ) \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/__init__.py deleted file mode 100644 index 047a3b7919e7..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# 
--------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- - -from ._client import AzureAIToolClient, FoundryTool -from .._exceptions import OAuthConsentRequiredError, MCPToolApprovalRequiredError - -__all__ = [ - "AzureAIToolClient", - "FoundryTool", - "OAuthConsentRequiredError", - "MCPToolApprovalRequiredError", -] diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_client.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_client.py deleted file mode 100644 index 986e8756e1b6..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_client.py +++ /dev/null @@ -1,207 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -# pylint: disable=protected-access,do-not-import-asyncio -from typing import Any, List, Mapping, Union, TYPE_CHECKING -from asyncio import gather -from azure.core import AsyncPipelineClient -from azure.core.pipeline import policies -from azure.core.tracing.decorator_async import distributed_trace_async - -from ._configuration import AzureAIToolClientConfiguration -from .._utils._model_base import InvocationPayloadBuilder -from .._model_base import FoundryTool, ToolSource - -from .operations._operations import MCPToolsOperations, RemoteToolsOperations - -if TYPE_CHECKING: - from azure.core.credentials_async import AsyncTokenCredential - -class AzureAIToolClient: - """Asynchronous client for aggregating tools from Azure AI MCP and Tools APIs. - - This client provides access to tools from both MCP (Model Context Protocol) servers - and Azure AI Tools API endpoints, enabling unified tool discovery and invocation. - - :param str endpoint: - The fully qualified endpoint for the Azure AI Agents service. - Example: "https://.api.azureml.ms" - :param credential: - Credential for authenticating requests to the service. - Use credentials from azure-identity like DefaultAzureCredential. - :type credential: ~azure.core.credentials.TokenCredential - :keyword str agent_name: - Name of the agent to use for tool operations. Default is "$default". - :keyword List[Mapping[str, Any]] tools: - List of tool configurations defining which tools to include. - :keyword Mapping[str, Any] user: - User information for tool invocations (object_id, tenant_id). - :keyword str api_version: - API version to use when communicating with the service. - Default is the latest supported version. - :keyword transport: - Custom transport implementation. Default is RequestsTransport. - :paramtype transport: ~azure.core.pipeline.transport.HttpTransport - - """ - - def __init__( - self, - endpoint: str, - credential: "AsyncTokenCredential", - **kwargs: Any, - ) -> None: - """Initialize the asynchronous Azure AI Tool Client. - - :param str endpoint: The service endpoint URL. - :param credential: Credentials for authenticating requests. - :type credential: ~azure.core.credentials.TokenCredential - :keyword kwargs: Additional keyword arguments for client configuration. 
- """ - self._config = AzureAIToolClientConfiguration( - endpoint, - credential, - **kwargs, - ) - - _policies = kwargs.pop("policies", None) - if _policies is None: - _policies = [ - policies.RequestIdPolicy(**kwargs), - self._config.headers_policy, - self._config.user_agent_policy, - self._config.proxy_policy, - policies.ContentDecodePolicy(**kwargs), - self._config.redirect_policy, - self._config.retry_policy, - self._config.authentication_policy, - self._config.custom_hook_policy, - self._config.logging_policy, - policies.DistributedTracingPolicy(**kwargs), - policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, - self._config.http_logging_policy, - ] - self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=endpoint, policies=_policies, **kwargs) - - # Initialize specialized clients with client and config - self._mcp_tools = MCPToolsOperations(client=self._client, config=self._config) - self._remote_tools = RemoteToolsOperations(client=self._client, config=self._config) - - async def list_tools(self) -> List[FoundryTool]: - """List all available tools from configured sources. - - Retrieves tools from both MCP servers and Azure AI Tools API endpoints, - returning them as AzureAITool instances ready for invocation. - :return: List of available tools from all configured sources. - :rtype: List[~AzureAITool] - :raises ~Tool_Client.exceptions.OAuthConsentRequiredError: - Raised when the service requires user OAuth consent. - :raises ~Tool_Client.exceptions.MCPToolApprovalRequiredError: - Raised when tool access requires human approval. - :raises ~azure.core.exceptions.HttpResponseError: - Raised for HTTP communication failures. - - """ - - existing_names: set[str] = set() - - tools: List[FoundryTool] = [] - - # Fetch MCP tools and Tools API tools in parallel - # Build list of coroutines to gather based on configuration - tasks = [] - if ( - self._config.tool_config._named_mcp_tools - and len(self._config.tool_config._named_mcp_tools) > 0 - ): - tasks.append(self._mcp_tools.list_tools(existing_names)) - if ( - self._config.tool_config._remote_tools - and len(self._config.tool_config._remote_tools) > 0 - ): - tasks.append(self._remote_tools.resolve_tools(existing_names)) - - # Execute all tasks in parallel if any exist - if tasks: - results = await gather(*tasks) - for result in results: - tools.extend(result) - - for tool in tools: - # Capture tool in a closure to avoid shadowing issues - def make_invoker(captured_tool): - async def _invoker(*args, **kwargs): - return await self.invoke_tool(captured_tool, *args, **kwargs) - return _invoker - tool.invoker = make_invoker(tool) - - return tools - - @distributed_trace_async - async def invoke_tool( - self, - tool: Union[str, FoundryTool], - *args: Any, - **kwargs: Any, - ) -> Any: - """Invoke a tool by instance, name, or descriptor. - - :param tool: Tool to invoke, specified as an AzureAITool instance, - tool name string, or FoundryTool. - :type tool: Union[~AzureAITool, str, ~Tool_Client.models.FoundryTool] - :param args: Positional arguments to pass to the tool. - :type args: Any - :return: The result of invoking the tool. - :rtype: Any - """ - descriptor = await self._resolve_tool_descriptor(tool) - payload = InvocationPayloadBuilder.build_payload(args, kwargs, configuration={}) - return await self._invoke_tool(descriptor, payload, **kwargs) - - async def _resolve_tool_descriptor( - self, tool: Union[str, FoundryTool] - ) -> FoundryTool: - """Resolve a tool reference to a descriptor. 
- - :param tool: Tool to resolve, either a FoundryTool instance or a string name/key. - :type tool: Union[str, FoundryTool] - :return: The resolved FoundryTool descriptor. - :rtype: FoundryTool - """ - if isinstance(tool, FoundryTool): - return tool - if isinstance(tool, str): - # Fetch all tools and find matching descriptor - descriptors = await self.list_tools() - for descriptor in descriptors: - if tool in (descriptor.name, descriptor.key): - return descriptor - raise KeyError(f"Unknown tool: {tool}") - raise TypeError("Tool must be an AsyncAzureAITool, FoundryTool, or registered name/key") - - async def _invoke_tool(self, descriptor: FoundryTool, arguments: Mapping[str, Any], **kwargs: Any) -> Any: #pylint: disable=unused-argument - """Invoke a tool descriptor. - - :param descriptor: The tool descriptor to invoke. - :type descriptor: FoundryTool - :param arguments: Arguments to pass to the tool. - :type arguments: Mapping[str, Any] - :return: The result of the tool invocation. - :rtype: Any - """ - if descriptor.source is ToolSource.MCP_TOOLS: - return await self._mcp_tools.invoke_tool(descriptor, arguments) - if descriptor.source is ToolSource.REMOTE_TOOLS: - return await self._remote_tools.invoke_tool(descriptor, arguments) - raise ValueError(f"Unsupported tool source: {descriptor.source}") - - async def close(self) -> None: - """Close the underlying HTTP pipeline.""" - await self._client.close() - - async def __aenter__(self) -> "AzureAIToolClient": - await self._client.__aenter__() - return self - - async def __aexit__(self, *exc_details: Any) -> None: - await self._client.__aexit__(*exc_details) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_configuration.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_configuration.py deleted file mode 100644 index 4eb5503dee8d..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_configuration.py +++ /dev/null @@ -1,86 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- - -from typing import Any, Mapping, List, Optional, TYPE_CHECKING - -from azure.core.pipeline import policies - -from .._utils._model_base import ToolConfigurationParser - -if TYPE_CHECKING: - from azure.core.credentials_async import AsyncTokenCredential - -class AzureAIToolClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for Azure AI Tool Client. - - Manages authentication, endpoint configuration, and policy settings for the - Azure AI Tool Client. This class is used internally by the client and should - not typically be instantiated directly. - - :param str endpoint: - Fully qualified endpoint for the Azure AI Agents service. - :param credential: - Azure TokenCredential for authentication. - :type credential: ~azure.core.credentials.TokenCredential - :keyword str api_version: - API version to use. Default is the latest supported version. - :keyword List[str] credential_scopes: - OAuth2 scopes for token requests. Default is ["https://ai.azure.com/.default"]. - :keyword str agent_name: - Name of the agent. Default is "$default". - :keyword List[Mapping[str, Any]] tools: - List of tool configurations. - :keyword Mapping[str, Any] user: - User information for tool invocations. 
- """ - - def __init__( - self, - endpoint: str, - credential: "AsyncTokenCredential", - **kwargs: Any, - ) -> None: - """Initialize the configuration. - - :param str endpoint: The service endpoint URL. - :param credential: Credentials for authenticating requests. - :type credential: ~azure.core.credentials.TokenCredential - :keyword kwargs: Additional configuration options. - """ - api_version: str = kwargs.pop("api_version", "2025-05-15-preview") - - self.endpoint = endpoint - self.credential = credential - self.api_version = api_version - self.credential_scopes = kwargs.pop("credential_scopes", ["https://ai.azure.com/.default"]) - - # Tool configuration - self.agent_name: str = kwargs.pop("agent_name", "$default") - self.tools: Optional[List[Mapping[str, Any]]] = kwargs.pop("tools", None) - self.user: Optional[Mapping[str, Any]] = kwargs.pop("user", None) - - # Initialize tool configuration parser - self.tool_config = ToolConfigurationParser(self.tools) - - self._configure(**kwargs) - - # Warn about unused kwargs - if kwargs: - import warnings - warnings.warn(f"Unused configuration parameters: {list(kwargs.keys())}", UserWarning) - - def _configure(self, **kwargs: Any) -> None: - self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) - self.authentication_policy = kwargs.get("authentication_policy") - if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( - self.credential, *self.credential_scopes, **kwargs - ) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/operations/_operations.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/operations/_operations.py deleted file mode 100644 index 7d1310518519..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/operations/_operations.py +++ /dev/null @@ -1,187 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- -# mypy: ignore-errors - -import json -from typing import Any, Dict, List, Mapping, MutableMapping - -from azure.core import AsyncPipelineClient -from ..._exceptions import OAuthConsentRequiredError -from .._configuration import AzureAIToolClientConfiguration - -from ...operations._operations import ( - build_remotetools_invoke_tool_request, - build_remotetools_resolve_tools_request, - prepare_remotetools_invoke_tool_request_content, - prepare_remotetools_resolve_tools_request_content, - build_mcptools_list_tools_request, - prepare_mcptools_list_tools_request_content, - build_mcptools_invoke_tool_request, - prepare_mcptools_invoke_tool_request_content, - API_VERSION, - MCP_ENDPOINT_PATH, - TOOL_PROPERTY_OVERRIDES, - DEFAULT_ERROR_MAP, - MCP_HEADERS, - REMOTE_TOOLS_HEADERS, - prepare_request_headers, - prepare_error_map, - handle_response_error, - build_list_tools_request, - process_list_tools_response, - build_invoke_mcp_tool_request, - build_resolve_tools_request, - process_resolve_tools_response, - build_invoke_remote_tool_request, - process_invoke_remote_tool_response, -) -from ..._model_base import FoundryTool, ToolSource, UserInfo - -from ..._utils._model_base import ToolsResponse, ToolDescriptorBuilder, ToolConfigurationParser, ResolveToolsRequest -from ..._utils._model_base import to_remote_server, MCPToolsListResponse, MetadataMapper - -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.pipeline import PipelineResponse - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) - -class MCPToolsOperations: - - def __init__(self, *args, **kwargs) -> None: - """Initialize MCP client. - - Parameters - ---------- - client : AsyncPipelineClient - Azure AsyncPipelineClient for HTTP requests - config : AzureAIToolClientConfiguration - Configuration object - """ - input_args = list(args) - self._client : AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config : AzureAIToolClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - - if self._client is None or self._config is None: - raise ValueError("Both 'client' and 'config' must be provided") - - self._endpoint_path = MCP_ENDPOINT_PATH - self._api_version = API_VERSION - - async def list_tools(self, existing_names: set, **kwargs: Any) -> List[FoundryTool]: - """List MCP tools. - - :return: List of tool descriptors from MCP server. - :rtype: List[FoundryTool] - """ - _request, error_map, remaining_kwargs = build_list_tools_request(self._api_version, kwargs) - - path_format_arguments = {"endpoint": self._config.endpoint} - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - pipeline_response: PipelineResponse = await self._client._pipeline.run(_request, **remaining_kwargs) - response = pipeline_response.http_response - - handle_response_error(response, error_map) - return process_list_tools_response(response, self._config.tool_config._named_mcp_tools, existing_names) - - async def invoke_tool( - self, - tool: FoundryTool, - arguments: Mapping[str, Any], - **kwargs: Any - ) -> Any: - """Invoke an MCP tool. - - :param tool: Tool descriptor for the tool to invoke. - :type tool: FoundryTool - :param arguments: Input arguments for the tool. - :type arguments: Mapping[str, Any] - :return: Result of the tool invocation. 
- :rtype: Any - """ - _request, error_map = build_invoke_mcp_tool_request(self._api_version, tool, arguments) - - path_format_arguments = {"endpoint": self._config.endpoint} - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - pipeline_response: PipelineResponse = await self._client._pipeline.run(_request, **kwargs) - response = pipeline_response.http_response - - handle_response_error(response, error_map) - return response.json().get("result") - -class RemoteToolsOperations: - def __init__(self, *args, **kwargs) -> None: - """Initialize Tools API client. - - :param client: Azure PipelineClient for HTTP requests. - :type client: ~azure.core.PipelineClient - :param config: Configuration object. - :type config: ~Tool_Client.models.AzureAIToolClientConfiguration - :raises ValueError: If required parameters are not provided. - """ - input_args = list(args) - self._client : AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config : AzureAIToolClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - - if self._client is None or self._config is None: - raise ValueError("Both 'client' and 'config' must be provided") - - - # Apply agent name substitution to endpoint paths - self.agent = self._config.agent_name.strip() if self._config.agent_name and self._config.agent_name.strip() else "$default" - self._api_version = API_VERSION - - async def resolve_tools(self, existing_names: set, **kwargs: Any) -> List[FoundryTool]: - """Resolve remote tools from Azure AI Tools API. - - :return: List of tool descriptors from Tools API. - :rtype: List[FoundryTool] - """ - result = build_resolve_tools_request(self.agent, self._api_version, self._config.tool_config, self._config.user, kwargs) - if result[0] is None: - return [] - - _request, error_map, remaining_kwargs = result - - path_format_arguments = {"endpoint": self._config.endpoint} - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - pipeline_response: PipelineResponse = await self._client._pipeline.run(_request, **remaining_kwargs) - response = pipeline_response.http_response - - handle_response_error(response, error_map) - return process_resolve_tools_response(response, self._config.tool_config._remote_tools, existing_names) - - async def invoke_tool( - self, - tool: FoundryTool, - arguments: Mapping[str, Any], - ) -> Any: - """Invoke a remote tool. - - :param tool: Tool descriptor to invoke. - :type tool: FoundryTool - :param arguments: Input arguments for the tool. - :type arguments: Mapping[str, Any] - :return: Result of the tool invocation. 
- :rtype: Any - """ - _request, error_map = build_invoke_remote_tool_request(self.agent, self._api_version, tool, self._config.user, arguments) - - path_format_arguments = {"endpoint": self._config.endpoint} - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - pipeline_response: PipelineResponse = await self._client._pipeline.run(_request) - response = pipeline_response.http_response - - handle_response_error(response, error_map) - return process_invoke_remote_tool_response(response) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/operations/_operations.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/operations/_operations.py deleted file mode 100644 index 0a84ef2e6409..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/operations/_operations.py +++ /dev/null @@ -1,551 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -# mypy: ignore-errors - -import json -from typing import Any, Dict, List, Mapping, MutableMapping, Tuple, Union -from azure.core import PipelineClient -from .._configuration import AzureAIToolClientConfiguration -from .._model_base import FoundryTool, ToolSource, UserInfo - -from .._utils._model_base import ToolsResponse, ToolDescriptorBuilder, ToolConfigurationParser, ResolveToolsRequest -from .._utils._model_base import to_remote_server, MCPToolsListResponse, MetadataMapper -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse - -from .._exceptions import OAuthConsentRequiredError - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) - - -# Shared constants -API_VERSION = "2025-11-15-preview" -MCP_ENDPOINT_PATH = "/mcp_tools" - -# Tool-specific property key overrides -# Format: {"tool_name": {"tool_def_key": "meta_schema_key"}} -TOOL_PROPERTY_OVERRIDES: Dict[str, Dict[str, str]] = { - "image_generation": { - "model": "imagegen_model_deployment_name" - }, - # Add more tool-specific mappings as needed -} - -# Shared error map -DEFAULT_ERROR_MAP: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, -} - -# Shared header configurations -MCP_HEADERS = { - "Content-Type": "application/json", - "Accept": "application/json,text/event-stream", - "Connection": "keep-alive", - "Cache-Control": "no-cache", -} - -REMOTE_TOOLS_HEADERS = { - "Content-Type": "application/json", - "Accept": "application/json", -} - -# Helper functions for request/response processing -def prepare_request_headers(base_headers: Dict[str, str], custom_headers: Mapping[str, str] = None) -> Dict[str, str]: - """Prepare request headers by merging base and custom headers. - - :param base_headers: Base headers to use - :param custom_headers: Custom headers to merge - :return: Merged headers dictionary - """ - headers = base_headers.copy() - if custom_headers: - headers.update(custom_headers) - return headers - -def prepare_error_map(custom_error_map: Mapping[int, Any] = None) -> MutableMapping: - """Prepare error map by merging default and custom error mappings. 
- - :param custom_error_map: Custom error mappings to merge - :return: Merged error map - """ - error_map = DEFAULT_ERROR_MAP.copy() - if custom_error_map: - error_map.update(custom_error_map) - return error_map - -def format_and_execute_request( - client: PipelineClient, - request: HttpRequest, - endpoint: str, - **kwargs: Any -) -> HttpResponse: - """Format request URL and execute pipeline. - - :param client: Pipeline client - :param request: HTTP request to execute - :param endpoint: Endpoint URL for formatting - :return: HTTP response - """ - path_format_arguments = {"endpoint": endpoint} - request.url = client.format_url(request.url, **path_format_arguments) - pipeline_response: PipelineResponse = client._pipeline.run(request, **kwargs) - return pipeline_response.http_response - -def handle_response_error(response: HttpResponse, error_map: MutableMapping) -> None: - """Handle HTTP response errors. - - :param response: HTTP response to check - :param error_map: Error map for status code mapping - :raises HttpResponseError: If response status is not 200 - """ - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - -def process_list_tools_response( - response: HttpResponse, - named_mcp_tools: Any, - existing_names: set -) -> List[FoundryTool]: - """Process list_tools response and build descriptors. - - :param response: HTTP response with MCP tools - :param named_mcp_tools: Named MCP tools configuration - :param existing_names: Set of existing tool names - :return: List of tool descriptors - """ - mcp_response = MCPToolsListResponse.from_dict(response.json(), named_mcp_tools) - raw_tools = mcp_response.result.tools - return ToolDescriptorBuilder.build_descriptors( - raw_tools, - ToolSource.MCP_TOOLS, - existing_names, - ) - -def process_resolve_tools_response( - response: HttpResponse, - remote_tools: Any, - existing_names: set -) -> List[FoundryTool]: - """Process resolve_tools response and build descriptors. - - :param response: HTTP response with remote tools - :param remote_tools: Remote tools configuration - :param existing_names: Set of existing tool names - :return: List of tool descriptors - """ - payload = response.json() - response_type = payload.get("type") - result = payload.get("toolResult") - - if response_type == "OAuthConsentRequired": - consent_url = result.get("consentUrl") - message = result.get("message") - if not consent_url: - consent_url = message - raise OAuthConsentRequiredError(message, consent_url=consent_url, payload=payload) - - toolResponse = ToolsResponse.from_dict(payload, remote_tools) - return ToolDescriptorBuilder.build_descriptors( - toolResponse.enriched_tools, - ToolSource.REMOTE_TOOLS, - existing_names, - ) - -def build_list_tools_request( - api_version: str, - kwargs: Dict[str, Any] -) -> Tuple[HttpRequest, MutableMapping, Dict[str, str]]: - """Build request for listing MCP tools. 
- - :param api_version: API version - :param kwargs: Additional arguments (headers, params, error_map) - :return: Tuple of (request, error_map, params) - """ - error_map = prepare_error_map(kwargs.pop("error_map", None)) - _headers = prepare_request_headers(MCP_HEADERS, kwargs.pop("headers", None)) - _params = kwargs.pop("params", {}) or {} - - _content = prepare_mcptools_list_tools_request_content() - content = json.dumps(_content) - _request = build_mcptools_list_tools_request(api_version=api_version, headers=_headers, params=_params, content=content) - - return _request, error_map, kwargs - -def build_invoke_mcp_tool_request( - api_version: str, - tool: FoundryTool, - arguments: Mapping[str, Any], - **kwargs: Any -) -> Tuple[HttpRequest, MutableMapping]: - """Build request for invoking MCP tool. - - :param api_version: API version - :param tool: Tool descriptor - :param arguments: Tool arguments - :return: Tuple of (request, error_map) - """ - error_map = prepare_error_map() - _headers = prepare_request_headers(MCP_HEADERS) - _params = {} - - _content = prepare_mcptools_invoke_tool_request_content(tool, arguments, TOOL_PROPERTY_OVERRIDES) - - content = json.dumps(_content) - _request = build_mcptools_invoke_tool_request(api_version=api_version, headers=_headers, params=_params, content=content) - - return _request, error_map - -def build_resolve_tools_request( - agent_name: str, - api_version: str, - tool_config: ToolConfigurationParser, - user: UserInfo, - kwargs: Dict[str, Any] -) -> Union[Tuple[HttpRequest, MutableMapping, Dict[str, Any]], Tuple[None, None, None]]: - """Build request for resolving remote tools. - - :param agent_name: Agent name - :param api_version: API version - :param tool_config: Tool configuration - :param user: User info - :param kwargs: Additional arguments - :return: Tuple of (request, error_map, remaining_kwargs) or (None, None, None) - """ - error_map = prepare_error_map(kwargs.pop("error_map", None)) - _headers = prepare_request_headers(REMOTE_TOOLS_HEADERS, kwargs.pop("headers", None)) - _params = kwargs.pop("params", {}) or {} - - _content = prepare_remotetools_resolve_tools_request_content(tool_config, user) - if _content is None: - return None, None, None - - content = json.dumps(_content.to_dict()) - _request = build_remotetools_resolve_tools_request(agent_name, api_version=api_version, headers=_headers, params=_params, content=content) - - return _request, error_map, kwargs - -def build_invoke_remote_tool_request( - agent_name: str, - api_version: str, - tool: FoundryTool, - user: UserInfo, - arguments: Mapping[str, Any] -) -> Tuple[HttpRequest, MutableMapping]: - """Build request for invoking remote tool. - - :param agent_name: Agent name - :param api_version: API version - :param tool: Tool descriptor - :param user: User info - :param arguments: Tool arguments - :return: Tuple of (request, error_map) - """ - error_map = prepare_error_map() - _headers = prepare_request_headers(REMOTE_TOOLS_HEADERS) - _params = {} - - _content = prepare_remotetools_invoke_tool_request_content(tool, user, arguments) - content = json.dumps(_content) - _request = build_remotetools_invoke_tool_request(agent_name, api_version=api_version, headers=_headers, params=_params, content=content) - - return _request, error_map - -def process_invoke_remote_tool_response(response: HttpResponse) -> Any: - """Process remote tool invocation response. 
- - :param response: HTTP response - :return: Tool result - :raises OAuthConsentRequiredError: If OAuth consent is required - """ - payload = response.json() - response_type = payload.get("type") - result = payload.get("toolResult") - - if response_type == "OAuthConsentRequired": - raise OAuthConsentRequiredError(result.get("message"), consent_url=result.get("consentUrl"), payload=payload) - return result - -class MCPToolsOperations: - - def __init__(self, *args, **kwargs) -> None: - """Initialize MCP client. - - Parameters - ---------- - client : PipelineClient - Azure PipelineClient for HTTP requests - config : AzureAIToolClientConfiguration - Configuration object - """ - input_args = list(args) - self._client : PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config : AzureAIToolClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - - if self._client is None or self._config is None: - raise ValueError("Both 'client' and 'config' must be provided") - - self._endpoint_path = MCP_ENDPOINT_PATH - self._api_version = API_VERSION - - def list_tools(self, existing_names: set, **kwargs: Any) -> List[FoundryTool]: - """List MCP tools. - - :return: List of tool descriptors from MCP server. - :rtype: List[FoundryTool] - """ - _request, error_map, remaining_kwargs = build_list_tools_request(self._api_version, kwargs) - response = format_and_execute_request(self._client, _request, self._config.endpoint, **remaining_kwargs) - handle_response_error(response, error_map) - return process_list_tools_response(response, self._config.tool_config._named_mcp_tools, existing_names) - - def invoke_tool( - self, - tool: FoundryTool, - arguments: Mapping[str, Any], - ) -> Any: - """Invoke an MCP tool. - - :param tool: Tool descriptor for the tool to invoke. - :type tool: FoundryTool - :param arguments: Input arguments for the tool. - :type arguments: Mapping[str, Any] - :return: Result of the tool invocation. - :rtype: Any - """ - _request, error_map = build_invoke_mcp_tool_request(self._api_version, tool, arguments) - response = format_and_execute_request(self._client, _request, self._config.endpoint) - handle_response_error(response, error_map) - return response.json().get("result") - -def prepare_mcptools_list_tools_request_content() -> Any: - return { - "jsonrpc": "2.0", - "id": 1, - "method": "tools/list", - "params": {} - } - -def build_mcptools_list_tools_request( - api_version: str, - headers: Mapping[str, str] = None, - params: Mapping[str, str] = None, - **kwargs: Any - ) -> HttpRequest: - """Build the HTTP request for listing MCP tools. - - :param api_version: API version to use. - :type api_version: str - :param headers: Additional headers for the request. - :type headers: Mapping[str, str], optional - :param params: Query parameters for the request. - :type params: Mapping[str, str], optional - :return: Constructed HttpRequest object. 
- :rtype: ~azure.core.rest.HttpRequest - """ - _headers = headers or {} - _params = params or {} - _params["api-version"] = api_version - - _url = f"/mcp_tools" - return HttpRequest(method="POST", url=_url, headers=_headers, params=_params, **kwargs) - -def prepare_mcptools_invoke_tool_request_content(tool: FoundryTool, arguments: Mapping[str, Any], tool_overrides: Dict[str, Dict[str, str]]) -> Any: - - params = { - "name": tool.name, - "arguments": dict(arguments), - } - - if tool.tool_definition: - - key_overrides = tool_overrides.get(tool.name, {}) - meta_config = MetadataMapper.prepare_metadata_dict( - tool.metadata, - tool.tool_definition.__dict__ if hasattr(tool.tool_definition, '__dict__') else tool.tool_definition, - key_overrides - ) - if meta_config: - params["_meta"] = meta_config - - payload = { - "jsonrpc": "2.0", - "id": 2, - "method": "tools/call", - "params": params - } - return payload - -def build_mcptools_invoke_tool_request( - api_version: str, - headers: Mapping[str, str] = None, - params: Mapping[str, str] = None, - **kwargs: Any -) -> HttpRequest: - """Build the HTTP request for invoking an MCP tool. - - :param api_version: API version to use. - :type api_version: str - :param headers: Additional headers for the request. - :type headers: Mapping[str, str], optional - :param params: Query parameters for the request. - :type params: Mapping[str, str], optional - :return: Constructed HttpRequest object. - :rtype: ~azure.core.rest.HttpRequest - """ - _headers = headers or {} - _params = params or {} - _params["api-version"] = api_version - - _url = f"/mcp_tools" - return HttpRequest(method="POST", url=_url, headers=_headers, params=_params, **kwargs) - -class RemoteToolsOperations: - def __init__(self, *args, **kwargs) -> None: - """Initialize Tools API client. - - :param client: Azure PipelineClient for HTTP requests. - :type client: ~azure.core.PipelineClient - :param config: Configuration object. - :type config: ~Tool_Client.models.AzureAIToolClientConfiguration - :raises ValueError: If required parameters are not provided. - """ - input_args = list(args) - self._client : PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config : AzureAIToolClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - - if self._client is None or self._config is None: - raise ValueError("Both 'client' and 'config' must be provided") - - - # Apply agent name substitution to endpoint paths - self.agent = self._config.agent_name.strip() if self._config.agent_name and self._config.agent_name.strip() else "$default" - self._api_version = API_VERSION - - def resolve_tools(self, existing_names: set, **kwargs: Any) -> List[FoundryTool]: - """Resolve remote tools from Azure AI Tools API. - - :return: List of tool descriptors from Tools API. - :rtype: List[FoundryTool] - """ - result = build_resolve_tools_request(self.agent, self._api_version, self._config.tool_config, self._config.user, kwargs) - if result[0] is None: - return [] - - _request, error_map, remaining_kwargs = result - response = format_and_execute_request(self._client, _request, self._config.endpoint, **remaining_kwargs) - handle_response_error(response, error_map) - return process_resolve_tools_response(response, self._config.tool_config._remote_tools, existing_names) - - def invoke_tool( - self, - tool: FoundryTool, - arguments: Mapping[str, Any], - ) -> Any: - """Invoke a remote tool. - - :param tool: Tool descriptor to invoke. 
- :type tool: FoundryTool - :param arguments: Input arguments for the tool. - :type arguments: Mapping[str, Any] - :return: Result of the tool invocation. - :rtype: Any - """ - _request, error_map = build_invoke_remote_tool_request(self.agent, self._api_version, tool, self._config.user, arguments) - response = format_and_execute_request(self._client, _request, self._config.endpoint) - handle_response_error(response, error_map) - return process_invoke_remote_tool_response(response) - -def prepare_remotetools_invoke_tool_request_content(tool: FoundryTool, user: UserInfo, arguments: Mapping[str, Any]) -> Any: - payload = { - "toolName": tool.name, - "arguments": dict(arguments), - "remoteServer": to_remote_server(tool.tool_definition).to_dict(), - } - if user: - # Handle both UserInfo objects and dictionaries - if isinstance(user, dict): - if user.get("objectId") and user.get("tenantId"): - payload["user"] = { - "objectId": user["objectId"], - "tenantId": user["tenantId"], - } - elif hasattr(user, "objectId") and hasattr(user, "tenantId"): - if user.objectId and user.tenantId: - payload["user"] = { - "objectId": user.objectId, - "tenantId": user.tenantId, - } - return payload - -def build_remotetools_invoke_tool_request( - agent_name: str, - api_version: str, - headers: Mapping[str, str] = None, - params: Mapping[str, str] = None, - **kwargs: Any - ) -> HttpRequest: - """Build the HTTP request for invoking a remote tool. - - :param api_version: API version to use. - :type api_version: str - :param headers: Additional headers for the request. - :type headers: Mapping[str, str], optional - :param params: Query parameters for the request. - :type params: Mapping[str, str], optional - :return: Constructed HttpRequest object. - :rtype: ~azure.core.rest.HttpRequest - """ - _headers = headers or {} - _params = params or {} - _params["api-version"] = api_version - - _url = f"/agents/{agent_name}/tools/invoke" - return HttpRequest(method="POST", url=_url, headers=_headers, params=_params, **kwargs) - - -def prepare_remotetools_resolve_tools_request_content(tool_config: ToolConfigurationParser, user: UserInfo = None) -> ResolveToolsRequest: - resolve_tools_request: ResolveToolsRequest = None - if tool_config._remote_tools: - remote_servers = [] - for remote_tool in tool_config._remote_tools: - remote_servers.append(to_remote_server(remote_tool)) - resolve_tools_request = ResolveToolsRequest(remote_servers, user=user) - - return resolve_tools_request - -def build_remotetools_resolve_tools_request( - agent_name: str, - api_version: str, - headers: Mapping[str, str] = None, - params: Mapping[str, str] = None, - **kwargs: Any - ) -> HttpRequest: - """Build the HTTP request for resolving remote tools. - - :param api_version: API version to use. - :type api_version: str - :param headers: Additional headers for the request. - :type headers: Mapping[str, str], optional - :param params: Query parameters for the request. - :type params: Mapping[str, str], optional - :return: Constructed HttpRequest object. 
- :rtype: ~azure.core.rest.HttpRequest - """ - _headers = headers or {} - _params = params or {} - _params["api-version"] = api_version - - _url = f"/agents/{agent_name}/tools/resolve" - return HttpRequest(method="POST", url=_url, headers=_headers, params=_params, **kwargs) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/_context.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/_context.py new file mode 100644 index 000000000000..f86d1ae0d4ac --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/_context.py @@ -0,0 +1,32 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from typing import AsyncContextManager, ClassVar, Optional + +from azure.ai.agentserver.core.tools import FoundryToolRuntime + + +class AgentServerContext(AsyncContextManager["AgentServerContext"]): + _INSTANCE: ClassVar[Optional["AgentServerContext"]] = None + + def __init__(self, tool_runtime: FoundryToolRuntime): + self._tool_runtime = tool_runtime + + self.__class__._INSTANCE = self + + @classmethod + def get(cls) -> "AgentServerContext": + if cls._INSTANCE is None: + raise ValueError("AgentServerContext has not been initialized.") + return cls._INSTANCE + + @property + def tools(self) -> FoundryToolRuntime: + return self._tool_runtime + + async def __aenter__(self) -> "AgentServerContext": + await self._tool_runtime.__aenter__() + return self + + async def __aexit__(self, exc_type, exc_value, traceback) -> None: + await self._tool_runtime.__aexit__(exc_type, exc_value, traceback) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py index cf85b2fcea07..c8c797321af2 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py @@ -12,6 +12,8 @@ from typing import Any, AsyncGenerator, Generator, Optional, Union import uvicorn +from azure.core.credentials import TokenCredential +from azure.core.credentials_async import AsyncTokenCredential from opentelemetry import context as otel_context, trace from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator from starlette.applications import Starlette @@ -25,17 +27,19 @@ from azure.identity.aio import DefaultAzureCredential as AsyncDefaultTokenCredential +from ._context import AgentServerContext from ..models import projects as project_models from ..constants import Constants -from ..logger import APPINSIGHT_CONNSTR_ENV_NAME, get_logger, request_context +from ..logger import APPINSIGHT_CONNSTR_ENV_NAME, get_logger, get_project_endpoint, request_context from ..models import ( Response as OpenAIResponse, ResponseStreamEvent, ) from .common.agent_run_context import AgentRunContext -from ..client.tools.aio._client import AzureAIToolClient -from ..client.tools._utils._model_base import ToolDefinition, UserInfo +from ..tools import DefaultFoundryToolRuntime, FoundryTool, FoundryToolClient, FoundryToolRuntime, UserInfo, \ + UserInfoContextMiddleware +from ..utils._credential import AsyncTokenCredentialAdapter logger = get_logger() DEBUG_ERRORS = os.environ.get(Constants.AGENT_DEBUG_ERRORS, "false").lower() == "true" @@ -47,10 +51,8 @@ def __init__(self, app: 
ASGIApp, agent: Optional['FoundryCBAgent'] = None): self.agent = agent async def dispatch(self, request: Request, call_next): - user_info: Optional[UserInfo] = None if request.url.path in ("/runs", "/responses"): try: - user_info = self.set_user_info_to_context_var(request) self.set_request_id_to_context_var(request) payload = await request.json() except Exception as e: @@ -58,7 +60,7 @@ async def dispatch(self, request: Request, call_next): return JSONResponse({"error": f"Invalid JSON payload: {e}"}, status_code=400) try: agent_tools = self.agent.tools if self.agent else [] - request.state.agent_run_context = AgentRunContext(payload, user_info=user_info, agent_tools=agent_tools) + request.state.agent_run_context = AgentRunContext(payload, agent_tools=agent_tools) self.set_run_context_to_context_var(request.state.agent_run_context) except Exception as e: logger.error(f"Context build failed: {e}.", exc_info=True) @@ -93,37 +95,17 @@ def set_run_context_to_context_var(self, run_context): ctx.update(res) request_context.set(ctx) - def set_user_info_to_context_var(self, request) -> Optional[UserInfo]: - user_info: Optional[UserInfo] = None - try: - object_id_header = request.headers.get("x-aml-oid", None) - tenant_id_header = request.headers.get("x-aml-tid", None) - if not object_id_header and not tenant_id_header: - return None - user_info = UserInfo( - objectId=object_id_header, - tenantId=tenant_id_header - ) - - except Exception as e: - logger.error(f"Failed to parse X-User-Info header: {e}", exc_info=True) - if user_info: - ctx = request_context.get() or {} - for key, value in user_info.to_dict().items(): - if key == "objectId": - continue # skip user objectId - ctx[f"azure.ai.agentserver.user.{key}"] = str(value) - request_context.set(ctx) - return user_info - class FoundryCBAgent: - _cached_tools_endpoint: Optional[str] = None - _cached_agent_name: Optional[str] = None - - def __init__(self, credentials: Optional["AsyncTokenCredential"] = None, **kwargs: Any) -> None: - self.credentials = credentials or AsyncDefaultTokenCredential() - self.tools = kwargs.get("tools", []) + def __init__(self, + credentials: Optional[Union[AsyncTokenCredential, TokenCredential]] = None, + project_endpoint: Optional[str] = None, + **kwargs: Any) -> None: + self.credentials = AsyncTokenCredentialAdapter(credentials) if credentials else AsyncDefaultTokenCredential() + project_endpoint = get_project_endpoint() or project_endpoint + if not project_endpoint: + raise ValueError("Project endpoint is required.") + AgentServerContext(DefaultFoundryToolRuntime(project_endpoint, self.credentials)) async def runs_endpoint(request): # Set up tracing context and span @@ -202,6 +184,7 @@ async def readiness_endpoint(request): ] self.app = Starlette(routes=routes) + UserInfoContextMiddleware.install(self.app) self.app.add_middleware( CORSMiddleware, allow_origins=["*"], @@ -424,91 +407,17 @@ def setup_otlp_exporter(self, endpoint, provider): provider.add_span_processor(processor) logger.info(f"Tracing setup with OTLP exporter: {endpoint}") - @staticmethod - def _configure_endpoint() -> tuple[str, Optional[str]]: - """Configure and return the tools endpoint and agent name from environment variables. - - :return: A tuple of (tools_endpoint, agent_name). 
- :rtype: tuple[str, Optional[str]] - """ - if not FoundryCBAgent._cached_tools_endpoint: - project_endpoint_format: str = "https://{account_name}.services.ai.azure.com/api/projects/{project_name}" - workspace_endpoint = os.getenv(Constants.AZURE_AI_WORKSPACE_ENDPOINT) - tools_endpoint = os.getenv(Constants.AZURE_AI_TOOLS_ENDPOINT) - project_endpoint = os.getenv(Constants.AZURE_AI_PROJECT_ENDPOINT) - - if not tools_endpoint: - # project endpoint corrupted could have been an overridden environment variable - # try to reconstruct tools endpoint from workspace endpoint - # Robustly reconstruct project_endpoint from workspace_endpoint if needed. - - if workspace_endpoint: - # Expected format: - # "https://.api.azureml.ms/subscriptions//resourceGroups// - # providers/Microsoft.MachineLearningServices/workspaces/@@AML" - from urllib.parse import urlparse - parsed_url = urlparse(workspace_endpoint) - path_parts = [p for p in parsed_url.path.split('/') if p] - # Find the 'workspaces' part and extract account_name@project_name@AML - try: - workspaces_idx = path_parts.index("workspaces") - if workspaces_idx + 1 >= len(path_parts): - raise ValueError( - f"Workspace endpoint path does not contain workspace info " - f"after 'workspaces': {workspace_endpoint}" - ) - workspace_info = path_parts[workspaces_idx + 1] - workspace_parts = workspace_info.split('@') - if len(workspace_parts) < 2: - raise ValueError( - f"Workspace info '{workspace_info}' does not contain both account_name " - f"and project_name separated by '@'." - ) - account_name = workspace_parts[0] - project_name = workspace_parts[1] - # Documented expected format for PROJECT_ENDPOINT_FORMAT: - # "https://.api.azureml.ms/api/projects/{project_name}" - project_endpoint = project_endpoint_format.format( - account_name=account_name, project_name=project_name - ) - except (ValueError, IndexError) as e: - raise ValueError( - f"Failed to reconstruct project endpoint from workspace endpoint " - f"'{workspace_endpoint}': {e}" - ) from e - # should never reach here - logger.info("Reconstructed tools endpoint from project endpoint %s", project_endpoint) - tools_endpoint = project_endpoint - - tools_endpoint = project_endpoint - - if not tools_endpoint: - raise ValueError( - "Project endpoint needed for Azure AI tools endpoint is not found. " - ) - FoundryCBAgent._cached_tools_endpoint = tools_endpoint - - agent_name = os.getenv(Constants.AGENT_NAME) - if agent_name is None: - if os.getenv("CONTAINER_APP_NAME"): - raise ValueError( - "Agent name needed for Azure AI hosted agents is not found. 
" - ) - agent_name = "$default" - FoundryCBAgent._cached_agent_name = agent_name - - return FoundryCBAgent._cached_tools_endpoint, FoundryCBAgent._cached_agent_name - def get_tool_client( - self, tools: Optional[list[ToolDefinition]], user_info: Optional[UserInfo] - ) -> AzureAIToolClient: + self, tools: Optional[list[FoundryTool]], user_info: Optional[UserInfo] + ) -> FoundryToolClient: + # TODO: remove this method logger.debug("Creating AzureAIToolClient with tools: %s", tools) if not self.credentials: raise ValueError("Credentials are required to create Tool Client.") tools_endpoint, agent_name = self._configure_endpoint() - return AzureAIToolClient( + return FoundryToolClient( endpoint=tools_endpoint, credential=self.credentials, tools=tools, diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py index 5289df0b3524..53eb15af3550 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py @@ -7,7 +7,8 @@ from ...models.projects import AgentId, AgentReference, ResponseConversation1 from .id_generator.foundry_id_generator import FoundryIdGenerator from .id_generator.id_generator import IdGenerator -from ...client.tools._model_base import UserInfo +from ...tools import UserInfo + logger = get_logger() @@ -65,12 +66,14 @@ def get_conversation_object(self) -> ResponseConversation1: def get_tools(self) -> list: # request tools take precedence over agent tools + # TODO: remove this method request_tools = self.request.get("tools", []) if not request_tools: return self._agent_tools return request_tools def get_user_info(self) -> UserInfo: + # TODO: remove this method return self._user_info diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/__init__.py new file mode 100644 index 000000000000..f158cd370990 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/__init__.py @@ -0,0 +1,17 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# ---------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
+
+from .client._client import FoundryToolClient
+from ._exceptions import *
+from .client._models import FoundryConnectedTool, FoundryHostedMcpTool, FoundryTool, FoundryToolProtocol, \
+    FoundryToolSource, ResolvedFoundryTool, SchemaDefinition, SchemaProperty, SchemaType, UserInfo
+from .runtime._catalog import *
+from .runtime._facade import *
+from .runtime._invoker import *
+from .runtime._resolver import *
+from .runtime._runtime import *
+from .runtime._starlette import *
+from .runtime._user import *
\ No newline at end of file
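For orientation, the package `__init__` above re-exports the tool definition dataclasses introduced later in this change. A rough sketch of declaring tools with them (the tool name and connection id below are placeholder assumptions, not part of this change):

from azure.ai.agentserver.core.tools import FoundryConnectedTool, FoundryHostedMcpTool

# A Foundry-hosted MCP tool, identified by name (placeholder name).
hosted_tool = FoundryHostedMcpTool(name="code_interpreter")

# A connected tool reached through a project connection (placeholder connection id).
connected_tool = FoundryConnectedTool(protocol="mcp", project_connection_id="my-mcp-connection")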
+ """ + + def __init__(self, message: str, tool: FoundryTool): + super().__init__(message) + self.tool = tool + + +class InvalidToolFacadeError(RuntimeError): + """Raised when a tool facade is invalid. + + This exception is raised when a tool facade does not conform + to the expected structure or contains invalid data. + """ + pass + diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/__init__.py new file mode 100644 index 000000000000..28077537d94b --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/__init__.py @@ -0,0 +1,5 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_client.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_client.py new file mode 100644 index 000000000000..9b7f269ea6ce --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_client.py @@ -0,0 +1,134 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from asyncio import gather +from collections import defaultdict +from typing import Any, AsyncContextManager, DefaultDict, Dict, List, Mapping, Optional + +from azure.core import AsyncPipelineClient +from azure.core.credentials_async import AsyncTokenCredential +from azure.core.tracing.decorator_async import distributed_trace_async + +from ._models import FoundryTool, FoundryToolDetails, FoundryToolSource, ResolvedFoundryTool, UserInfo +from ._configuration import FoundryToolClientConfiguration +from .._exceptions import ToolInvocationError +from .operations._foundry_connected_tools import FoundryConnectedToolsOperations +from .operations._foundry_hosted_mcp_tools import FoundryMcpToolsOperations + + +class FoundryToolClient(AsyncContextManager["FoundryToolClient"]): + """Asynchronous client for aggregating tools from Azure AI MCP and Tools APIs. + + This client provides access to tools from both MCP (Model Context Protocol) servers + and Azure AI Tools API endpoints, enabling unified tool discovery and invocation. + + :param str endpoint: + The fully qualified endpoint for the Azure AI Agents service. + Example: "https://.api.azureml.ms" + :param credential: + Credential for authenticating requests to the service. + Use credentials from azure-identity like DefaultAzureCredential. + :type credential: ~azure.core.credentials.TokenCredential + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential"): + """Initialize the asynchronous Azure AI Tool Client. + + :param endpoint: The service endpoint URL. + :type endpoint: str + :param credential: Credentials for authenticating requests. 
diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_client.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_client.py
new file mode 100644
index 000000000000..9b7f269ea6ce
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_client.py
@@ -0,0 +1,134 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from asyncio import gather
+from collections import defaultdict
+from typing import Any, AsyncContextManager, DefaultDict, Dict, List, Mapping, Optional
+
+from azure.core import AsyncPipelineClient
+from azure.core.credentials_async import AsyncTokenCredential
+from azure.core.tracing.decorator_async import distributed_trace_async
+
+from ._models import FoundryTool, FoundryToolDetails, FoundryToolSource, ResolvedFoundryTool, UserInfo
+from ._configuration import FoundryToolClientConfiguration
+from .._exceptions import ToolInvocationError
+from .operations._foundry_connected_tools import FoundryConnectedToolsOperations
+from .operations._foundry_hosted_mcp_tools import FoundryMcpToolsOperations
+
+
+class FoundryToolClient(AsyncContextManager["FoundryToolClient"]):
+    """Asynchronous client for aggregating tools from Azure AI MCP and Tools APIs.
+
+    This client provides access to tools from both MCP (Model Context Protocol) servers
+    and Azure AI Tools API endpoints, enabling unified tool discovery and invocation.
+
+    :param str endpoint:
+        The fully qualified endpoint for the Azure AI Agents service.
+        Example: "https://<your-resource>.api.azureml.ms"
+    :param credential:
+        Credential for authenticating requests to the service.
+        Use credentials from azure-identity like DefaultAzureCredential.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    """
+
+    def __init__(self, endpoint: str, credential: "AsyncTokenCredential"):
+        """Initialize the asynchronous Azure AI Tool Client.
+
+        :param endpoint: The service endpoint URL.
+        :type endpoint: str
+        :param credential: Credentials for authenticating requests.
+        :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+        """
+        # noinspection PyTypeChecker
+        config = FoundryToolClientConfiguration(credential)
+        self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=endpoint, config=config)
+
+        self._hosted_mcp_tools = FoundryMcpToolsOperations(self._client)
+        self._connected_tools = FoundryConnectedToolsOperations(self._client)
+
+    @distributed_trace_async
+    async def list_tools(self,
+                         tools: List[FoundryTool],
+                         agent_name: str,
+                         user: Optional[UserInfo] = None) -> Mapping[FoundryTool, FoundryToolDetails]:
+        """List all available tools from configured sources.
+
+        Retrieves tool details from both hosted MCP servers and the Azure AI Tools API
+        for the requested tool definitions.
+
+        :param tools: List of FoundryTool instances to resolve.
+        :type tools: List[~FoundryTool]
+        :param agent_name: Name of the agent requesting the tools.
+        :type agent_name: str
+        :param user: Information about the user requesting the tools.
+        :type user: Optional[UserInfo]
+        :return: Mapping of each requested tool to its resolved details.
+        :rtype: Mapping[~FoundryTool, ~FoundryToolDetails]
+        :raises ~azure.ai.agentserver.core.tools.OAuthConsentRequiredError:
+            Raised when the service requires end-user OAuth consent.
+        :raises ~azure.core.exceptions.HttpResponseError:
+            Raised for HTTP communication failures.
+        """
+        tools_by_source: DefaultDict[FoundryToolSource, List[FoundryTool]] = defaultdict(list)
+        for t in tools:
+            tools_by_source[t.source].append(t)
+
+        tasks = []
+        if FoundryToolSource.HOSTED_MCP in tools_by_source:
+            # noinspection PyTypeChecker
+            tasks.append(self._hosted_mcp_tools.list_tools(tools_by_source[FoundryToolSource.HOSTED_MCP]))
+        if FoundryToolSource.CONNECTED in tools_by_source:
+            # noinspection PyTypeChecker
+            tasks.append(self._connected_tools.list_tools(tools_by_source[FoundryToolSource.CONNECTED],
+                                                          user,
+                                                          agent_name))
+
+        resolved_tools: Dict[FoundryTool, FoundryToolDetails] = {}
+        if tasks:
+            results = await gather(*tasks)
+            for result in results:
+                resolved_tools.update(result)
+
+        return resolved_tools
+
+    @distributed_trace_async
+    async def invoke_tool(self,
+                          tool: ResolvedFoundryTool,
+                          arguments: Dict[str, Any],
+                          agent_name: str,
+                          user: Optional[UserInfo] = None) -> Any:
+        """Invoke a resolved tool.
+
+        :param tool: The resolved tool to invoke.
+        :type tool: ResolvedFoundryTool
+        :param arguments: Arguments to pass to the tool.
+        :type arguments: Dict[str, Any]
+        :param agent_name: Name of the agent invoking the tool.
+        :type agent_name: str
+        :param user: Information about the user invoking the tool.
+        :type user: Optional[UserInfo]
+        :return: The result of invoking the tool.
+        :rtype: Any
+        :raises ~azure.ai.agentserver.core.tools.OAuthConsentRequiredError:
+            Raised when the service requires user OAuth consent.
+        :raises ~azure.core.exceptions.HttpResponseError:
+            Raised for HTTP communication failures.
+        :raises ~azure.ai.agentserver.core.tools.ToolInvocationError:
+            Raised when the tool invocation fails or the tool source is not supported.
+ """ + if tool.source is FoundryToolSource.HOSTED_MCP: + return await self._hosted_mcp_tools.invoke_tool(tool, arguments) + if tool.source is FoundryToolSource.CONNECTED: + return await self._connected_tools.invoke_tool(tool, arguments, user, agent_name) + raise ToolInvocationError(f"Unsupported tool source: {tool.source}", tool=tool) + + async def close(self) -> None: + """Close the underlying HTTP pipeline.""" + await self._client.close() + + async def __aenter__(self) -> "FoundryToolClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_configuration.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_configuration.py new file mode 100644 index 000000000000..5c3f19a61d55 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_configuration.py @@ -0,0 +1,35 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from azure.core.configuration import Configuration +from azure.core.credentials_async import AsyncTokenCredential +from azure.core.pipeline import policies + +from ...application._package_metadata import get_current_app + + +class FoundryToolClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for Azure AI Tool Client. + + Manages authentication, endpoint configuration, and policy settings for the + Azure AI Tool Client. This class is used internally by the client and should + not typically be instantiated directly. + + :param credential: + Azure TokenCredential for authentication. + :type credential: ~azure.core.credentials.TokenCredential + """ + + def __init__(self, credential: "AsyncTokenCredential"): + super().__init__() + + self.retry_policy = policies.AsyncRetryPolicy() + self.logging_policy = policies.NetworkTraceLoggingPolicy() + self.request_id_policy = policies.RequestIdPolicy() + self.http_logging_policy = policies.HttpLoggingPolicy() + self.user_agent_policy = policies.UserAgentPolicy( + base_user_agent=get_current_app().as_user_agent("FoundryToolClient")) + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + credential, "https://ai.azure.com/.default" + ) + self.redirect_policy = policies.AsyncRedirectPolicy() diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_models.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_models.py new file mode 100644 index 000000000000..095b79e23c38 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_models.py @@ -0,0 +1,592 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# ---------------------------------------------------------
+import asyncio
+import inspect
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+from enum import Enum
+from typing import Annotated, Any, Awaitable, Callable, ClassVar, Dict, Iterable, List, Literal, Mapping, Optional, Set, Type, Union
+
+from azure.core import CaseInsensitiveEnumMeta
+from pydantic import AliasChoices, AliasPath, BaseModel, Discriminator, Field, ModelWrapValidatorHandler, Tag, \
+    TypeAdapter, model_validator
+
+from .._exceptions import OAuthConsentRequiredError
+
+
+class FoundryToolSource(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Identifies the origin of a tool.
+
+    Specifies whether a tool comes from an MCP (Model Context Protocol) server
+    or from the Azure AI Tools API (remote tools).
+    """
+
+    HOSTED_MCP = "hosted_mcp"
+    CONNECTED = "connected"
+
+
+class FoundryToolProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Identifies the protocol used by a connected tool."""
+
+    MCP = "mcp"
+    A2A = "a2a"
+
+
+@dataclass(frozen=True, kw_only=True)
+class FoundryTool(ABC):
+    """Definition of a Foundry tool, including its parameters."""
+    source: FoundryToolSource = field(init=False)
+
+
+@dataclass(frozen=True, kw_only=True)
+class FoundryHostedMcpTool(FoundryTool):
+    """Foundry-hosted MCP tool definition.
+
+    :ivar str name: Name of the MCP tool.
+    :ivar Optional[Mapping[str, Any]] configuration: Optional tool configuration.
+    """
+    source: Literal[FoundryToolSource.HOSTED_MCP] = field(init=False, default=FoundryToolSource.HOSTED_MCP)
+    name: str
+    configuration: Optional[Mapping[str, Any]] = None
+
+    def __str__(self):
+        return f"{self.source}:{self.name}"
+
+
+@dataclass(frozen=True, kw_only=True)
+class FoundryConnectedTool(FoundryTool):
+    """Foundry connected tool definition.
+
+    :ivar str protocol: Protocol used by the connected tool (e.g. "mcp" or "a2a").
+    :ivar str project_connection_id: Project connection ID of the connected tool.
+    """
+    source: Literal[FoundryToolSource.CONNECTED] = field(init=False, default=FoundryToolSource.CONNECTED)
+    protocol: str
+    project_connection_id: str
+
+    def __str__(self):
+        return f"{self.source}:{self.protocol}:{self.project_connection_id}"
+
+
+@dataclass(frozen=True)
+class FoundryToolDetails:
+    """Details about a Foundry tool.
+
+    :ivar str name: Name of the tool.
+    :ivar str description: Description of the tool.
+    :ivar SchemaDefinition input_schema: Input schema for the tool parameters.
+    :ivar Optional[SchemaDefinition] metadata: Optional metadata schema for the tool.
+    """
+    name: str
+    description: str
+    input_schema: "SchemaDefinition"
+    metadata: Optional["SchemaDefinition"] = None
+
+
+@dataclass(frozen=True)
+class ResolvedFoundryTool:
+    """Resolved Foundry tool with definition and details.
+
+    :ivar FoundryTool definition:
+        The tool definition this resolved tool was created from.
+    :ivar FoundryToolDetails details:
+        Details about the tool, including name, description, and input schema.
+ """ + + definition: FoundryTool + details: FoundryToolDetails + invoker: Optional[Callable[..., Awaitable[Any]]] = None # TODO: deprecated + + @property + def source(self) -> FoundryToolSource: + """Origin of the tool.""" + return self.definition.source + + @property + def name(self) -> str: + """Name of the tool.""" + return self.details.name + + @property + def description(self) -> str: + """Description of the tool.""" + return self.details.description + + @property + def input_schema(self) -> "SchemaDefinition": + """Input schema of the tool.""" + return self.details.input_schema + + @property + def metadata(self) -> Optional["SchemaDefinition"]: + """Metadata schema of the tool, if any.""" + return self.details.metadata + + def invoke(self, *args: Any, **kwargs: Any) -> Any: + """Invoke the tool synchronously. + + :param args: Positional arguments to pass to the tool. + :type args: Any + :return: The result from the tool invocation. + :rtype: Any + """ + + if not self.invoker: + raise NotImplementedError("No invoker function defined for this tool.") + if inspect.iscoroutinefunction(self.invoker): + # If the invoker is async, check if we're already in an event loop + try: + asyncio.get_running_loop() + # We're in a running loop, can't use asyncio.run() + raise RuntimeError( + "Cannot call invoke() on an async tool from within an async context. " + "Use 'await tool.ainvoke(...)' or 'await tool(...)' instead." + ) + except RuntimeError as e: + if "no running event loop" in str(e).lower(): + # No running loop, safe to use asyncio.run() + return asyncio.run(self.invoker(*args, **kwargs)) + # Re-raise our custom error + raise + else: + return self.invoker(*args, **kwargs) + + async def ainvoke(self, *args: Any, **kwargs: Any) -> Any: + """Invoke the tool asynchronously. + + :param args: Positional arguments to pass to the tool. + :type args: Any + :return: The result from the tool invocation. + :rtype: Any + """ + + if not self.invoker: + raise NotImplementedError("No invoker function defined for this tool.") + if inspect.iscoroutinefunction(self.invoker): + return await self.invoker(*args, **kwargs) + + result = self.invoker(*args, **kwargs) + # If the result is awaitable (e.g., a coroutine), await it + if inspect.iscoroutine(result) or hasattr(result, '__await__'): + return await result + return result + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + + # Check if the invoker is async + if self.invoker and inspect.iscoroutinefunction(self.invoker): + # Return coroutine for async context + return self.ainvoke(*args, **kwargs) + + # Use sync invoke + return self.invoke(*args, **kwargs) + + +@dataclass(frozen=True) +class UserInfo: + """Represents user information. + + :ivar str object_id: User's object identifier. + :ivar str tenant_id: Tenant identifier. + """ + + object_id: str + tenant_id: str + + +class SchemaType(str, Enum): + """ + Enumeration of possible schema types. + + :ivar py_type: The corresponding Python runtime type for this schema type + (e.g., ``SchemaType.STRING.py_type is str``). 
+ """ + + py_type: Type[Any] + """The corresponding Python runtime type for this schema type.""" + + STRING = ("string", str) + """Schema type for string values (maps to ``str``).""" + + NUMBER = ("number", float) + """Schema type for numeric values with decimals (maps to ``float``).""" + + INTEGER = ("integer", int) + """Schema type for integer values (maps to ``int``).""" + + BOOLEAN = ("boolean", bool) + """Schema type for boolean values (maps to ``bool``).""" + + ARRAY = ("array", list) + """Schema type for array values (maps to ``list``).""" + + OBJECT = ("object", dict) + """Schema type for object/dictionary values (maps to ``dict``).""" + + def __new__(cls, value: str, py_type: Type[Any]): + """ + Create an enum member whose value is the schema type string, while also + attaching the mapped Python type. + + :param value: The serialized schema type string (e.g. ``"string"``). + :param py_type: The mapped Python runtime type (e.g. ``str``). + """ + obj = str.__new__(cls, value) + obj._value_ = value + obj.py_type = py_type + return obj + + @classmethod + def from_python_type(cls, t: Type[Any]) -> "SchemaType": + """ + Get the matching :class:`SchemaType` for a given Python runtime type. + + :param t: A Python runtime type (e.g. ``str``, ``int``, ``float``). + :returns: The corresponding :class:`SchemaType`. + :raises ValueError: If ``t`` is not supported by this enumeration. + """ + for member in cls: + if member.py_type is t: + return member + raise ValueError(f"Unsupported python type: {t!r}") + + +class SchemaProperty(BaseModel): + """ + A JSON Schema-like description of a single property (field) or nested schema node. + + This model is intended to be recursively nestable via :attr:`items` (for arrays) + and :attr:`properties` (for objects). + + :ivar type: The schema node type (e.g., ``string``, ``object``, ``array``). + :ivar description: Optional human-readable description of the property. + :ivar items: The item schema for an ``array`` type. Typically set when + :attr:`type` is :data:`~SchemaType.ARRAY`. + :ivar properties: Nested properties for an ``object`` type. Typically set when + :attr:`type` is :data:`~SchemaType.OBJECT`. Keys are property names, values + are their respective schemas. + :ivar default: Optional default value for the property. + :ivar required: For an ``object`` schema node, the set of required property + names within :attr:`properties`. (This mirrors JSON Schema’s ``required`` + keyword; it is *not* “this property is required in a parent object”.) + """ + + type: SchemaType + description: Optional[str] = None + items: Optional["SchemaProperty"] = None + properties: Optional[Mapping[str, "SchemaProperty"]] = None + default: Any = None + required: Optional[Set[str]] = None + + def has_default(self) -> bool: + """ + Check if the property has a default value defined. + + :return: True if a default value is set, False otherwise. + :rtype: bool + """ + return "default" in self.model_fields_set + + +class SchemaDefinition(BaseModel): + """ + A top-level JSON Schema-like definition for an object. + + :ivar type: The schema type of the root. Typically :data:`~SchemaType.OBJECT`. + :ivar properties: Mapping of top-level property names to their schemas. + :ivar required: Set of required top-level property names within + :attr:`properties`. 
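+
+    .. admonition:: Example (illustrative sketch only)
+
+        .. code-block:: python
+
+            # Hypothetical schema: extracts "model_deployment_name" via the "model"
+            # alias and falls back to the declared default for "size".
+            schema = SchemaDefinition(
+                properties={
+                    "model_deployment_name": SchemaProperty(type=SchemaType.STRING),
+                    "size": SchemaProperty(type=SchemaType.STRING, default="1024x1024"),
+                },
+                required={"model_deployment_name"},
+            )
+            values = schema.extract_from(
+                {"model": "my-image-deployment"},
+                property_alias={"model_deployment_name": ["model"]},
+            )
+            # values == {"model_deployment_name": "my-image-deployment", "size": "1024x1024"}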
+    """
+
+    type: SchemaType = SchemaType.OBJECT
+    properties: Mapping[str, SchemaProperty]
+    required: Optional[Set[str]] = None
+
+    def extract_from(self,
+                     datasource: Mapping[str, Any],
+                     property_alias: Optional[Dict[str, List[str]]] = None) -> Dict[str, Any]:
+        """Extract the values described by this schema from a datasource mapping.
+
+        :param datasource: Mapping to read values from.
+        :type datasource: Mapping[str, Any]
+        :param property_alias: Optional alternative keys to check for each property.
+        :type property_alias: Optional[Dict[str, List[str]]]
+        :return: The extracted values keyed by property name.
+        :rtype: Dict[str, Any]
+        """
+        return self._extract(datasource, self.properties, self.required, property_alias)
+
+    @classmethod
+    def _extract(cls,
+                 datasource: Mapping[str, Any],
+                 properties: Mapping[str, SchemaProperty],
+                 required: Optional[Set[str]] = None,
+                 property_alias: Optional[Dict[str, List[str]]] = None) -> Dict[str, Any]:
+        result: Dict[str, Any] = {}
+
+        for property_name, schema in properties.items():
+            # Determine the keys to look for in the datasource
+            keys_to_check = [property_name]
+            if property_alias and property_name in property_alias:
+                keys_to_check.extend(property_alias[property_name])
+
+            # Find the first matching key in the datasource
+            value_found = False
+            for key in keys_to_check:
+                if key in datasource:
+                    value = datasource[key]
+                    value_found = True
+                    break
+
+            if not value_found and schema.has_default():
+                value = schema.default
+                value_found = True
+
+            if not value_found:
+                # If the property is required but not found, raise an error
+                if required and property_name in required:
+                    raise KeyError(f"Required property '{property_name}' not found in datasource.")
+                # If not found and not required, skip to next property
+                continue
+
+            # Process the value based on its schema type
+            if schema.type == SchemaType.OBJECT and schema.properties:
+                if isinstance(value, Mapping):
+                    nested_value = cls._extract(
+                        value,
+                        schema.properties,
+                        schema.required,
+                        property_alias
+                    )
+                    result[property_name] = nested_value
+            elif schema.type == SchemaType.ARRAY and schema.items:
+                if isinstance(value, Iterable):
+                    nested_list = []
+                    for item in value:
+                        if schema.items.type == SchemaType.OBJECT and schema.items.properties:
+                            if isinstance(item, dict):
+                                nested_item = SchemaDefinition._extract(
+                                    item,
+                                    schema.items.properties,
+                                    schema.items.required,
+                                    property_alias
+                                )
+                                nested_list.append(nested_item)
+                            else:
+                                nested_list.append(item)
+                        else:
+                            # Items without a nested object schema are kept as-is
+                            nested_list.append(item)
+                    result[property_name] = nested_list
+            else:
+                result[property_name] = value
+
+        return result
+
+
+class RawFoundryHostedMcpTool(BaseModel):
+    """Pydantic model for a single MCP tool.
+
+    :ivar str name: Unique name identifier of the tool.
+    :ivar Optional[str] title: Display title of the tool, defaults to name if not provided.
+    :ivar str description: Human-readable description of the tool.
+    :ivar SchemaDefinition input_schema: JSON schema for tool input parameters.
+    :ivar Optional[SchemaDefinition] meta: Optional metadata for the tool.
+    """
+
+    name: str
+    title: Optional[str] = None
+    description: str = ""
+    input_schema: SchemaDefinition = Field(
+        default_factory=SchemaDefinition,
+        validation_alias="inputSchema"
+    )
+    meta: Optional[SchemaDefinition] = Field(default=None, validation_alias="_meta")
+
+    def model_post_init(self, __context: Any) -> None:
+        if self.title is None:
+            self.title = self.name
+
+
+class RawFoundryHostedMcpTools(BaseModel):
+    """Pydantic model for the result containing the list of tools.
+
+    :ivar List[RawFoundryHostedMcpTool] tools: List of MCP tool definitions.
+    """
+
+    tools: List[RawFoundryHostedMcpTool] = Field(default_factory=list)
+
+
+class ListFoundryHostedMcpToolsResponse(BaseModel):
+    """Pydantic model for the complete MCP tools/list JSON-RPC response.
+
+    :ivar str jsonrpc: JSON-RPC version, defaults to "2.0".
+    :ivar int id: Request identifier, defaults to 0.
+    :ivar RawFoundryHostedMcpTools result: Result containing the list of tools.
+    """
+
+    jsonrpc: str = "2.0"
+    id: int = 0
+    result: RawFoundryHostedMcpTools = Field(
+        default_factory=RawFoundryHostedMcpTools
+    )
+
+
+class BaseConnectedToolsErrorResult(BaseModel, ABC):
+    """Base model for connected tools error responses."""
+
+    @abstractmethod
+    def as_exception(self) -> Exception:
+        """Convert the error result to an appropriate exception.
+
+        :return: An exception representing the error.
+        :rtype: Exception
+        """
+        raise NotImplementedError
+
+
+class OAuthConsentRequiredErrorResult(BaseConnectedToolsErrorResult):
+    """Model for OAuth consent required error responses.
+
+    :ivar Literal["OAuthConsentRequired"] type: Error type identifier.
+    :ivar str consent_url: URL the user must visit to grant consent.
+    :ivar str message: Human-readable error message.
+    :ivar str project_connection_id: Project connection ID related to the error.
+    """
+
+    type: Literal["OAuthConsentRequired"]
+    consent_url: str = Field(
+        validation_alias=AliasChoices(
+            AliasPath("toolResult", "consentUrl"),
+            AliasPath("toolResult", "message"),
+        ),
+    )
+    message: str = Field(
+        validation_alias=AliasPath("toolResult", "message"),
+    )
+    project_connection_id: str = Field(
+        validation_alias=AliasPath("toolResult", "projectConnectionId"),
+    )
+
+    def as_exception(self) -> Exception:
+        return OAuthConsentRequiredError(self.message, self.consent_url, self.project_connection_id)
+
+
+class RawFoundryConnectedTool(BaseModel):
+    """Pydantic model for a single connected tool.
+
+    :ivar str name: Name of the tool.
+    :ivar str description: Description of the tool.
+    :ivar SchemaDefinition input_schema: Input schema for the tool parameters.
+    """
+    name: str
+    description: str
+    input_schema: SchemaDefinition = Field(
+        default_factory=SchemaDefinition,
+        validation_alias="parameters",
+    )
+
+
+class RawFoundryConnectedRemoteServer(BaseModel):
+    """Pydantic model for a connected remote server.
+
+    :ivar str protocol: Protocol used by the remote server.
+    :ivar str project_connection_id: Project connection ID of the remote server.
+    :ivar List[RawFoundryConnectedTool] tools: List of connected tools from this server.
+    """
+    protocol: str = Field(
+        validation_alias=AliasPath("remoteServer", "protocol"),
+    )
+    project_connection_id: str = Field(
+        validation_alias=AliasPath("remoteServer", "projectConnectionId"),
+    )
+    tools: List[RawFoundryConnectedTool] = Field(
+        default_factory=list,
+        validation_alias="manifest",
+    )
+
+
+class ListConnectedToolsResult(BaseModel):
+    """Pydantic model for the result of listing connected tools.
+
+    :ivar List[RawFoundryConnectedRemoteServer] servers: List of connected remote servers.
+    """
+    servers: List[RawFoundryConnectedRemoteServer] = Field(
+        default_factory=list,
+        validation_alias="tools",
+    )
+
+
+class ListFoundryConnectedToolsResponse(BaseModel):
+    """Pydantic model for the response of listing the connected tools.
+
+    :ivar Optional[ListConnectedToolsResult] result: Result containing connected tool servers.
+    :ivar Optional[BaseConnectedToolsErrorResult] error: Error result, if any.
+ """ + + result: Optional[ListConnectedToolsResult] = None + error: Optional[BaseConnectedToolsErrorResult] = None + + # noinspection DuplicatedCode + _TYPE_ADAPTER: ClassVar[TypeAdapter] = TypeAdapter( + Annotated[ + Union[ + Annotated[ + Annotated[ + Union[OAuthConsentRequiredErrorResult], + Field(discriminator="type") + ], + Tag("ErrorType") + ], + Annotated[ListConnectedToolsResult, Tag("ResultType")], + ], + Discriminator( + lambda payload: "ErrorType" if isinstance(payload, dict) and "type" in payload else "ResultType" + ), + ]) + + @model_validator(mode="wrap") + @classmethod + def _validator(cls, data: Any, handler: ModelWrapValidatorHandler) -> "ListFoundryConnectedToolsResponse": + parsed = cls._TYPE_ADAPTER.validate_python(data) + normalized = {} + if isinstance(parsed, ListConnectedToolsResult): + normalized["result"] = parsed + elif isinstance(parsed, BaseConnectedToolsErrorResult): + normalized["error"] = parsed + return handler(normalized) + + +class InvokeConnectedToolsResult(BaseModel): + """Pydantic model for the result of invoking a connected tool. + + :ivar Any value: The result value from the tool invocation. + """ + value: Any = Field(serialization_alias="toolResult") + + +class InvokeFoundryConnectedToolsResponse(BaseModel): + """Pydantic model for the response of invoking a connected tool. + + :ivar Optional[InvokeConnectedToolsResult] result: Result of the tool invocation. + :ivar Optional[BaseConnectedToolsErrorResult] error: Error result, if any. + """ + result: Optional[InvokeConnectedToolsResult] = None + error: Optional[BaseConnectedToolsErrorResult] = None + + # noinspection DuplicatedCode + _TYPE_ADAPTER: ClassVar[TypeAdapter] = TypeAdapter( + Annotated[ + Union[ + Annotated[ + Annotated[ + Union[OAuthConsentRequiredErrorResult], + Field(discriminator="type") + ], + Tag("ErrorType") + ], + Annotated[InvokeConnectedToolsResult, Tag("ResultType")], + ], + Discriminator( + lambda payload: "ErrorType" if isinstance(payload, dict) and "type" in payload else "ResultType" + ), + ]) + + @model_validator(mode="wrap") + @classmethod + def _validator(cls, data: Any, handler: ModelWrapValidatorHandler) -> "InvokeFoundryConnectedToolsResponse": + parsed = cls._TYPE_ADAPTER.validate_python(data) + normalized = {} + if isinstance(parsed, InvokeConnectedToolsResult): + normalized["result"] = parsed + elif isinstance(parsed, BaseConnectedToolsErrorResult): + normalized["error"] = parsed + return handler(normalized) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_base.py new file mode 100644 index 000000000000..6123305883c2 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_base.py @@ -0,0 +1,63 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from __future__ import annotations + +from abc import ABC +from typing import Any, ClassVar, MutableMapping, Type + +from azure.core import AsyncPipelineClient +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, \ + ResourceNotFoundError, ResourceNotModifiedError, map_error +from azure.core.rest import AsyncHttpResponse, HttpRequest + +ErrorMapping = MutableMapping[int, Type[HttpResponseError]] + + +class BaseOperations(ABC): + DEFAULT_ERROR_MAP: ClassVar[ErrorMapping] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + + def __init__(self, client: AsyncPipelineClient, error_map: ErrorMapping | None = None) -> None: + self._client = client + self._error_map = self._prepare_error_map(error_map) + + @classmethod + def _prepare_error_map(cls, custom_error_map: ErrorMapping | None = None) -> MutableMapping: + """Prepare error map by merging default and custom error mappings. + + :param custom_error_map: Custom error mappings to merge + :return: Merged error map + """ + error_map = cls.DEFAULT_ERROR_MAP + if custom_error_map: + error_map = dict(cls.DEFAULT_ERROR_MAP) + error_map.update(custom_error_map) + return error_map + + async def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> AsyncHttpResponse: + """Send an HTTP request. + + :param request: HTTP request + :param stream: Stream to be used for HTTP requests + :param kwargs: Keyword arguments + + :return: Response object + """ + response: AsyncHttpResponse = await self._client.send_request(request, stream=stream, **kwargs) + self._handle_response_error(response) + return response + + def _handle_response_error(self, response: AsyncHttpResponse) -> None: + """Handle HTTP response errors. + + :param response: HTTP response to check + :raises HttpResponseError: If response status is not 200 + """ + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=self._error_map) + raise HttpResponseError(response=response) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_connected_tools.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_connected_tools.py new file mode 100644 index 000000000000..46b7672ae348 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_connected_tools.py @@ -0,0 +1,176 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# ---------------------------------------------------------
+from abc import ABC
+from typing import Any, ClassVar, Dict, List, Mapping, Optional, cast
+
+from azure.core.pipeline.transport import HttpRequest
+
+from .._models import FoundryConnectedTool, FoundryToolDetails, FoundryToolSource, InvokeFoundryConnectedToolsResponse, \
+    ListFoundryConnectedToolsResponse, ResolvedFoundryTool, UserInfo
+from ..._exceptions import ToolInvocationError
+from ._base import BaseOperations
+
+
+class BaseFoundryConnectedToolsOperations(BaseOperations, ABC):
+    """Base operations for Foundry connected tools."""
+
+    _API_VERSION: ClassVar[str] = "2025-11-15-preview"
+
+    _HEADERS: ClassVar[Dict[str, str]] = {
+        "Content-Type": "application/json",
+        "Accept": "application/json",
+    }
+
+    _QUERY_PARAMS: ClassVar[Dict[str, Any]] = {
+        "api-version": _API_VERSION
+    }
+
+    @staticmethod
+    def _list_tools_path(agent_name: str) -> str:
+        return f"/agents/{agent_name}/tools/resolve"
+
+    @staticmethod
+    def _invoke_tool_path(agent_name: str) -> str:
+        return f"/agents/{agent_name}/tools/invoke"
+
+    def _build_list_tools_request(
+            self,
+            tools: List[FoundryConnectedTool],
+            user: Optional[UserInfo],
+            agent_name: str) -> HttpRequest:
+        payload: Dict[str, Any] = {
+            "remoteServers": [
+                {
+                    "projectConnectionId": tool.project_connection_id,
+                    "protocol": tool.protocol,
+                } for tool in tools
+            ],
+        }
+        if user:
+            payload["user"] = {
+                "objectId": user.object_id,
+                "tenantId": user.tenant_id,
+            }
+        return self._client.post(
+            self._list_tools_path(agent_name),
+            params=self._QUERY_PARAMS,
+            headers=self._HEADERS,
+            content=payload)
+
+    @classmethod
+    def _convert_listed_tools(
+            cls,
+            resp: ListFoundryConnectedToolsResponse,
+            input_tools: List[FoundryConnectedTool]) -> Mapping[FoundryConnectedTool, FoundryToolDetails]:
+        if resp.error:
+            raise resp.error.as_exception()
+        if not resp.result:
+            return {}
+
+        tool_map = {(tool.project_connection_id, tool.protocol): tool for tool in input_tools}
+        result = {}
+        for server in resp.result.servers:
+            input_tool = tool_map.get((server.project_connection_id, server.protocol))
+            if not input_tool:
+                continue
+
+            for tool in server.tools:
+                details = FoundryToolDetails(
+                    name=tool.name,
+                    description=tool.description,
+                    input_schema=tool.input_schema,
+                )
+                # key by the input definition so callers can match results to their FoundryConnectedTool
+                result[input_tool] = details
+
+        return result
+
+    def _build_invoke_tool_request(
+            self,
+            tool: ResolvedFoundryTool,
+            arguments: Dict[str, Any],
+            user: Optional[UserInfo],
+            agent_name: str) -> HttpRequest:
+        if tool.definition.source != FoundryToolSource.CONNECTED:
+            raise ToolInvocationError(f"Tool {tool.name} is not a Foundry connected tool.", tool=tool)
+
+        tool_def = cast(FoundryConnectedTool, tool.definition)
+        payload: Dict[str, Any] = {
+            "toolName": tool.name,
+            "arguments": arguments,
+            "remoteServer": {
+                "projectConnectionId": tool_def.project_connection_id,
+                "protocol": tool_def.protocol,
+            },
+        }
+        if user:
+            payload["user"] = {
+                "objectId": user.object_id,
+                "tenantId": user.tenant_id,
+            }
+        return self._client.post(
+            self._invoke_tool_path(agent_name),
+            params=self._QUERY_PARAMS,
+            headers=self._HEADERS,
+            content=payload)
+
+    @classmethod
+    def _convert_invoke_result(cls, resp: InvokeFoundryConnectedToolsResponse) -> Any:
+        if resp.error:
+            raise resp.error.as_exception()
+        if not resp.result:
+            return None
+        return resp.result.value
+
+
+class FoundryConnectedToolsOperations(BaseFoundryConnectedToolsOperations):
+    """Operations for managing Foundry connected tools."""
+
+    async def list_tools(self,
+                         tools: List[FoundryConnectedTool],
+                         user: Optional[UserInfo],
+                         agent_name: str) -> Mapping[FoundryConnectedTool, FoundryToolDetails]:
+        """List connected tools.
+
+        :param tools: List of connected tool definitions.
+        :type tools: List[FoundryConnectedTool]
+        :param user: User information for the request. Value can be None when running locally.
+        :type user: Optional[UserInfo]
+        :param agent_name: Name of the agent.
+        :type agent_name: str
+        :return: Details of connected tools.
+        :rtype: Mapping[FoundryConnectedTool, FoundryToolDetails]
+        """
+        if not tools:
+            return {}
+        request = self._build_list_tools_request(tools, user, agent_name)
+        response = await self._send_request(request)
+        async with response:
+            tools_response = ListFoundryConnectedToolsResponse.model_validate(response.json())
+            return self._convert_listed_tools(tools_response, tools)
+
+    async def invoke_tool(
+            self,
+            tool: ResolvedFoundryTool,
+            arguments: Dict[str, Any],
+            user: Optional[UserInfo],
+            agent_name: str) -> Any:
+        """Invoke a connected tool.
+
+        :param tool: Tool descriptor to invoke.
+        :type tool: ResolvedFoundryTool
+        :param arguments: Input arguments for the tool.
+        :type arguments: Dict[str, Any]
+        :param user: User information for the request. Value can be None when running locally.
+        :type user: Optional[UserInfo]
+        :param agent_name: Name of the agent.
+        :type agent_name: str
+        :return: Result of the tool invocation.
+        :rtype: Any
+        """
+        request = self._build_invoke_tool_request(tool, arguments, user, agent_name)
+        response = await self._send_request(request)
+        async with response:
+            invoke_response = InvokeFoundryConnectedToolsResponse.model_validate(response.json())
+            return self._convert_invoke_result(invoke_response)
diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_hosted_mcp_tools.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_hosted_mcp_tools.py
new file mode 100644
index 000000000000..19b7be63de65
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_hosted_mcp_tools.py
@@ -0,0 +1,163 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from abc import ABC
+from typing import Any, ClassVar, Dict, List, Mapping, cast
+
+from azure.core.rest import HttpRequest
+
+from azure.ai.agentserver.core.tools import FoundryHostedMcpTool, FoundryToolSource, ResolvedFoundryTool, \
+    ToolInvocationError
+from azure.ai.agentserver.core.tools.client._models import FoundryToolDetails, ListFoundryHostedMcpToolsResponse
+from azure.ai.agentserver.core.tools.client.operations._base import BaseOperations
+
+
+class BaseFoundryHostedMcpToolsOperations(BaseOperations, ABC):
+    """Base operations for Foundry-hosted MCP tools."""
+
+    _PATH: ClassVar[str] = "/mcp_tools"
+
+    _API_VERSION: ClassVar[str] = "2025-11-15-preview"
+
+    _HEADERS: ClassVar[Dict[str, str]] = {
+        "Content-Type": "application/json",
+        "Accept": "application/json,text/event-stream",
+        "Connection": "keep-alive",
+        "Cache-Control": "no-cache",
+    }
+
+    _QUERY_PARAMS: ClassVar[Dict[str, Any]] = {
+        "api-version": _API_VERSION
+    }
+
+    _LIST_TOOLS_REQUEST_BODY: ClassVar[Dict[str, Any]] = {
+        "jsonrpc": "2.0",
+        "id": 1,
+        "method": "tools/list",
+        "params": {}
+    }
+
+    _INVOKE_TOOL_REQUEST_BODY_TEMPLATE: ClassVar[Dict[str, Any]] = {
+        "jsonrpc": "2.0",
+        "id": 2,
+        "method": "tools/call",
+    }
+
+    # Tool-specific property key overrides
+    # Format: {"tool_name": {"property_name": ["alias", ...]}}
+    _TOOL_PROPERTY_ALIAS: ClassVar[Dict[str, Dict[str, List[str]]]] = {
+        "_default": {
+            "imagegen_model_deployment_name": ["model_deployment_name"],
+            "model_deployment_name": ["model"],
+            "deployment_name": ["model"],
+        },
+        "image_generation": {
+            "imagegen_model_deployment_name": ["model"]
+        },
+        # Add more tool-specific mappings as needed
+    }
+
+    def _build_list_tools_request(self) -> HttpRequest:
+        """Build the request for listing MCP tools.
+
+        :return: Request for listing MCP tools.
+        :rtype: ~azure.core.rest.HttpRequest
+        """
+        return self._client.post(self._PATH,
+                                 params=self._QUERY_PARAMS,
+                                 headers=self._HEADERS,
+                                 content=self._LIST_TOOLS_REQUEST_BODY)
+
+    @staticmethod
+    def _convert_listed_tools(
+            response: ListFoundryHostedMcpToolsResponse,
+            allowed_tools: List[FoundryHostedMcpTool]) -> Mapping[FoundryHostedMcpTool, FoundryToolDetails]:
+
+        allowlist = {tool.name: tool for tool in allowed_tools}
+        result = {}
+        for tool in response.result.tools:
+            definition = allowlist.get(tool.name)
+            if not definition:
+                continue
+            details = FoundryToolDetails(
+                name=tool.name,
+                description=tool.description,
+                metadata=tool.meta,
+                input_schema=tool.input_schema)
+            result[definition] = details
+
+        return result
+
+    def _build_invoke_tool_request(self, tool: ResolvedFoundryTool, arguments: Dict[str, Any]) -> HttpRequest:
+        if tool.definition.source != FoundryToolSource.HOSTED_MCP:
+            raise ToolInvocationError(f"Tool {tool.name} is not a Foundry-hosted MCP tool.", tool=tool)
+        definition = cast(FoundryHostedMcpTool, tool.definition)
+
+        payload = dict(self._INVOKE_TOOL_REQUEST_BODY_TEMPLATE)
+        payload["params"] = {
+            "name": tool.name,
+            "arguments": arguments
+        }
+        if tool.metadata and definition.configuration:
+            payload["_meta"] = tool.metadata.extract_from(definition.configuration,
+                                                          self._resolve_property_alias(tool.name))
+
+        return self._client.post(self._PATH,
+                                 params=self._QUERY_PARAMS,
+                                 headers=self._HEADERS,
+                                 content=payload)
+
+    @classmethod
+    def _resolve_property_alias(cls, tool_name: str) -> Dict[str, List[str]]:
+        """Get property key overrides for a specific tool.
+ + :param tool_name: Name of the tool. + :type tool_name: str + :return: Property key overrides. + :rtype: Dict[str, List[str]] + """ + overrides = dict(cls._TOOL_PROPERTY_ALIAS.get("_default", {})) + tool_specific = cls._TOOL_PROPERTY_ALIAS.get(tool_name, {}) + overrides.update(tool_specific) + return overrides + + +class FoundryMcpToolsOperations(BaseFoundryHostedMcpToolsOperations): + """Operations for Foundry-hosted MCP tools.""" + + async def list_tools( + self, + allowed_tools: List[FoundryHostedMcpTool]) -> Mapping[FoundryHostedMcpTool, FoundryToolDetails]: + """List MCP tools. + + :param allowed_tools: List of allowed MCP tools to filter. + :type allowed_tools: List[FoundryHostedMcpTool] + :return: Details of MCP tools. + :rtype: Mapping[FoundryHostedMcpTool, FoundryToolDetails] + """ + if not allowed_tools: + return {} + + request = self._build_list_tools_request() + response = await self._send_request(request) + async with response: + tools_response = ListFoundryHostedMcpToolsResponse.model_validate(response.json()) + return self._convert_listed_tools(tools_response, allowed_tools) + + async def invoke_tool( + self, + tool: ResolvedFoundryTool, + arguments: Dict[str, Any], + ) -> Any: + """Invoke an MCP tool. + + :param tool: Tool descriptor for the tool to invoke. + :type tool: ResolvedFoundryTool + :param arguments: Input arguments for the tool. + :type arguments: Dict[str, Any] + :return: Result of the tool invocation. + :rtype: Any + """ + request = self._build_invoke_tool_request(tool, arguments) + response = await self._send_request(request) + async with response: + return response.json().get("result") diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/__init__.py new file mode 100644 index 000000000000..28077537d94b --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/__init__.py @@ -0,0 +1,5 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py new file mode 100644 index 000000000000..4221dd5401cd --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py @@ -0,0 +1,128 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +import asyncio +from abc import ABC, abstractmethod +from typing import Any, Awaitable, Dict, List, Mapping, MutableMapping, Optional + +from cachetools import TTLCache + +from ._facade import FoundryToolLike, ensure_foundry_tool +from ._user import UserProvider +from ..client._client import FoundryToolClient +from ..client._models import FoundryTool, FoundryToolDetails, FoundryToolSource, ResolvedFoundryTool, UserInfo + + +class FoundryToolCatalog(ABC): + """Base class for Foundry tool catalogs.""" + def __init__(self, user_provider: UserProvider): + self._user_provider = user_provider + + async def get(self, tool: FoundryToolLike) -> Optional[ResolvedFoundryTool]: + """Gets a Foundry tool by its definition. + + :param tool: The Foundry tool to resolve. + :type tool: FoundryToolLike + :return: The resolved Foundry tool. + :rtype: Optional[ResolvedFoundryTool] + """ + tools = await self.list([tool]) + return tools[0] if tools else None + + @abstractmethod + async def list(self, tools: List[FoundryToolLike]) -> List[ResolvedFoundryTool]: + """Lists all available Foundry tools. + + :param tools: The list of Foundry tools to resolve. + :type tools: List[FoundryToolLike] + :return: A list of resolved Foundry tools. + :rtype: List[ResolvedFoundryTool] + """ + raise NotImplementedError + + +class CachedFoundryToolCatalog(FoundryToolCatalog, ABC): + """Cached implementation of FoundryToolCatalog with concurrency-safe caching.""" + + def __init__(self, user_provider: UserProvider): + super().__init__(user_provider) + self._cache: MutableMapping[FoundryTool, Awaitable[Optional[FoundryToolDetails]]] = self._create_cache() + + def _create_cache(self) -> MutableMapping[FoundryTool, Awaitable[Optional[FoundryToolDetails]]]: + return TTLCache(maxsize=1024, ttl=600) + + def _get_key(self, user: Optional[UserInfo], tool: FoundryTool) -> Any: + if tool.source is FoundryToolSource.HOSTED_MCP: + return tool + return user, tool + + async def list(self, tools: List[FoundryToolLike]) -> List[ResolvedFoundryTool]: + """Lists all available Foundry tools with concurrency-safe caching. + + :param tools: The list of Foundry tools to resolve. + :type tools: List[FoundryToolLike] + :return: A list of resolved Foundry tools. 
:rtype: List[ResolvedFoundryTool]
+        """
+        user = await self._user_provider.get_user()
+        tools: List[FoundryTool] = [ensure_foundry_tool(tool) for tool in tools]  # type: ignore
+
+        # For tools that are not cached yet, create one batch fetch task, fan it out into
+        # per-tool resolving tasks, and cache those tasks.
+        tools_to_fetch = [tool for tool in tools if self._get_key(user, tool) not in self._cache]
+        if tools_to_fetch:
+            # Awaitable[Mapping[FoundryTool, FoundryToolDetails]]
+            fetched_tools = asyncio.create_task(self._fetch_tools(tools_to_fetch, user))
+
+            for tool in tools_to_fetch:
+                # safe to write cache since it's the only runner in this event loop
+                self._cache[self._get_key(user, tool)] = asyncio.create_task(
+                    self._as_resolving_task(tool, fetched_tools))
+
+        # now we have every tool associated with a task
+        resolving_tasks: Dict[FoundryTool, Awaitable[Optional[FoundryToolDetails]]] = {
+            tool: self._cache[self._get_key(user, tool)]
+            for tool in tools
+        }
+
+        await asyncio.gather(*resolving_tasks.values())
+
+        resolved_tools = []
+        for tool, task in resolving_tasks.items():
+            # this acts like a lock - every task of the same tool waits for the same underlying fetch
+            details = await task
+            if details is not None:
+                # unresolved tools are filtered out here (see _as_resolving_task())
+                resolved_tools.append(
+                    ResolvedFoundryTool(
+                        definition=tool,
+                        details=details
+                    )
+                )
+
+        return resolved_tools
+
+    @staticmethod
+    async def _as_resolving_task(
+            tool: FoundryTool,
+            fetching: Awaitable[Mapping[FoundryTool, FoundryToolDetails]]) -> Optional[FoundryToolDetails]:
+        details = await fetching
+        return details.get(tool, None)
+
+    @abstractmethod
+    async def _fetch_tools(self,
+                           tools: List[FoundryTool],
+                           user: Optional[UserInfo]) -> Mapping[FoundryTool, FoundryToolDetails]:
+        raise NotImplementedError
+
+
+class DefaultFoundryToolCatalog(CachedFoundryToolCatalog):
+    """Default implementation of FoundryToolCatalog."""
+
+    def __init__(self, client: FoundryToolClient, user_provider: UserProvider, agent_name: str):
+        super().__init__(user_provider)
+        self._client = client
+        self._agent_name = agent_name
+
+    async def _fetch_tools(self,
+                           tools: List[FoundryTool],
+                           user: Optional[UserInfo]) -> Mapping[FoundryTool, FoundryToolDetails]:
+        return await self._client.list_tools(tools, self._agent_name, user)
diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_facade.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_facade.py
new file mode 100644
index 000000000000..ebaca87cf1a7
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_facade.py
@@ -0,0 +1,49 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from typing import Any, Dict, Union
+
+from .. import FoundryConnectedTool, FoundryHostedMcpTool
+from .._exceptions import InvalidToolFacadeError
+from ..client._models import FoundryTool, FoundryToolProtocol
+
+# FoundryToolFacade: a “tool descriptor” bag.
+#
+# Reserved keys:
+#   Required:
+#     - "type": str                     Discriminator, e.g. "mcp" | "a2a" | "code_interpreter" | ...
+#   Optional:
+#     - "project_connection_id": str    Project connection id of a Foundry connected tool;
+#                                       required when "type" is "mcp" or "a2a".
+#
+# Custom keys:
+#   - Allowed, but MUST NOT shadow reserved keys.
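+#
+# Illustrative examples (assumed shapes, for reference only; see ensure_foundry_tool below):
+#   {"type": "mcp", "project_connection_id": "<connection-id>"}   -> FoundryConnectedTool
+#   {"type": "code_interpreter", "some_option": "value"}          -> FoundryHostedMcpTool
+#     (unknown "type" values are treated as hosted MCP tool names; the remaining keys
+#      become the tool's configuration)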
+FoundryToolFacade = Dict[str, Any]
+
+FoundryToolLike = Union[FoundryToolFacade, FoundryTool]
+
+
+def ensure_foundry_tool(tool: FoundryToolLike) -> FoundryTool:
+    """Ensure the input is a FoundryTool instance.
+
+    :param tool: The tool descriptor, either as a FoundryToolFacade or FoundryTool.
+    :type tool: FoundryToolLike
+    :return: The corresponding FoundryTool instance.
+    :rtype: FoundryTool
+    :raises InvalidToolFacadeError: If the facade is missing a valid 'type' or a required
+        'project_connection_id'.
+    """
+    if isinstance(tool, FoundryTool):
+        return tool
+
+    tool = tool.copy()
+    tool_type = tool.pop("type", None)
+    if not isinstance(tool_type, str) or not tool_type:
+        raise InvalidToolFacadeError("FoundryToolFacade must have a valid 'type' field of type str.")
+
+    try:
+        protocol = FoundryToolProtocol(tool_type)
+    except ValueError:
+        # Unknown protocol: treat the facade as a Foundry-hosted MCP tool and keep the
+        # remaining keys as its configuration.
+        return FoundryHostedMcpTool(name=tool_type, configuration=tool)
+
+    project_connection_id = tool.pop("project_connection_id", None)
+    if not isinstance(project_connection_id, str) or not project_connection_id:
+        raise InvalidToolFacadeError(f"project_connection_id is required for tool protocol {protocol}.")
+
+    return FoundryConnectedTool(protocol=protocol, project_connection_id=project_connection_id)
diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_invoker.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_invoker.py
new file mode 100644
index 000000000000..d24c79dd4d12
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_invoker.py
@@ -0,0 +1,69 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+from abc import ABC, abstractmethod
+from typing import Any, Dict
+
+from ._user import UserProvider
+from ..client._client import FoundryToolClient
+from ..client._models import ResolvedFoundryTool
+
+
+class FoundryToolInvoker(ABC):
+    """Abstract base class for Foundry tool invokers."""
+
+    @property
+    @abstractmethod
+    def resolved_tool(self) -> ResolvedFoundryTool:
+        """Get the resolved tool definition.
+
+        :return: The tool definition.
+        :rtype: ResolvedFoundryTool
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    async def invoke(self, arguments: Dict[str, Any]) -> Any:
+        """Invoke the tool with the given arguments.
+
+        :param arguments: The arguments to pass to the tool.
+        :type arguments: Dict[str, Any]
+        :return: The result of the tool invocation.
+        :rtype: Any
+        """
+        raise NotImplementedError
+
+
+class DefaultFoundryToolInvoker(FoundryToolInvoker):
+    """Default implementation of FoundryToolInvoker."""
+
+    def __init__(self,
+                 resolved_tool: ResolvedFoundryTool,
+                 client: FoundryToolClient,
+                 user_provider: UserProvider,
+                 agent_name: str):
+        self._resolved_tool = resolved_tool
+        self._client = client
+        self._user_provider = user_provider
+        self._agent_name = agent_name
+
+    @property
+    def resolved_tool(self) -> ResolvedFoundryTool:
+        """Get the resolved tool definition.
+
+        :return: The tool definition.
+        :rtype: ResolvedFoundryTool
+        """
+        return self._resolved_tool
+
+    async def invoke(self, arguments: Dict[str, Any]) -> Any:
+        """Invoke the tool with the given arguments.
+ + :param arguments: The arguments to pass to the tool + :type arguments: Dict[str, Any] + :return: The result of the tool invocation + :rtype: Any + """ + user = await self._user_provider.get_user() + result = await self._client.invoke_tool(self._resolved_tool, arguments, self._agent_name, user) + return result diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_resolver.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_resolver.py new file mode 100644 index 000000000000..efccc7449161 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_resolver.py @@ -0,0 +1,55 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from abc import ABC, abstractmethod + +from ._catalog import FoundryToolCatalog +from ._facade import FoundryToolLike, ensure_foundry_tool +from ._invoker import DefaultFoundryToolInvoker, FoundryToolInvoker +from ._user import UserProvider +from .. import FoundryToolClient +from .._exceptions import UnableToResolveToolInvocationError +from ..client._models import FoundryTool + + +class FoundryToolInvocationResolver(ABC): + """Resolver for Foundry tool invocations.""" + + @abstractmethod + async def resolve(self, tool: FoundryToolLike) -> FoundryToolInvoker: + """Resolves a Foundry tool invocation. + + :param tool: The Foundry tool to resolve. + :type tool: FoundryToolLike + :return: The resolved Foundry tool invoker. + :rtype: FoundryToolInvoker + """ + raise NotImplementedError + + +class DefaultFoundryToolInvocationResolver(FoundryToolInvocationResolver): + """Default implementation of FoundryToolInvocationResolver.""" + + def __init__(self, + catalog: FoundryToolCatalog, + client: FoundryToolClient, + user_provider: UserProvider, + agent_name: str): + self._catalog = catalog + self._client = client + self._user_provider = user_provider + self._agent_name = agent_name + + async def resolve(self, tool: FoundryToolLike) -> FoundryToolInvoker: + """Resolves a Foundry tool invocation. + + :param tool: The Foundry tool to resolve. + :type tool: FoundryToolLike + :return: The resolved Foundry tool invoker. + :rtype: FoundryToolInvoker + """ + tool = ensure_foundry_tool(tool) + resolved_tool = await self._catalog.get(tool) + if not resolved_tool: + raise UnableToResolveToolInvocationError(f"Unable to resolve tool {tool} from catalog", tool) + return DefaultFoundryToolInvoker(resolved_tool, self._client, self._user_provider, self._agent_name) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_runtime.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_runtime.py new file mode 100644 index 000000000000..6ee9930181d1 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_runtime.py @@ -0,0 +1,86 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +import os +from typing import Any, AsyncContextManager, Dict, Optional + +from azure.core.credentials_async import AsyncTokenCredential + +from ._catalog import DefaultFoundryToolCatalog, FoundryToolCatalog +from ._facade import FoundryToolLike +from ._resolver import DefaultFoundryToolInvocationResolver, FoundryToolInvocationResolver +from ._user import ContextVarUserProvider, UserProvider +from ..client._client import FoundryToolClient +from ...constants import Constants + + +class FoundryToolRuntime(AsyncContextManager["FoundryToolRuntime"]): + """Base class for Foundry tool runtimes.""" + + @property + def catalog(self) -> FoundryToolCatalog: + """The tool catalog. + + :return: The tool catalog. + :rtype: FoundryToolCatalog + """ + raise NotImplementedError + + @property + def invocation(self) -> FoundryToolInvocationResolver: + """The tool invocation resolver. + + :return: The tool invocation resolver. + :rtype: FoundryToolInvocationResolver + """ + raise NotImplementedError + + async def invoke(self, tool: FoundryToolLike, arguments: Dict[str, Any]) -> Any: + """Invoke a tool with the given arguments. + + :param tool: The tool to invoke. + :type tool: FoundryToolLike + :param arguments: The arguments to pass to the tool. + :type arguments: Dict[str, Any] + :return: The result of the tool invocation. + :rtype: Any + """ + invoker = await self.invocation.resolve(tool) + return await invoker.invoke(arguments) + + +class DefaultFoundryToolRuntime(FoundryToolRuntime): + """Default implementation of FoundryToolRuntime.""" + + def __init__(self, + project_endpoint: str, + credential: "AsyncTokenCredential", + user_provider: Optional[UserProvider] = None): + # Do we need introduce DI here? + self._user_provider = user_provider or ContextVarUserProvider() + self._agent_name = os.getenv(Constants.AGENT_NAME, "$default") + self._client = FoundryToolClient(endpoint=project_endpoint, credential=credential) + self._catalog = DefaultFoundryToolCatalog(client=self._client, + user_provider=self._user_provider, + agent_name=self._agent_name) + self._invocation = DefaultFoundryToolInvocationResolver(catalog=self._catalog, + client=self._client, + user_provider=self._user_provider, + agent_name=self._agent_name) + + @property + def catalog(self) -> FoundryToolCatalog: + """The tool catalog.""" + return self._catalog + + @property + def invocation(self) -> FoundryToolInvocationResolver: + """The tool invocation resolver.""" + return self._invocation + + async def __aenter__(self) -> "DefaultFoundryToolRuntime": + await self._client.__aenter__() + return self + + async def __aexit__(self, exc_type, exc_value, traceback): + await self._client.__aexit__(exc_type, exc_value, traceback) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_starlette.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_starlette.py new file mode 100644 index 000000000000..17b25095a953 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_starlette.py @@ -0,0 +1,65 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from contextvars import ContextVar +from typing import Awaitable, Callable, Optional + +from starlette.applications import Starlette +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.requests import Request +from starlette.types import ASGIApp + +from ._user import ContextVarUserProvider, resolve_user_from_headers +from ..client._models import UserInfo + +_UserContextType = ContextVar[Optional[UserInfo]] +_ResolverType = Callable[[Request], Awaitable[Optional[UserInfo]]] + +class UserInfoContextMiddleware(BaseHTTPMiddleware): + """Middleware to set user information in a context variable for each request.""" + + def __init__(self, app: ASGIApp, user_info_var: _UserContextType, user_resolver: _ResolverType): + super().__init__(app) + self._user_info_var = user_info_var + self._user_resolver = user_resolver + + @classmethod + def install(cls, + app: Starlette, + user_context: Optional[_UserContextType] = None, + user_resolver: Optional[_ResolverType] = None): + """Install the middleware into a Starlette application. + + :param app: The Starlette application to install the middleware into. + :type app: Starlette + :param user_context: Optional context variable to use for storing user info. + If not provided, a default context variable will be used. + :type user_context: Optional[ContextVar[Optional[UserInfo]]] + :param user_resolver: Optional function to resolve user info from the request. + If not provided, a default resolver will be used. + :type user_resolver: Optional[Callable[[Request], Awaitable[Optional[UserInfo]]]] + """ + app.add_middleware(UserInfoContextMiddleware, + user_info_var=user_context or ContextVarUserProvider.default_user_info_context, + user_resolver=user_resolver or cls._default_user_resolver) + + @staticmethod + async def _default_user_resolver(request: Request) -> Optional[UserInfo]: + return resolve_user_from_headers(request.headers) + + async def dispatch(self, request: Request, call_next): + """Process the incoming request, setting the user info in the context variable. + + :param request: The incoming Starlette request. + :type request: Request + :param call_next: The next middleware or endpoint to call. + :type call_next: Callable[[Request], Awaitable[Response]] + :return: The response from the next middleware or endpoint. + :rtype: Response + """ + user = await self._user_resolver(request) + token = self._user_info_var.set(user) + try: + return await call_next(request) + finally: + self._user_info_var.reset(token) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_user.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_user.py new file mode 100644 index 000000000000..14d8aad2690a --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_user.py @@ -0,0 +1,52 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from contextvars import ContextVar +from abc import ABC, abstractmethod +from typing import ClassVar, Mapping, Optional + +from ..client._models import UserInfo + + +class UserProvider(ABC): + """Base class for user providers.""" + + @abstractmethod + async def get_user(self) -> Optional[UserInfo]: + """Get the user information.""" + raise NotImplementedError + + +class ContextVarUserProvider(UserProvider): + """User provider that retrieves user information from a ContextVar.""" + default_user_info_context: ClassVar[ContextVar[UserInfo]] = ContextVar("user_info_context") + + def __init__(self, context: Optional[ContextVar[UserInfo]] = None): + self.context = context or self.default_user_info_context + + async def get_user(self) -> Optional[UserInfo]: + """Get the user information from the context variable.""" + return self.context.get(None) + + +def resolve_user_from_headers(headers: Mapping[str, str], + object_id_header: str = "x-aml-oid", + tenant_id_header: str = "x-aml-tid") -> Optional[UserInfo]: + """Resolve user information from HTTP headers. + + :param headers: The HTTP headers. + :type headers: Mapping[str, str] + :param object_id_header: The header name for the object ID. + :type object_id_header: str + :param tenant_id_header: The header name for the tenant ID. + :type tenant_id_header: str + :return: The user information or None if not found. + :rtype: Optional[UserInfo] + """ + object_id = headers.get(object_id_header, "") + tenant_id = headers.get(tenant_id_header, "") + + if not object_id or not tenant_id: + return None + + return UserInfo(object_id=object_id, tenant_id=tenant_id) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/utils/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/utils/__init__.py new file mode 100644 index 000000000000..28077537d94b --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/utils/__init__.py @@ -0,0 +1,5 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/utils/_credential.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/utils/_credential.py new file mode 100644 index 000000000000..24de2e1345a4 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/utils/_credential.py @@ -0,0 +1,89 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from __future__ import annotations + +import asyncio +import inspect +from types import TracebackType +from typing import Any, Optional, Sequence, Type, Union + +from azure.core.credentials import AccessToken, TokenCredential +from azure.core.credentials_async import AsyncTokenCredential + + +async def _to_thread(func, *args, **kwargs): + """Compatibility wrapper for asyncio.to_thread (Python 3.8+).""" + if hasattr(asyncio, "to_thread"): + return await asyncio.to_thread(func, *args, **kwargs) # py>=3.9 + loop = asyncio.get_running_loop() + return await loop.run_in_executor(None, lambda: func(*args, **kwargs)) + + +class AsyncTokenCredentialAdapter(AsyncTokenCredential): + """ + AsyncTokenCredential adapter for either: + - azure.core.credentials.TokenCredential (sync) + - azure.core.credentials_async.AsyncTokenCredential (async) + """ + + def __init__(self, credential: TokenCredential |AsyncTokenCredential) -> None: + if not hasattr(credential, "get_token"): + raise TypeError("credential must have a get_token method") + self._credential = credential + self._is_async = isinstance(credential, AsyncTokenCredential) or inspect.iscoroutinefunction( + getattr(credential, "get_token", None) + ) + + async def get_token( + self, + *scopes: str, + claims: str | None = None, + tenant_id: str | None = None, + enable_cae: bool = False, + **kwargs: Any, + ) -> AccessToken: + if self._is_async: + return await self._credential.get_token(*scopes, + claims=claims, + tenant_id=tenant_id, + enable_cae=enable_cae, + **kwargs) + return await _to_thread(self._credential.get_token, + *scopes, + claims=claims, + tenant_id=tenant_id, + enable_cae=enable_cae, + **kwargs) + + async def close(self) -> None: + """ + Best-effort resource cleanup: + - if underlying has async close(): await it + - else if underlying has sync close(): run it in a thread + """ + close_fn = getattr(self._credential, "close", None) + if close_fn is None: + return + + if inspect.iscoroutinefunction(close_fn): + await close_fn() + else: + await _to_thread(close_fn) + + async def __aenter__(self) -> "AsyncTokenCredentialAdapter": + enter = getattr(self._credential, "__aenter__", None) + if enter is not None and inspect.iscoroutinefunction(enter): + await enter() + return self + + async def __aexit__( + self, + exc_type: Type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + aexit = getattr(self._credential, "__aexit__", None) + if aexit is not None and inspect.iscoroutinefunction(aexit): + return await aexit(exc_type, exc_value, traceback) + await self.close() \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml index 9f3d01c09c88..e53b8f5474b7 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml +++ b/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml @@ -30,6 +30,7 @@ dependencies = [ "starlette>=0.45.0", "uvicorn>=0.31.0", "aiohttp>=3.13.0", # used by azure-identity aio + "cachetools" ] [build-system] diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/__init__.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/__init__.py index 569166bc3786..d841a0afb459 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/__init__.py +++ 
diff --git a/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml
index 9f3d01c09c88..e53b8f5474b7 100644
--- a/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml
+++ b/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml
@@ -30,6 +30,7 @@ dependencies = [
     "starlette>=0.45.0",
     "uvicorn>=0.31.0",
     "aiohttp>=3.13.0", # used by azure-identity aio
+    "cachetools"
 ]
 
 [build-system]
diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/__init__.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/__init__.py
index 569166bc3786..d841a0afb459 100644
--- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/__init__.py
+++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/__init__.py
@@ -8,6 +8,7 @@ from ._version import VERSION
 from .tool_client import ToolClient
 from .langgraph import LangGraphAdapter
+from ..core.application._package_metadata import PackageMetadata, set_current_app
 
 if TYPE_CHECKING:  # pragma: no cover
     from . import models
@@ -26,3 +27,5 @@ def from_langgraph(
 __all__ = ["from_langgraph", "ToolClient"]
 
 __version__ = VERSION
+
+set_current_app(PackageMetadata.from_dist("azure-ai-agentserver-langgraph"))
\ No newline at end of file
diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py
index 51937fe31986..58a1c2b80faf 100644
--- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py
+++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py
@@ -11,7 +11,7 @@ from langchain_core.tools import StructuredTool
 from langgraph.graph.state import CompiledStateGraph
 
-from azure.ai.agentserver.core.client.tools import OAuthConsentRequiredError
+from ..core.tools._exceptions import OAuthConsentRequiredError
 from azure.ai.agentserver.core.constants import Constants
 from azure.ai.agentserver.core.logger import get_logger
 from azure.ai.agentserver.core.server.base import FoundryCBAgent
diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tool_client.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tool_client.py
index 78baf96bee80..9819e83dafca 100644
--- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tool_client.py
+++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tool_client.py
@@ -9,7 +9,8 @@ from pydantic import BaseModel, Field, create_model
 
 if TYPE_CHECKING:
-    from azure.ai.agentserver.core.client.tools.aio import AzureAIToolClient, FoundryTool
+    from azure.ai.agentserver.core.tools import FoundryToolClient, ResolvedFoundryTool
+
 
 # pylint: disable=client-accepts-api-version-keyword,missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs
 class ToolClient:
@@ -57,7 +58,7 @@ class ToolClient:
     :meta private:
     """
 
-    def __init__(self, tool_client: "AzureAIToolClient") -> None:
+    def __init__(self, tool_client: "FoundryToolClient") -> None:
         """Initialize the ToolClient.
 
         :param tool_client: The AzureAIToolClient instance to use for tool operations.
@@ -101,7 +102,7 @@ async def list_tools(self) -> List[StructuredTool]:
 
         return self._langchain_tools_cache
 
-    def _convert_to_langchain_tool(self, azure_tool: "FoundryTool") -> StructuredTool:
+    def _convert_to_langchain_tool(self, azure_tool: "ResolvedFoundryTool") -> StructuredTool:
         """Convert an AzureAITool to a LangChain StructuredTool.
 
         :param azure_tool: The AzureAITool to convert.
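Note: the tool_client.py changes are a mechanical rename (AzureAIToolClient becomes FoundryToolClient, FoundryTool becomes ResolvedFoundryTool); the wrapper is still constructed and consumed the same way. A condensed sketch of the updated wiring, using only names and constructor parameters that appear elsewhere in this diff (the sample file below shows the full end-to-end version):

# Illustrative sketch of the renamed types working together.
from azure.ai.agentserver.core.tools import FoundryToolClient
from azure.ai.agentserver.langgraph import ToolClient


async def load_langchain_tools(project_endpoint, credential, tool_definitions):
    foundry_client = FoundryToolClient(
        endpoint=project_endpoint,
        credential=credential,
        tools=tool_definitions,
    )
    # Each ResolvedFoundryTool is converted to a LangChain StructuredTool.
    return await ToolClient(foundry_client).list_tools()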
diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/__init__.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/__init__.py
new file mode 100644
index 000000000000..28077537d94b
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/__init__.py
@@ -0,0 +1,5 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/samples/tool_client_example/use_tool_client_example.py b/sdk/agentserver/azure-ai-agentserver-langgraph/samples/tool_client_example/use_tool_client_example.py
index 7daa62d0ec9f..2e64e67f3b11 100644
--- a/sdk/agentserver/azure-ai-agentserver-langgraph/samples/tool_client_example/use_tool_client_example.py
+++ b/sdk/agentserver/azure-ai-agentserver-langgraph/samples/tool_client_example/use_tool_client_example.py
@@ -14,7 +14,7 @@ from langchain_openai import AzureChatOpenAI
 from langgraph.checkpoint.memory import MemorySaver
 
-from azure.ai.agentserver.core.client.tools.aio import AzureAIToolClient
+from azure.ai.agentserver.core.tools import FoundryToolClient
 from azure.ai.agentserver.langgraph import ToolClient, from_langgraph
 from azure.identity.aio import DefaultAzureCredential
@@ -59,7 +59,7 @@ async def quickstart():
     ]
     # Create the AzureAIToolClient
     # This client supports both MCP tools and Azure AI Tools API
-    tool_client = AzureAIToolClient(
+    tool_client = FoundryToolClient(
         endpoint=project_endpoint,
         credential=credential,
         tools=tool_definitions