
Commit 76f935b

Lint and mypy fixes
1 parent 07af6de commit 76f935b

8 files changed: +47 -139 lines changed


sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_client.py
Lines changed: 1 addition & 55 deletions

@@ -13,58 +13,6 @@
 from .operations._operations import MCPToolsOperations, RemoteToolsOperations
 from ._utils._model_base import InvocationPayloadBuilder
 from ._model_base import FoundryTool, ToolSource
-
-class AzureAITool:
-    """Azure AI tool wrapper for invocation.
-
-    Represents a single tool that can be invoked either via MCP protocol or
-    Azure AI Tools API. This class provides a convenient interface for tool
-    invocation and exposes tool metadata.
-
-    :ivar str name: The name of the tool.
-    :ivar str description: Human-readable description of what the tool does.
-    :ivar dict metadata: Additional metadata about the tool from the API.
-    :ivar ~Tool_Client.models.ToolSource source:
-        The source of the tool (MCP_TOOLS or REMOTE_TOOLS).
-
-    .. admonition:: Example:
-
-        .. literalinclude:: ../samples/simple_example.py
-            :start-after: [START use_tool]
-            :end-before: [END use_tool]
-            :language: python
-            :dedent: 4
-            :caption: Using an AzureAITool instance.
-    """
-
-    def __init__(self, client: "AzureAIToolClient", descriptor: FoundryTool) -> None:
-        """Initialize an Azure AI Tool.
-
-        :param client: Parent client instance for making API calls.
-        :type client: AzureAIToolClient
-        :param descriptor: Tool descriptor containing metadata and configuration.
-        :type descriptor: ~Tool_Client.models.FoundryTool
-        """
-        self._client = client
-        self._descriptor = descriptor
-        self.name = descriptor.name
-        self.description = descriptor.description
-        self.metadata = dict(descriptor.metadata)
-        self.source = descriptor.source
-
-    def invoke(self, *args: Any, **kwargs: Any) -> Any:
-        """Invoke the tool synchronously.
-
-        :param args: Positional arguments to pass to the tool.
-        :param kwargs: Keyword arguments to pass to the tool.
-        :return: The result from the tool invocation.
-        :rtype: Any
-        """
-        payload = InvocationPayloadBuilder.build_payload(args, kwargs, {})
-        return self._client._invoke_tool(self._descriptor, payload)
-
-    def __call__(self, *args: Any, **kwargs: Any) -> Any:
-        return self.invoke(*args, **kwargs)

 class AzureAIToolClient:
     """Synchronous client for aggregating tools from Azure AI MCP and Tools APIs.

@@ -189,11 +137,9 @@ def invoke_tool(
         return self._invoke_tool(descriptor, payload, **kwargs)

     def _resolve_tool_descriptor(
-        self, tool: Union[AzureAITool, str, FoundryTool]
+        self, tool: Union[str, FoundryTool]
     ) -> FoundryTool:
         """Resolve a tool reference to a descriptor."""
-        if isinstance(tool, AzureAITool):
-            return tool._descriptor
         if isinstance(tool, FoundryTool):
             return tool
         if isinstance(tool, str):
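
A minimal usage sketch after this removal: the wrapper class is gone, so callers hand a tool name or FoundryTool descriptor straight to the client. The import path, endpoint, and constructor keywords are assumptions inferred from the file layout and the get_tool_client() call in base.py further down, not a documented API.

# Sketch only: import path, endpoint value, tool name and kwargs are placeholders.
from azure.identity import DefaultAzureCredential
from azure.ai.agentserver.core.client.tools import AzureAIToolClient  # path assumed from repo layout

client = AzureAIToolClient(
    endpoint="https://<your-project-endpoint>",   # hypothetical endpoint
    credential=DefaultAzureCredential(),
)
result = client.invoke_tool("my_tool", query="hello")   # tool referenced by name (str) or FoundryTool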

sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_client.py
Lines changed: 2 additions & 56 deletions

@@ -16,58 +16,6 @@
 if TYPE_CHECKING:
     from azure.core.credentials_async import AsyncTokenCredential

-class AzureAITool:
-    """Azure AI tool wrapper for invocation.
-
-    Represents a single tool that can be invoked either via MCP protocol or
-    Azure AI Tools API. This class provides a convenient interface for tool
-    invocation and exposes tool metadata.
-
-    :ivar str name: The name of the tool.
-    :ivar str description: Human-readable description of what the tool does.
-    :ivar dict metadata: Additional metadata about the tool from the API.
-    :ivar ~Tool_Client.models.ToolSource source:
-        The source of the tool (MCP_TOOLS or REMOTE_TOOLS).
-
-    .. admonition:: Example:
-
-        .. literalinclude:: ../samples/simple_example.py
-            :start-after: [START use_tool]
-            :end-before: [END use_tool]
-            :language: python
-            :dedent: 4
-            :caption: Using an AzureAITool instance.
-    """
-
-    def __init__(self, client: "AzureAIToolClient", descriptor: FoundryTool) -> None:
-        """Initialize an Azure AI Tool.
-
-        :param client: Parent client instance for making API calls.
-        :type client: AzureAIToolClient
-        :param descriptor: Tool descriptor containing metadata and configuration.
-        :type descriptor: ~Tool_Client.models.FoundryTool
-        """
-        self._client = client
-        self._descriptor = descriptor
-        self.name = descriptor.name
-        self.description = descriptor.description
-        self.metadata = dict(descriptor.metadata)
-        self.source = descriptor.source
-
-    async def invoke(self, *args: Any, **kwargs: Any) -> Any:
-        """Invoke the tool asynchronously.
-
-        :param args: Positional arguments to pass to the tool.
-        :param kwargs: Keyword arguments to pass to the tool.
-        :return: The result from the tool invocation.
-        :rtype: Any
-        """
-        payload = InvocationPayloadBuilder.build_payload(args, kwargs, {})
-        return await self._client._invoke_tool(self._descriptor, payload)
-
-    async def __call__(self, *args: Any, **kwargs: Any) -> Any:
-        return await self.invoke(*args, **kwargs)
-
 class AzureAIToolClient:
     """Asynchronous client for aggregating tools from Azure AI MCP and Tools APIs.

@@ -177,7 +125,7 @@ async def _invoker(*args, **kwargs):

     async def invoke_tool(
         self,
-        tool: Union[AzureAITool, str, FoundryTool],
+        tool: Union[str, FoundryTool],
         *args: Any,
         **kwargs: Any,
     ) -> Any:

@@ -193,11 +141,9 @@ async def invoke_tool(
         return await self._invoke_tool(descriptor, payload, **kwargs)

     async def _resolve_tool_descriptor(
-        self, tool: Union[AzureAITool, str, FoundryTool]
+        self, tool: Union[str, FoundryTool]
     ) -> FoundryTool:
         """Resolve a tool reference to a descriptor."""
-        if isinstance(tool, AzureAITool):
-            return tool._descriptor
         if isinstance(tool, FoundryTool):
             return tool
         if isinstance(tool, str):
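
The aio client follows the same shape; a short async sketch under the same assumptions (the aio module path matches the docstring reference in tool_client.py below).

# Sketch only: mirrors the sync example above; endpoint and tool name are placeholders.
from azure.identity.aio import DefaultAzureCredential
from azure.ai.agentserver.core.client.tools.aio import AzureAIToolClient

async def call_tool() -> None:
    client = AzureAIToolClient(
        endpoint="https://<your-project-endpoint>",   # hypothetical endpoint
        credential=DefaultAzureCredential(),
    )
    result = await client.invoke_tool("my_tool", query="hello")
    print(result)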

sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/constants.py
Lines changed: 1 addition & 0 deletions

@@ -11,3 +11,4 @@ class Constants:
     AGENT_LOG_LEVEL = "AGENT_LOG_LEVEL"
     AGENT_DEBUG_ERRORS = "AGENT_DEBUG_ERRORS"
     ENABLE_APPLICATION_INSIGHTS_LOGGER = "AGENT_APP_INSIGHTS_ENABLED"
+    AZURE_AI_WORKSPACE_ENDPOINT = "AZURE_AI_WORKSPACE_ENDPOINT"
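
The new constant names the environment variable that base.py reads below; a short sketch of how it is consumed:

# Sketch: mirrors the lookup added in base.py in this same commit.
import os
from azure.ai.agentserver.core.constants import Constants

workspace_endpoint = os.getenv(Constants.AZURE_AI_WORKSPACE_ENDPOINT)
if workspace_endpoint:
    print(f"Routing tool calls through workspace endpoint: {workspace_endpoint}")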

sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py
Lines changed: 28 additions & 13 deletions

@@ -88,22 +88,23 @@ def set_run_context_to_context_var(self, run_context):
         ctx.update(res)
         request_context.set(ctx)

-    def set_user_info_to_context_var(self, request):
-        user_info: UserInfo = {}
+    def set_user_info_to_context_var(self, request) -> UserInfo:
+        user_info: UserInfo = None
         try:
             object_id_header = request.headers.get("x-aml-oid", None)
-            tenant_id_header = request.headers.get("x-aml-tenant-id", None)
-
-            if object_id_header:
-                user_info["object_id"] = object_id_header
-            if tenant_id_header:
-                user_info["tenant_id"] = tenant_id_header
+            tenant_id_header = request.headers.get("x-aml-tid", None)
+            if not object_id_header and not tenant_id_header:
+                return None
+            user_info = UserInfo(
+                objectId=object_id_header,
+                tenantId=tenant_id_header
+            )

         except Exception as e:
             logger.error(f"Failed to parse X-User-Info header: {e}", exc_info=True)
         if user_info:
             ctx = request_context.get() or {}
-            for key, value in user_info.items():
+            for key, value in user_info.to_dict().items():
                 ctx[f"azure.ai.agentserver.user.{key}"] = str(value)
             request_context.set(ctx)
         return user_info

@@ -340,12 +341,26 @@ def get_tool_client(
         logger.debug("Creating AzureAIToolClient with tools: %s", tools)
         if not self.credentials:
             raise ValueError("Credentials are required to create Tool Client.")
-        return AzureAIToolClient(
+
+        workspace_endpoint = os.getenv(Constants.AZURE_AI_WORKSPACE_ENDPOINT)
+        if workspace_endpoint:
+            agent_name = os.getenv(Constants.AGENT_NAME)
+            if not agent_name:
+                raise ValueError("AGENT_NAME environment variable is required when using workspace endpoint.")
+            return AzureAIToolClient(
+                endpoint=workspace_endpoint,
+                credential=self.credentials,
+                tools=tools,
+                user=user_info,
+                agent_name=agent_name,
+            )
+        else:
+            return AzureAIToolClient(
             endpoint=os.getenv(Constants.AZURE_AI_PROJECT_ENDPOINT),
             credential=self.credentials,
-            tools = tools,
-            user = user_info,
-        )
+                tools=tools,
+                user=user_info,
+            )


 def _event_to_sse_chunk(event: ResponseStreamEvent) -> str:
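
For reference, the identity headers the server now parses (note the tenant header name changed from "x-aml-tenant-id" to "x-aml-tid") and how the resulting UserInfo fields land on the request context. The UserInfo import path follows the import added in agent_run_context.py below; everything else is an illustrative sketch, not the server's actual request handling.

# Sketch: header names come from the diff above; values are placeholders.
from azure.ai.agentserver.core.client.tools._model_base import UserInfo  # path assumed from the import below

headers = {
    "x-aml-oid": "<caller object id>",
    "x-aml-tid": "<caller tenant id>",   # previously read as "x-aml-tenant-id"
}
user_info = UserInfo(objectId=headers["x-aml-oid"], tenantId=headers["x-aml-tid"])
for key, value in user_info.to_dict().items():
    # Each field is exposed on the request context under this prefix.
    print(f"azure.ai.agentserver.user.{key} = {value}")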

sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py
Lines changed: 3 additions & 3 deletions

@@ -7,7 +7,7 @@
 from ...models.projects import AgentId, AgentReference, ResponseConversation1
 from .id_generator.foundry_id_generator import FoundryIdGenerator
 from .id_generator.id_generator import IdGenerator
-
+from ...client.tools._model_base import UserInfo
 logger = get_logger()


@@ -19,7 +19,7 @@ def __init__(self, payload: dict, **kwargs: Any) -> None:
         self._response_id = self._id_generator.response_id
         self._conversation_id = self._id_generator.conversation_id
         self._stream = self.request.get("stream", False)
-        self._user_info = kwargs.get("user_info", {})
+        self._user_info = kwargs.get("user_info", None)
         self._agent_tools = kwargs.get("agent_tools", [])

     @property

@@ -70,7 +70,7 @@ def get_tools(self) -> list:
             return self._agent_tools
         return request_tools

-    def get_user_info(self) -> dict:
+    def get_user_info(self) -> UserInfo:
         return self._user_info

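Since the default user_info is now None rather than an empty dict, callers of get_user_info() should guard for a missing user; a short sketch of that pattern:

# Sketch: `context` stands for an existing AgentRunContext instance.
user_info = context.get_user_info()   # UserInfo or None after this change
if user_info is not None:
    print(user_info.to_dict())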

sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/__init__.py
Lines changed: 1 addition & 1 deletion

@@ -7,6 +7,7 @@

 from ._version import VERSION
 from .tool_client import ToolClient
+from .langgraph import LangGraphAdapter

 if TYPE_CHECKING:  # pragma: no cover
     from . import models

@@ -19,7 +20,6 @@ def from_langgraph(
     state_converter: Optional["models.LanggraphStateConverter"] = None,
     **kwargs: Any
 ) -> "LangGraphAdapter":
-    from .langgraph import LangGraphAdapter

     return LangGraphAdapter(agent, credentials=credentials, state_converter=state_converter, **kwargs)
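
With LangGraphAdapter imported at module scope, from_langgraph no longer needs the local import. A hedged usage sketch follows; the credential type and the trivial graph are placeholders, not prescribed by this package.

# Sketch only: a minimal graph just to show the call shape; credential type is an assumption.
from azure.identity import DefaultAzureCredential
from langgraph.graph import StateGraph, MessagesState, START, END
from azure.ai.agentserver.langgraph import from_langgraph

builder = StateGraph(MessagesState)
builder.add_node("echo", lambda state: {"messages": state["messages"]})
builder.add_edge(START, "echo")
builder.add_edge("echo", END)

adapter = from_langgraph(builder.compile(), credentials=DefaultAzureCredential())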

sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py
Lines changed: 5 additions & 5 deletions

@@ -70,7 +70,7 @@ def __init__(
         :param state_converter: custom state converter. Required if graph state is not MessagesState.
         :type state_converter: Optional[LanggraphStateConverter]
         """
-        super().__init__(credentials=credentials, **kwargs)
+        super().__init__(credentials=credentials, **kwargs)  # pylint: disable=unexpected-keyword-arg
         self._graph_or_factory: Union[CompiledStateGraph, GraphFactory] = graph
         self._resolved_graph: "Optional[CompiledStateGraph]" = None
         self.azure_ai_tracer = None

@@ -140,7 +140,7 @@ async def _resolve_graph(self, context: AgentRunContext):


         # Create ToolClient with credentials
-        tool_client = self.get_tool_client(tools = context.get_tools(), user_info = context.get_user_info())
+        tool_client = self.get_tool_client(tools = context.get_tools(), user_info = context.get_user_info())  # pylint: disable=no-member
         tool_client_wrapper = ToolClient(tool_client)
         tools = await tool_client_wrapper.list_tools()
         # Call the factory function with ToolClient

@@ -153,7 +153,7 @@ async def _resolve_graph(self, context: AgentRunContext):
             self._resolved_graph = result

         # Validate and set up state converter if not already set from initialization
-        if not self.state_converter:
+        if not self.state_converter and self._resolved_graph is not None:
             if is_state_schema_valid(self._resolved_graph.builder.state_schema):
                 self.state_converter = LanggraphMessageStateConverter()
             else:

@@ -178,13 +178,13 @@ async def _resolve_graph_for_request(self, context: AgentRunContext):
         logger.debug("Resolving fresh graph from factory function for request")

         # Create ToolClient with credentials
-        tool_client = self.get_tool_client(tools = context.get_tools(), user_info = context.get_user_info())
+        tool_client = self.get_tool_client(tools = context.get_tools(), user_info = context.get_user_info())  # pylint: disable=no-member
         tool_client_wrapper = ToolClient(tool_client)
         tools = await tool_client_wrapper.list_tools()
         # Call the factory function with ToolClient
         # Support both sync and async factories
         import inspect
-        result = self._graph_or_factory(tools)
+        result = self._graph_or_factory(tools)  # type: ignore[operator]
         if inspect.iscoroutine(result):
             graph = await result
         else:
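
The `# type: ignore[operator]` covers the case where _graph_or_factory is a factory callable rather than an already-compiled graph, and the inspect.iscoroutine() check keeps both sync and async factories working. A minimal sketch of such a factory under those assumptions (graph topology is illustrative only):

# Sketch: a factory receives the LangChain tools listed by ToolClient and returns
# a compiled graph; an async factory returns a coroutine the adapter awaits.
from langgraph.graph import StateGraph, MessagesState, START, END
from langgraph.prebuilt import ToolNode

def build_graph(tools):
    builder = StateGraph(MessagesState)
    builder.add_node("tools", ToolNode(tools))
    builder.add_edge(START, "tools")
    builder.add_edge("tools", END)
    return builder.compile()

async def build_graph_async(tools):
    # Detected via inspect.iscoroutine() and awaited by the adapter.
    return build_graph(tools)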

sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tool_client.py
Lines changed: 6 additions & 6 deletions

@@ -64,7 +64,7 @@ def __init__(self, tool_client: "AzureAIToolClient") -> None:
         :type tool_client: ~azure.ai.agentserver.core.client.tools.aio.AzureAIToolClient
         """
         self._tool_client = tool_client
-        self._langchain_tools_cache: List[StructuredTool] = None
+        self._langchain_tools_cache: Optional[List[StructuredTool]] = None

     async def list_tools(self) -> List[StructuredTool]:
         """List all available tools as LangChain BaseTool instances.

@@ -115,7 +115,7 @@ def _convert_to_langchain_tool(self, azure_tool: "FoundryTool") -> StructuredTool:
         # Create a Pydantic model for the tool's input schema
         args_schema = self._create_pydantic_model(
             tool_name=azure_tool.name,
-            schema=input_schema
+            schema=dict(input_schema)
         )

         # Create an async function that invokes the tool

@@ -176,13 +176,13 @@ def _create_pydantic_model(
             )
         else:
             field_definitions[prop_name] = (
-                Optional[prop_type],
-                Field(None, description=prop_description)
+                prop_type,
+                Field(default=None, description=prop_description)
             )

         # Create the model dynamically
-        model_name = f"{tool_name.replace('-', '_').replace(' ', '_').title()}Input"
-        return create_model(model_name, **field_definitions)
+        model_name = f"{tool_name.replace('-', '_').replace(' ', '_').title()}-Input"
+        return create_model(model_name, **field_definitions)  # type: ignore[call-overload]

     def _json_type_to_python_type(self, json_type: str) -> type:
         """Convert JSON schema type to Python type.
