diff --git a/eng/.docsettings.yml b/eng/.docsettings.yml index 715e5002af8b..6d53ed8ee718 100644 --- a/eng/.docsettings.yml +++ b/eng/.docsettings.yml @@ -22,7 +22,7 @@ omitted_paths: language: python root_check_enabled: True required_readme_sections: - - ^Azure (.+ client library for Python|Smoke Test for Python) + - ^Azure (.+ client library for Python|Smoke Test for Python|Agent Server Adapter for Python) - ^Getting started$ - ^Key concepts$ - ^Examples$ diff --git a/eng/tools/azure-sdk-tools/ci_tools/functions.py b/eng/tools/azure-sdk-tools/ci_tools/functions.py index 09295b829000..d3e50c7f3f0c 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/functions.py +++ b/eng/tools/azure-sdk-tools/ci_tools/functions.py @@ -55,7 +55,12 @@ "sdk/textanalytics/azure-ai-textanalytics", ] -TEST_COMPATIBILITY_MAP = {"azure-ai-ml": ">=3.7"} +TEST_COMPATIBILITY_MAP = { + "azure-ai-ml": ">=3.7", + "azure-ai-agentserver-core": ">=3.9", # override to allow build with python 3.9 + "azure-ai-agentserver-langgraph": ">=3.9", # override to allow build with python 3.9 + "azure-ai-agentserver-agentframework": ">=3.9", # override to allow build with python 3.9 +} TEST_PYTHON_DISTRO_INCOMPATIBILITY_MAP = { "azure-storage-blob": "pypy", "azure-storage-queue": "pypy", diff --git a/sdk/ai/azure-ai-agentserver-agentframework/CHANGELOG.md b/sdk/ai/azure-ai-agentserver-agentframework/CHANGELOG.md new file mode 100644 index 000000000000..7ce1742693b8 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/CHANGELOG.md @@ -0,0 +1,7 @@ +# Release History + +## 1.0.0a1 (2025-11-06) + +### Features Added + +First version diff --git a/sdk/ai/azure-ai-agentserver-agentframework/LICENSE b/sdk/ai/azure-ai-agentserver-agentframework/LICENSE new file mode 100644 index 000000000000..63447fd8bbbf --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. 
+ +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-agentframework/MANIFEST.in b/sdk/ai/azure-ai-agentserver-agentframework/MANIFEST.in new file mode 100644 index 000000000000..062fdaf366a0 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/MANIFEST.in @@ -0,0 +1,9 @@ +include *.md +include LICENSE +recursive-include tests *.py +recursive-include samples *.py *.md +recursive-include doc *.rst *.md +include azure/__init__.py +include azure/ai/__init__.py +include azure/ai/agentserver/__init__.py +include azure/ai/agentserver/agentframework/py.typed \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-agentframework/README.md b/sdk/ai/azure-ai-agentserver-agentframework/README.md new file mode 100644 index 000000000000..84bb8f8179a0 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/README.md @@ -0,0 +1,58 @@ +# Azure AI Agent Server adapter for agent-framework + + +## Getting started + +```bash +pip install azure-ai-agentserver-agentframework +``` + + +## Key concepts + +Azure AI Agent Server wraps your Agent Framework agent and hosts it in the cloud. + + +## Examples + +```python +# your existing agent +from my_framework_agent import my_awesome_agent + +# agent framework utils +from azure.ai.agentserver.agentframework import from_agent_framework + +if __name__ == "__main__": + # with this simple line, your agent will be hosted on http://localhost:8088 + from_agent_framework(my_awesome_agent).run() + +``` + +## Troubleshooting + +First run your agent with azure-ai-agentserver-agentframework locally. + +If it works locally but fails in the cloud, check your logs in the Application Insights resource connected to your Azure AI Foundry Project. + + +## Next steps + +Please visit [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agentserver-agentframework/samples) folder. 
There are several samples for you to build your agent with azure-ai-agentserver + + +## Contributing + +This project welcomes contributions and suggestions. Most contributions require +you to agree to a Contributor License Agreement (CLA) declaring that you have +the right to, and actually do, grant us the rights to use your contribution. +For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether +you need to provide a CLA and decorate the PR appropriately (e.g., label, +comment). Simply follow the instructions provided by the bot. You will only +need to do this once across all repos using our CLA. + +This project has adopted the +[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information, +see the Code of Conduct FAQ or contact opencode@microsoft.com with any +additional questions or comments. diff --git a/sdk/ai/azure-ai-agentserver-agentframework/azure/__init__.py b/sdk/ai/azure-ai-agentserver-agentframework/azure/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/__init__.py b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/__init__.py b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/__init__.py @@ -0,0 +1 @@ +__path__ = 
__import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py new file mode 100644 index 000000000000..af980a34799f --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py @@ -0,0 +1,16 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +__path__ = __import__("pkgutil").extend_path(__path__, __name__) + +from ._version import VERSION + + +def from_agent_framework(agent): + from .agent_framework import AgentFrameworkCBAgent + + return AgentFrameworkCBAgent(agent) + + +__all__ = ["from_agent_framework"] +__version__ = VERSION diff --git a/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_version.py b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_version.py new file mode 100644 index 000000000000..44465a1b2f12 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +VERSION = "1.0.0a1" diff --git a/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py new file mode 100644 index 000000000000..7177b522d2a9 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py @@ -0,0 +1,153 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +# pylint: disable=logging-fstring-interpolation +from __future__ import annotations + +import asyncio # pylint: disable=do-not-import-asyncio +import os +from typing import Any, AsyncGenerator, Union + +from agent_framework import AgentProtocol +from agent_framework.azure import AzureAIAgentClient # pylint: disable=no-name-in-module +from opentelemetry import trace + +from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent +from azure.ai.agentserver.core.constants import Constants as AdapterConstants +from azure.ai.agentserver.core.logger import get_logger +from azure.ai.agentserver.core.models import ( + CreateResponse, + Response as OpenAIResponse, + ResponseStreamEvent, +) +from azure.ai.projects import AIProjectClient +from azure.identity import DefaultAzureCredential + +from .models.agent_framework_input_converters import AgentFrameworkInputConverter +from .models.agent_framework_output_non_streaming_converter import ( + AgentFrameworkOutputNonStreamingConverter, +) +from .models.agent_framework_output_streaming_converter import AgentFrameworkOutputStreamingConverter +from .models.constants import Constants + +logger = get_logger() + + +class AgentFrameworkCBAgent(FoundryCBAgent): + """ + Adapter class for integrating Agent Framework agents with the FoundryCB agent 
interface. + + This class wraps an Agent Framework `AgentProtocol` instance and provides a unified interface + for running agents in both streaming and non-streaming modes. It handles input and output + conversion between the Agent Framework and the expected formats for FoundryCB agents. + + Parameters: + agent (AgentProtocol): An instance of an Agent Framework agent to be adapted. + + Usage: + - Instantiate with an Agent Framework agent. + - Call `agent_run` with a `CreateResponse` request body to execute the agent. + - Supports both streaming and non-streaming responses based on the `stream` flag. + """ + + def __init__(self, agent: AgentProtocol): + super().__init__() + self.agent = agent + logger.info(f"Initialized AgentFrameworkCBAgent with agent: {type(agent).__name__}") + + def _resolve_stream_timeout(self, request_body: CreateResponse) -> float: + """Resolve idle timeout for streaming updates. + + Order of precedence: + 1) request_body.stream_timeout_s (if provided) + 2) env var Constants.AGENTS_ADAPTER_STREAM_TIMEOUT_S + 3) Constants.DEFAULT_STREAM_TIMEOUT_S + + :param request_body: The CreateResponse request body. + :type request_body: CreateResponse + + :return: The resolved stream timeout in seconds. 
+ :rtype: float + """ + override = request_body.get("stream_timeout_s", None) + if override is not None: + return float(override) + env_val = os.getenv(Constants.AGENTS_ADAPTER_STREAM_TIMEOUT_S) + return float(env_val) if env_val is not None else float(Constants.DEFAULT_STREAM_TIMEOUT_S) + + def init_tracing(self): + exporter = os.environ.get(AdapterConstants.OTEL_EXPORTER_ENDPOINT) + app_insights_conn_str = os.environ.get(AdapterConstants.APPLICATION_INSIGHTS_CONNECTION_STRING) + project_endpoint = os.environ.get(AdapterConstants.AZURE_AI_PROJECT_ENDPOINT) + + if project_endpoint: + project_client = AIProjectClient(endpoint=project_endpoint, credential=DefaultAzureCredential()) + agent_client = AzureAIAgentClient(project_client=project_client) + agent_client.setup_azure_ai_observability() + elif exporter or app_insights_conn_str: + os.environ["WORKFLOW_ENABLE_OTEL"] = "true" + from agent_framework.observability import setup_observability + + setup_observability( + enable_sensitive_data=True, + otlp_endpoint=exporter, + applicationinsights_connection_string=app_insights_conn_str, + ) + self.tracer = trace.get_tracer(__name__) + + async def agent_run( + self, context: AgentRunContext + ) -> Union[ + OpenAIResponse, + AsyncGenerator[ResponseStreamEvent, Any], + ]: + logger.info(f"Starting agent_run with stream={context.stream}") + request_input = context.request.get("input") + + input_converter = AgentFrameworkInputConverter() + message = input_converter.transform_input(request_input) + logger.debug(f"Transformed input message type: {type(message)}") + + # Use split converters + if context.stream: + logger.info("Running agent in streaming mode") + streaming_converter = AgentFrameworkOutputStreamingConverter(context) + + async def stream_updates(): + update_count = 0 + timeout_s = self._resolve_stream_timeout(context.request) + logger.info("Starting streaming with idle-timeout=%.2fs", timeout_s) + for ev in streaming_converter.initial_events(): + yield ev + + # 
Iterate with per-update timeout; terminate if idle too long + aiter = self.agent.run_stream(message).__aiter__() + while True: + try: + update = await asyncio.wait_for(aiter.__anext__(), timeout=timeout_s) + except StopAsyncIteration: + logger.debug("Agent streaming iterator finished (StopAsyncIteration)") + break + except asyncio.TimeoutError: + logger.warning("Streaming idle timeout reached (%.1fs); terminating stream.", timeout_s) + for ev in streaming_converter.completion_events(): + yield ev + return + update_count += 1 + transformed = streaming_converter.transform_output_for_streaming(update) + for event in transformed: + yield event + for ev in streaming_converter.completion_events(): + yield ev + logger.info("Streaming completed with %d updates", update_count) + + return stream_updates() + + # Non-streaming path + logger.info("Running agent in non-streaming mode") + non_streaming_converter = AgentFrameworkOutputNonStreamingConverter(context) + result = await self.agent.run(message) + logger.debug(f"Agent run completed, result type: {type(result)}") + transformed_result = non_streaming_converter.transform_output_for_response(result) + logger.info("Agent run and transformation completed successfully") + return transformed_result diff --git a/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/__init__.py b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/__init__.py new file mode 100644 index 000000000000..fdf8caba9ef5 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/__init__.py @@ -0,0 +1,5 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- + +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_input_converters.py b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_input_converters.py new file mode 100644 index 000000000000..993be43e85c8 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_input_converters.py @@ -0,0 +1,120 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +# pylint: disable=too-many-nested-blocks,too-many-return-statements,too-many-branches +# mypy: disable-error-code="no-redef" +from __future__ import annotations + +from typing import Dict, List + +from agent_framework import ChatMessage, Role as ChatRole +from agent_framework._types import TextContent + +from azure.ai.agentserver.core.logger import get_logger + +logger = get_logger() + + +class AgentFrameworkInputConverter: + """Normalize inputs for agent.run. 
+ + Accepts: str | List | None + Returns: None | str | ChatMessage | list[str] | list[ChatMessage] + """ + + def transform_input( + self, + input: str | List[Dict] | None, + ) -> str | ChatMessage | list[str] | list[ChatMessage] | None: + logger.debug("Transforming input of type: %s", type(input)) + + if input is None: + return None + + if isinstance(input, str): + return input + + try: + if isinstance(input, list): + messages: list[str | ChatMessage] = [] + + for item in input: + # Case 1: ImplicitUserMessage with content as str or list of ItemContentInputText + if self._is_implicit_user_message(item): + content = item.get("content", None) + if isinstance(content, str): + messages.append(content) + elif isinstance(content, list): + text_parts: list[str] = [] + for content_item in content: + text_content = self._extract_input_text(content_item) + if text_content: + text_parts.append(text_content) + if text_parts: + messages.append(" ".join(text_parts)) + + # Case 2: Explicit message params (user/assistant/system) + elif ( + item.get("type") == "message" + and item.get("role") is not None + and item.get("content") is not None + ): + role_map = { + "user": ChatRole.USER, + "assistant": ChatRole.ASSISTANT, + "system": ChatRole.SYSTEM, + } + role = role_map.get(item.get("role", "user"), ChatRole.USER) + + content_text = "" + item_content = item.get("content", None) + if item_content and isinstance(item_content, list): + text_parts: list[str] = [] + for content_item in item_content: + item_text = self._extract_input_text(content_item) + if item_text: + text_parts.append(item_text) + content_text = " ".join(text_parts) if text_parts else "" + elif item_content and isinstance(item_content, str): + content_text = str(item_content) + + if content_text: + messages.append(ChatMessage(role=role, text=content_text)) + + # Determine the most natural return type + if not messages: + return None + if len(messages) == 1: + return messages[0] + if all(isinstance(m, str) for m in 
messages): + return [m for m in messages if isinstance(m, str)] + if all(isinstance(m, ChatMessage) for m in messages): + return [m for m in messages if isinstance(m, ChatMessage)] + + # Mixed content: coerce ChatMessage to str by extracting TextContent parts + result: list[str] = [] + for msg in messages: + if isinstance(msg, ChatMessage): + text_parts: list[str] = [] + for c in getattr(msg, "contents", []) or []: + if isinstance(c, TextContent): + text_parts.append(c.text) + result.append(" ".join(text_parts) if text_parts else str(msg)) + else: + result.append(str(msg)) + return result + + raise TypeError(f"Unsupported input type: {type(input)}") + except Exception as e: + logger.error("Error processing messages: %s", e, exc_info=True) + raise Exception(f"Error processing messages: {e}") from e # pylint: disable=broad-exception-raised + + def _is_implicit_user_message(self, item: Dict) -> bool: + return "content" in item and "role" not in item and "type" not in item + + def _extract_input_text(self, content_item: Dict) -> str: + if content_item.get("type") == "input_text" and "text" in content_item: + text_content = content_item.get("text") + if isinstance(text_content, str): + return text_content + return None # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_non_streaming_converter.py b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_non_streaming_converter.py new file mode 100644 index 000000000000..805a5eeb9dec --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_non_streaming_converter.py @@ -0,0 +1,232 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from __future__ import annotations + +import datetime +import json +from typing import Any, List + +from agent_framework import AgentRunResponse, FunctionResultContent +from agent_framework._types import FunctionCallContent, TextContent + +from azure.ai.agentserver.core import AgentRunContext +from azure.ai.agentserver.core.logger import get_logger +from azure.ai.agentserver.core.models import Response as OpenAIResponse +from azure.ai.agentserver.core.models.projects import ( + ItemContentOutputText, + ResponsesAssistantMessageItemResource, +) + +from .agent_id_generator import AgentIdGenerator +from .constants import Constants + +logger = get_logger() + + +class AgentFrameworkOutputNonStreamingConverter: # pylint: disable=name-too-long + """Non-streaming converter: AgentRunResponse -> OpenAIResponse.""" + + def __init__(self, context: AgentRunContext): + self._context = context + self._response_id = None + self._response_created_at = None + + def _ensure_response_started(self) -> None: + if not self._response_id: + self._response_id = self._context.response_id # type: ignore + if not self._response_created_at: + self._response_created_at = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) # type: ignore + + def _build_item_content_output_text(self, text: str) -> ItemContentOutputText: + return ItemContentOutputText(text=text, annotations=[]) + + def _new_assistant_message_item(self, message_text: str) -> ResponsesAssistantMessageItemResource: + item_content = self._build_item_content_output_text(message_text) + return ResponsesAssistantMessageItemResource( + id=self._context.id_generator.generate_message_id(), status="completed", content=[item_content] + ) + + def transform_output_for_response(self, response: AgentRunResponse) -> OpenAIResponse: + """Build an OpenAIResponse capturing all supported content types. + + Previously this method only emitted text message items. 
We now also capture: + - FunctionCallContent -> function_call output item + - FunctionResultContent -> function_call_output item + + to stay aligned with the streaming converter so no output is lost. + + :param response: The AgentRunResponse from the agent framework. + :type response: AgentRunResponse + + :return: The constructed OpenAIResponse. + :rtype: OpenAIResponse + """ + logger.debug("Transforming non-streaming response (messages=%d)", len(response.messages)) + self._ensure_response_started() + + completed_items: List[dict] = [] + + for i, message in enumerate(response.messages): + logger.debug("Non-streaming: processing message index=%d type=%s", i, type(message).__name__) + contents = getattr(message, "contents", None) + if not contents: + continue + for j, content in enumerate(contents): + logger.debug(" content index=%d in message=%d type=%s", j, i, type(content).__name__) + self._append_content_item(content, completed_items) + + response_data = self._construct_response_data(completed_items) + openai_response = OpenAIResponse(response_data) + logger.info( + "OpenAIResponse built (id=%s, items=%d)", + self._response_id, + len(completed_items), + ) + return openai_response + + # ------------------------- helper append methods ------------------------- + + def _append_content_item(self, content: Any, sink: List[dict]) -> None: + """Dispatch a content object to the appropriate append helper. + + Adding this indirection keeps the main transform method compact and makes it + simpler to extend with new content types later. + + :param content: The content object to append. + :type content: Any + :param sink: The list to append the converted content dict to. 
+ :type sink: List[dict] + + :return: None + :rtype: None + """ + if isinstance(content, TextContent): + self._append_text_content(content, sink) + elif isinstance(content, FunctionCallContent): + self._append_function_call_content(content, sink) + elif isinstance(content, FunctionResultContent): + self._append_function_result_content(content, sink) + else: + logger.debug("unsupported content type skipped: %s", type(content).__name__) + + def _append_text_content(self, content: TextContent, sink: List[dict]) -> None: + text_value = getattr(content, "text", None) + if not text_value: + return + item_id = self._context.id_generator.generate_message_id() + sink.append( + { + "id": item_id, + "type": "message", + "status": "completed", + "role": "assistant", + "content": [ + { + "type": "output_text", + "text": text_value, + "annotations": [], + "logprobs": [], + } + ], + } + ) + logger.debug(" added message item id=%s text_len=%d", item_id, len(text_value)) + + def _append_function_call_content(self, content: FunctionCallContent, sink: List[dict]) -> None: + name = getattr(content, "name", "") or "" + arguments = getattr(content, "arguments", "") + if not isinstance(arguments, str): + try: + arguments = json.dumps(arguments) + except Exception: # pragma: no cover - fallback # pylint: disable=broad-exception-caught + arguments = str(arguments) + call_id = getattr(content, "call_id", None) or self._context.id_generator.generate_function_call_id() + func_item_id = self._context.id_generator.generate_function_call_id() + sink.append( + { + "id": func_item_id, + "type": "function_call", + "status": "completed", + "call_id": call_id, + "name": name, + "arguments": arguments or "", + } + ) + logger.debug( + " added function_call item id=%s call_id=%s name=%s args_len=%d", + func_item_id, + call_id, + name, + len(arguments or ""), + ) + + def _append_function_result_content(self, content: FunctionResultContent, sink: List[dict]) -> None: + # Coerce the function result into a 
simple display string. + result = [] + raw = getattr(content, "result", None) + if isinstance(raw, str): + result = [raw] + elif isinstance(raw, list): + for item in raw: + result.append(self._coerce_result_text(item)) # type: ignore + call_id = getattr(content, "call_id", None) or "" + func_out_id = self._context.id_generator.generate_function_output_id() + sink.append( + { + "id": func_out_id, + "type": "function_call_output", + "status": "completed", + "call_id": call_id, + "output": json.dumps(result) if len(result) > 0 else "", + } + ) + logger.debug( + "added function_call_output item id=%s call_id=%s output_len=%d", + func_out_id, + call_id, + len(result), + ) + + # ------------- simple normalization helper ------------------------- + def _coerce_result_text(self, value: Any) -> str | dict: + """ + Return a string if value is already str or a TextContent-like object; else str(value). + + :param value: The value to coerce. + :type value: Any + + :return: The coerced string or dict. 
+ :rtype: str | dict + """ + if value is None: + return "" + if isinstance(value, str): + return value + # Direct TextContent instance + if isinstance(value, TextContent): + content_payload = {"type": "text", "text": getattr(value, "text", "")} + return content_payload + + return "" + + def _construct_response_data(self, output_items: List[dict]) -> dict: + agent_id = AgentIdGenerator.generate(self._context) + + response_data = { + "object": "response", + "metadata": {}, + "agent": agent_id, + "conversation": self._context.get_conversation_object(), + "type": "message", + "role": "assistant", + "temperature": Constants.DEFAULT_TEMPERATURE, + "top_p": Constants.DEFAULT_TOP_P, + "user": "", + "id": self._context.response_id, + "created_at": self._response_created_at, + "output": output_items, + "parallel_tool_calls": True, + "status": "completed", + } + return response_data diff --git a/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_streaming_converter.py b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_streaming_converter.py new file mode 100644 index 000000000000..d9bc3199efb5 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_streaming_converter.py @@ -0,0 +1,595 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +# pylint: disable=attribute-defined-outside-init,protected-access +# mypy: disable-error-code="call-overload,assignment,arg-type" +from __future__ import annotations + +import datetime +import json +import uuid +from typing import Any, List, Optional, cast + +from agent_framework import AgentRunResponseUpdate, FunctionApprovalRequestContent, FunctionResultContent +from agent_framework._types import ( + ErrorContent, + FunctionCallContent, + TextContent, +) + +from azure.ai.agentserver.core import AgentRunContext +from azure.ai.agentserver.core.logger import get_logger +from azure.ai.agentserver.core.models import ( + Response as OpenAIResponse, + ResponseStreamEvent, +) +from azure.ai.agentserver.core.models.projects import ( + FunctionToolCallItemResource, + FunctionToolCallOutputItemResource, + ItemContentOutputText, + ResponseCompletedEvent, + ResponseContentPartAddedEvent, + ResponseContentPartDoneEvent, + ResponseCreatedEvent, + ResponseErrorEvent, + ResponseFunctionCallArgumentsDeltaEvent, + ResponseFunctionCallArgumentsDoneEvent, + ResponseInProgressEvent, + ResponseOutputItemAddedEvent, + ResponseOutputItemDoneEvent, + ResponsesAssistantMessageItemResource, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, +) + +from .agent_id_generator import AgentIdGenerator + +logger = get_logger() + + +class _BaseStreamingState: + """Base interface for streaming state handlers.""" + + def prework(self, ctx: Any) -> List[ResponseStreamEvent]: # pylint: disable=unused-argument + return [] + + def convert_content(self, ctx: Any, content) -> List[ResponseStreamEvent]: # pylint: disable=unused-argument + raise NotImplementedError + + def afterwork(self, ctx: Any) -> List[ResponseStreamEvent]: # pylint: disable=unused-argument + return [] + + +class _TextContentStreamingState(_BaseStreamingState): + """State handler for text and reasoning-text content during streaming.""" + + def __init__(self, context: 
AgentRunContext) -> None: + self.context = context + self.item_id = None + self.output_index = None + self.text_buffer = "" + self.text_part_started = False + + def prework(self, ctx: Any) -> List[ResponseStreamEvent]: + events: List[ResponseStreamEvent] = [] + if self.item_id is not None: + return events + + # Start a new assistant message item (in_progress) + self.item_id = self.context.id_generator.generate_message_id() + self.output_index = ctx._next_output_index # pylint: disable=protected-access + ctx._next_output_index += 1 + + message_item = ResponsesAssistantMessageItemResource( + id=self.item_id, + status="in_progress", + content=[], + ) + + events.append( + ResponseOutputItemAddedEvent( + sequence_number=ctx.next_sequence(), + output_index=self.output_index, + item=message_item, + ) + ) + + if not self.text_part_started: + empty_part = ItemContentOutputText(text="", annotations=[], logprobs=[]) + events.append( + ResponseContentPartAddedEvent( + sequence_number=ctx.next_sequence(), + item_id=self.item_id, + output_index=self.output_index, + content_index=0, + part=empty_part, + ) + ) + self.text_part_started = True + return events + + def convert_content(self, ctx: Any, content: TextContent) -> List[ResponseStreamEvent]: + events: List[ResponseStreamEvent] = [] + if isinstance(content, TextContent): + delta = content.text or "" + else: + delta = getattr(content, "text", None) or getattr(content, "reasoning", "") or "" + + # buffer accumulated text + self.text_buffer += delta + + # emit delta event for text + assert self.item_id is not None, "Text state not initialized: missing item_id" + assert self.output_index is not None, "Text state not initialized: missing output_index" + events.append( + ResponseTextDeltaEvent( + sequence_number=ctx.next_sequence(), + item_id=self.item_id, + output_index=self.output_index, + content_index=0, + delta=delta, + ) + ) + return events + + def afterwork(self, ctx: Any) -> List[ResponseStreamEvent]: + events: 
List[ResponseStreamEvent] = [] + if not self.item_id: + return events + + full_text = self.text_buffer + assert self.item_id is not None and self.output_index is not None + events.append( + ResponseTextDoneEvent( + sequence_number=ctx.next_sequence(), + item_id=self.item_id, + output_index=self.output_index, + content_index=0, + text=full_text, + ) + ) + final_part = ItemContentOutputText(text=full_text, annotations=[], logprobs=[]) + events.append( + ResponseContentPartDoneEvent( + sequence_number=ctx.next_sequence(), + item_id=self.item_id, + output_index=self.output_index, + content_index=0, + part=final_part, + ) + ) + completed_item = ResponsesAssistantMessageItemResource( + id=self.item_id, status="completed", content=[final_part] + ) + events.append( + ResponseOutputItemDoneEvent( + sequence_number=ctx.next_sequence(), + output_index=self.output_index, + item=completed_item, + ) + ) + ctx._last_completed_text = full_text # pylint: disable=protected-access + # store for final response + ctx._completed_output_items.append( + { + "id": self.item_id, + "type": "message", + "status": "completed", + "content": [ + { + "type": "output_text", + "text": full_text, + "annotations": [], + "logprobs": [], + } + ], + "role": "assistant", + } + ) + # reset state + self.item_id = None + self.output_index = None + self.text_buffer = "" + self.text_part_started = False + return events + + +class _FunctionCallStreamingState(_BaseStreamingState): + """State handler for function_call content during streaming.""" + + def __init__(self, context: AgentRunContext) -> None: + self.context = context + self.item_id = None + self.output_index = None + self.call_id = None + self.name = None + self.args_buffer = "" + self.requires_approval = False + self.approval_request_id: str | None = None + + def prework(self, ctx: Any) -> List[ResponseStreamEvent]: + events: List[ResponseStreamEvent] = [] + if self.item_id is not None: + return events + # initialize function-call item + self.item_id 
class _FunctionCallStreamingState(_BaseStreamingState):
    """Streams a function tool call: argument deltas followed by a done item."""

    def __init__(self, context: AgentRunContext) -> None:
        self.context = context
        self.item_id = None
        self.output_index = None
        self.call_id = None
        self.name = None
        self.args_buffer = ""
        # Set by the converter when the call originates from an approval request.
        self.requires_approval = False
        self.approval_request_id: str | None = None

    def prework(self, ctx: Any) -> List[ResponseStreamEvent]:
        out: List[ResponseStreamEvent] = []
        if self.item_id is not None:
            return out  # item already open
        # Open an in-progress function-call item.
        self.item_id = self.context.id_generator.generate_function_call_id()
        self.output_index = ctx._next_output_index
        ctx._next_output_index += 1

        self.call_id = self.call_id or str(uuid.uuid4())
        out.append(
            ResponseOutputItemAddedEvent(
                sequence_number=ctx.next_sequence(),
                output_index=self.output_index,
                item=FunctionToolCallItemResource(
                    id=self.item_id,
                    status="in_progress",
                    call_id=self.call_id,
                    name=self.name or "",
                    arguments="",
                ),
            )
        )
        return out

    def convert_content(self, ctx: Any, content: FunctionCallContent) -> List[ResponseStreamEvent]:
        out: List[ResponseStreamEvent] = []
        # Capture identifiers as soon as the stream provides them.
        self.name = getattr(content, "name", None) or self.name or ""
        self.call_id = getattr(content, "call_id", None) or self.call_id or str(uuid.uuid4())

        raw_args = content.arguments
        chunk = raw_args if isinstance(raw_args, str) else json.dumps(raw_args)
        chunk = chunk or ""
        self.args_buffer += chunk
        assert self.item_id is not None and self.output_index is not None
        # One delta event per character, mirroring token-level streaming.
        for char in chunk:
            out.append(
                ResponseFunctionCallArgumentsDeltaEvent(
                    sequence_number=ctx.next_sequence(),
                    item_id=self.item_id,
                    output_index=self.output_index,
                    delta=char,
                )
            )

        # The framework exposes argument completion via several differently
        # named boolean flags; any one of them marks the call done.
        finished = any(
            bool(getattr(content, flag, False))
            for flag in (
                "is_final",
                "final",
                "done",
                "arguments_final",
                "arguments_done",
                "finish",
            )
        )
        if not finished and self.args_buffer:
            # Heuristic fallback: a buffer that parses as JSON is complete.
            try:
                json.loads(self.args_buffer)
                finished = True
            except Exception:  # pylint: disable=broad-exception-caught
                pass

        if finished:
            out.append(
                ResponseFunctionCallArgumentsDoneEvent(
                    sequence_number=ctx.next_sequence(),
                    item_id=self.item_id,
                    output_index=self.output_index,
                    arguments=self.args_buffer,
                )
            )
            out.extend(self.afterwork(ctx))
        return out

    def afterwork(self, ctx: Any) -> List[ResponseStreamEvent]:
        out: List[ResponseStreamEvent] = []
        if not self.item_id:
            return out
        assert self.call_id is not None
        assert self.output_index is not None
        out.append(
            ResponseOutputItemDoneEvent(
                sequence_number=ctx.next_sequence(),
                output_index=self.output_index,
                item=FunctionToolCallItemResource(
                    id=self.item_id,
                    status="completed",
                    call_id=self.call_id,
                    name=self.name or "",
                    arguments=self.args_buffer,
                ),
            )
        )
        # Record the call for the final non-streaming response body.
        ctx._completed_output_items.append(
            {
                "id": self.item_id,
                "type": "function_call",
                "call_id": self.call_id,
                "name": self.name or "",
                "arguments": self.args_buffer,
                "status": "requires_approval" if self.requires_approval else "completed",
                "requires_approval": self.requires_approval,
                "approval_request_id": self.approval_request_id,
            }
        )
        # Reset for the next call.
        self.item_id = None
        self.output_index = None
        self.args_buffer = ""
        self.call_id = None
        self.name = None
        self.requires_approval = False
        self.approval_request_id = None
        return out
class _FunctionCallOutputStreamingState(_BaseStreamingState):
    """Handles function_call_output items streaming (non-chunked simple output)."""

    def __init__(
        self,
        context: AgentRunContext,
        call_id: Optional[str] = None,
        output: Optional[list[str]] = None,
    ) -> None:
        """Initialize the state.

        :param context: The run context providing the id generator.
        :param call_id: Optional pre-known tool call id.
        :param output: Optional pre-supplied output strings.
        """
        # Avoid mutable default argument (Ruff B006)
        self.context = context
        self.item_id = None
        self.output_index = None
        self.call_id = call_id
        # Starts as a list of strings; convert_content() replaces it with the
        # serialized JSON string (or "") once the result is known.
        self.output = output if output is not None else []

    def prework(self, ctx: Any) -> List[ResponseStreamEvent]:
        events: List[ResponseStreamEvent] = []
        if self.item_id is not None:
            return events
        self.item_id = self.context.id_generator.generate_function_output_id()
        self.output_index = ctx._next_output_index
        ctx._next_output_index += 1

        self.call_id = self.call_id or str(uuid.uuid4())
        item = FunctionToolCallOutputItemResource(
            id=self.item_id,
            status="in_progress",
            call_id=self.call_id,
            output="",
        )
        events.append(
            ResponseOutputItemAddedEvent(
                sequence_number=ctx.next_sequence(),
                output_index=self.output_index,
                item=item,
            )
        )
        return events

    def convert_content(self, ctx: Any, content: Any) -> List[ResponseStreamEvent]:  # no delta events for now
        """Treat the entire function result as final and emit the done events.

        :param ctx: The converter (provides sequence numbers / accumulators).
        :param content: The function result content; its ``result`` may be a
            string or a list of content items.
        :return: The emitted stream events.
        """
        events: List[ResponseStreamEvent] = []
        result = []
        raw = getattr(content, "result", None)
        if isinstance(raw, str):
            # BUG FIX: previously `[raw or self.output]` appended the fallback
            # *list* itself when raw was empty, nesting a list in the result.
            result = [raw] if raw else list(self.output)
        elif isinstance(raw, list):
            for item in raw:
                result.append(self._coerce_result_text(item))
        self.output = json.dumps(result) if len(result) > 0 else ""

        events.extend(self.afterwork(ctx))
        return events

    def _coerce_result_text(self, value: Any) -> str | dict:
        """
        Coerce a result entry to a serializable value.

        Returns the value unchanged if it is a string, a ``{"type": "text"}``
        payload for a ``TextContent`` instance, and ``""`` for anything else
        (including ``None``).

        :param value: The value to coerce.
        :type value: Any

        :return: The coerced string or dict.
        :rtype: str | dict
        """
        if value is None:
            return ""
        if isinstance(value, str):
            return value
        # Direct TextContent instance
        if isinstance(value, TextContent):
            return {"type": "text", "text": getattr(value, "text", "")}

        return ""

    def afterwork(self, ctx: Any) -> List[ResponseStreamEvent]:
        events: List[ResponseStreamEvent] = []
        if not self.item_id:
            return events
        # Ensure types conform: call_id must be a str (guarantee non-None).
        str_call_id = self.call_id or ""
        # BUG FIX: after convert_content(), self.output is the serialized JSON
        # *string*; the previous `self.output[0]` emitted only its first
        # character ("["). Emit the whole string; fall back to the first
        # pre-supplied list entry only when convert_content() never ran.
        if isinstance(self.output, str):
            single_output: str = self.output
        elif self.output:
            single_output = cast(str, self.output[0])
        else:
            single_output = ""
        done_item = FunctionToolCallOutputItemResource(
            id=self.item_id,
            status="completed",
            call_id=str_call_id,
            output=single_output,
        )
        assert self.output_index is not None
        events.append(
            ResponseOutputItemDoneEvent(
                sequence_number=ctx.next_sequence(),
                output_index=self.output_index,
                item=done_item,
            )
        )
        ctx._completed_output_items.append(
            {
                "id": self.item_id,
                "type": "function_call_output",
                "status": "completed",
                "call_id": self.call_id,
                "output": single_output,
            }
        )
        self.item_id = None
        self.output_index = None
        return events
class AgentFrameworkOutputStreamingConverter:
    """Streaming converter using content-type-specific state handlers.

    One handler is active at a time ("text", "function_call",
    "function_call_output"); switching to a different content kind flushes
    the previous handler (``afterwork``) before the new one runs
    ``prework``. Completed output items are accumulated so the final
    response can include them.
    """

    def __init__(self, context: AgentRunContext) -> None:
        self._context = context
        # sequence numbers must start at 0 for first emitted event
        self._sequence = 0
        self._response_id = None
        self._response_created_at = None
        self._next_output_index = 0
        self._last_completed_text = ""
        self._active_state: Optional[_BaseStreamingState] = None
        self._active_kind = None  # "text" | "function_call" | "function_call_output" | "error"
        # accumulate completed output items for final response
        self._completed_output_items: List[dict] = []

    def _ensure_response_started(self) -> None:
        """Lazily capture the response id and creation timestamp (UTC epoch)."""
        if not self._response_id:
            self._response_id = self._context.response_id
        if not self._response_created_at:
            self._response_created_at = int(datetime.datetime.now(datetime.timezone.utc).timestamp())

    def next_sequence(self) -> int:
        """Return the next event sequence number, starting at 0.

        BUG FIX: the previous pre-increment implementation returned 1 for the
        first event, contradicting the stated contract (and the OpenAI
        streaming convention) that sequence numbers start at 0.

        :return: The 0-based sequence number for the next emitted event.
        :rtype: int
        """
        current = self._sequence
        self._sequence += 1
        return current

    def _switch_state(self, kind: str) -> List[ResponseStreamEvent]:
        """Flush the active handler if the kind changed and start the new one.

        :param kind: One of "text", "function_call", "function_call_output",
            or "error" (errors are stateless: no handler is created).
        :return: Events produced by flushing and initializing handlers.
        """
        events: List[ResponseStreamEvent] = []
        if self._active_state and self._active_kind != kind:
            events.extend(self._active_state.afterwork(self))
            self._active_state = None
            self._active_kind = None

        if self._active_state is None:
            if kind == "text":
                self._active_state = _TextContentStreamingState(self._context)
            elif kind == "function_call":
                self._active_state = _FunctionCallStreamingState(self._context)
            elif kind == "function_call_output":
                self._active_state = _FunctionCallOutputStreamingState(self._context)
            else:
                self._active_state = None
            self._active_kind = kind
            if self._active_state:
                # prework() is idempotent (guards on item_id), so re-entry
                # for the same kind is safe.
                events.extend(self._active_state.prework(self))
        return events

    def transform_output_for_streaming(self, update: AgentRunResponseUpdate) -> List[ResponseStreamEvent]:
        """Convert one agent-framework update into response stream events.

        :param update: The streaming update whose ``contents`` are dispatched
            to the handler matching each content's type.
        :return: The stream events produced for this update.
        """
        logger.debug(
            "Transforming streaming update with %d contents",
            len(update.contents) if getattr(update, "contents", None) else 0,
        )
        self._ensure_response_started()
        events: List[ResponseStreamEvent] = []

        if getattr(update, "contents", None):
            for i, content in enumerate(update.contents):
                logger.debug("Processing content %d: %s", i, type(content))
                if isinstance(content, TextContent):
                    events.extend(self._switch_state("text"))
                    if isinstance(self._active_state, _TextContentStreamingState):
                        events.extend(self._active_state.convert_content(self, content))
                elif isinstance(content, FunctionCallContent):
                    events.extend(self._switch_state("function_call"))
                    if isinstance(self._active_state, _FunctionCallStreamingState):
                        events.extend(self._active_state.convert_content(self, content))
                elif isinstance(content, FunctionResultContent):
                    events.extend(self._switch_state("function_call_output"))
                    if isinstance(self._active_state, _FunctionCallOutputStreamingState):
                        call_id = getattr(content, "call_id", None)
                        if call_id:
                            self._active_state.call_id = call_id
                        events.extend(self._active_state.convert_content(self, content))
                elif isinstance(content, FunctionApprovalRequestContent):
                    # Approval requests surface as function calls flagged as
                    # requiring approval.
                    events.extend(self._switch_state("function_call"))
                    if isinstance(self._active_state, _FunctionCallStreamingState):
                        self._active_state.requires_approval = True
                        self._active_state.approval_request_id = getattr(content, "id", None)
                        events.extend(self._active_state.convert_content(self, content.function_call))
                elif isinstance(content, ErrorContent):
                    # errors are stateless; flush current state and emit error
                    events.extend(self._switch_state("error"))
                    events.append(
                        ResponseErrorEvent(
                            sequence_number=self.next_sequence(),
                            code=getattr(content, "error_code", None) or "server_error",
                            message=getattr(content, "message", None) or "An error occurred",
                            param="",
                        )
                    )
        return events

    def finalize_last_content(self) -> List[ResponseStreamEvent]:
        """Flush the active handler (if any) and clear the active state.

        :return: The flush events, or an empty list when nothing is active.
        """
        events: List[ResponseStreamEvent] = []
        if self._active_state:
            events.extend(self._active_state.afterwork(self))
            self._active_state = None
            self._active_kind = None
        return events

    def build_response(self, status: str) -> OpenAIResponse:
        """Build the response payload for lifecycle events.

        :param status: Response status, e.g. "in_progress" or "completed".
        :return: The response model; completed responses include the
            accumulated output items.
        """
        self._ensure_response_started()
        agent_id = AgentIdGenerator.generate(self._context)
        response_data = {
            "object": "response",
            "agent_id": agent_id,
            "id": self._response_id,
            "status": status,
            "created_at": self._response_created_at,
        }
        if status == "completed" and self._completed_output_items:
            response_data["output"] = self._completed_output_items
        return OpenAIResponse(response_data)

    # High-level helpers to emit lifecycle events for streaming
    def initial_events(self) -> List[ResponseStreamEvent]:
        """
        Emit ResponseCreatedEvent and an initial ResponseInProgressEvent.

        :return: List of initial response stream events.
        :rtype: List[ResponseStreamEvent]
        """
        self._ensure_response_started()
        events: List[ResponseStreamEvent] = []
        created_response = self.build_response(status="in_progress")
        events.append(
            ResponseCreatedEvent(
                sequence_number=self.next_sequence(),
                response=created_response,
            )
        )
        events.append(
            ResponseInProgressEvent(
                sequence_number=self.next_sequence(),
                response=self.build_response(status="in_progress"),
            )
        )
        return events

    def completion_events(self) -> List[ResponseStreamEvent]:
        """
        Finalize any active content and emit a single ResponseCompletedEvent.

        :return: List of completion response stream events.
        :rtype: List[ResponseStreamEvent]
        """
        self._ensure_response_started()
        events: List[ResponseStreamEvent] = []
        events.extend(self.finalize_last_content())
        completed_response = self.build_response(status="completed")
        events.append(
            ResponseCompletedEvent(
                sequence_number=self.next_sequence(),
                response=completed_response,
            )
        )
        return events
+""" + +from __future__ import annotations + +from typing import Optional + +from azure.ai.agentserver.core import AgentRunContext +from azure.ai.agentserver.core.models import projects + + +class AgentIdGenerator: + @staticmethod + def generate(context: AgentRunContext) -> Optional[projects.AgentId]: + """ + Builds an AgentId model from the request agent object in the provided context. + + :param context: The AgentRunContext containing the request. + :type context: AgentRunContext + + :return: The constructed AgentId model, or None if the request lacks an agent name. + :rtype: Optional[projects.AgentId] + """ + agent = context.request.get("agent") + if not agent: + return None + + agent_id = projects.AgentId( + { + "type": agent.type, + "name": agent.name, + "version": agent.version, + } + ) + + return agent_id diff --git a/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/constants.py b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/constants.py new file mode 100644 index 000000000000..859e115e425e --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/constants.py @@ -0,0 +1,13 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
class Constants:
    """Adapter-wide constant values (streaming and model defaults)."""

    # --- streaming configuration ---
    # Name of the environment variable that overrides the idle timeout
    # (in seconds) between streaming updates.
    AGENTS_ADAPTER_STREAM_TIMEOUT_S = "AGENTS_ADAPTER_STREAM_TIMEOUT_S"
    # Idle timeout (seconds) applied when neither the environment variable
    # nor a per-request override is provided.
    DEFAULT_STREAM_TIMEOUT_S = 300.0

    # --- model defaults ---
    DEFAULT_TEMPERATURE = 1.0
    DEFAULT_TOP_P = 1.0
via [tool.setuptools.dynamic] below +dynamic = ["version", "readme"] +description = "Agents server adapter for Azure AI" +requires-python = ">=3.10" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +license = "MIT" +classifiers = [ + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +keywords = ["azure", "azure sdk"] + +dependencies = [ + "azure-ai-agentserver-core", + "agent_framework_azure_ai>=1.0.0b251007", + "opentelemetry-exporter-otlp-proto-grpc>=1.36.0", +] + +[build-system] +requires = ["setuptools>=69", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "samples*", + "doc*", + "azure", + "azure.ai", +] + +[tool.setuptools.dynamic] +version = { attr = "azure.ai.agentserver.agentframework._version.VERSION" } +readme = { file = ["README.md"], content-type = "text/markdown" } + +[tool.setuptools.package-data] +pytyped = ["py.typed"] + +[tool.ruff] +line-length = 120 +target-version = "py311" +lint.select = ["E", "F", "B", "I"] # E=pycodestyle errors, F=Pyflakes, B=bugbear, I=import sort +lint.ignore = [] +fix = false + +[tool.ruff.lint.isort] +known-first-party = ["azure.ai.agentserver.agentframework"] +combine-as-imports = true + +[tool.azure-sdk-build] +verifytypes = false # has unknown dependencies +pyright = false diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/basic_simple/.envtemplate b/sdk/ai/azure-ai-agentserver-agentframework/samples/basic_simple/.envtemplate new file mode 100644 index 000000000000..bd646f163bb7 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/samples/basic_simple/.envtemplate @@ -0,0 +1,3 @@ +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ 
+OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/basic_simple/README.md b/sdk/ai/azure-ai-agentserver-agentframework/samples/basic_simple/README.md new file mode 100644 index 000000000000..64f19cefcbcb --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/samples/basic_simple/README.md @@ -0,0 +1,46 @@ +pip install -e src/adapter/python +# Agent Framework Sample + +This sample demonstrates how to use the agents hosting adapter with Microsoft Agent Framework. + +## Prerequisites + +> **Azure sign-in:** Run `az login` before starting the sample so `DefaultAzureCredential` can acquire a CLI token. + +### Environment Variables + +Copy `.envtemplate` to `.env` and supply: + +``` +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= +``` + +## Running the Sample + +Follow these steps from this folder: + +1) Start the agent server (defaults to 0.0.0.0:8088): + +```bash +python minimal_example.py +``` + +2) Send a non-streaming request (returns a single JSON response): + +```bash +curl -sS \ + -H "Content-Type: application/json" \ + -X POST http://localhost:8088/responses \ + -d "{\"input\":\"What's the weather like in Seattle?\",\"stream\":false}" +``` + +3) Send a streaming request (server-sent events). 
def get_weather(
    location: Annotated[str, "The location to get the weather for."],
) -> str:
    """Get the (randomly simulated) weather for a given location.

    :param location: The location to get the weather for.
    :return: A human-readable one-sentence weather summary.
    """
    conditions = ["sunny", "cloudy", "rainy", "stormy"]
    # Derive the index bound from the list so adding/removing a condition
    # cannot go out of sync with a hard-coded `randint(0, 3)` upper bound.
    condition = conditions[randint(0, len(conditions) - 1)]
    return f"The weather in {location} is {condition} with a high of {randint(10, 30)}°C."
def main() -> None:
    """Create the weather agent and serve it over HTTP via the adapter."""
    chat_client = AzureOpenAIChatClient(credential=DefaultAzureCredential())
    weather_agent = chat_client.create_agent(
        instructions="You are a helpful weather agent.",
        tools=get_weather,
    )
    # Blocks serving requests until the process is stopped.
    from_agent_framework(weather_agent).run()


if __name__ == "__main__":
    main()
+ +### Script + +- `mcp_apikey.py` – Creates a `ChatAgent` configured with an `AzureOpenAIChatClient` and a GitHub MCP tool, then serves it via the agents hosting adapter (`from_agent_framework(...).run_async()`). + +## Prerequisites + +> **Azure sign-in:** Run `az login` before starting the sample so `DefaultAzureCredential` can acquire a CLI token. + +### Environment Variables + +Copy `.envtemplate` to `.env` and supply: + +``` +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= +GITHUB_TOKEN= +``` + +### GitHub Token Setup + +To obtain a GitHub token for the MCP server: + +1. Go to [GitHub Settings > Developer settings > Personal access tokens > Tokens (classic)](https://github.com/settings/tokens) +1. Click "Generate new token" → "Generate new token (classic)" +1. Select the minimum required scopes under the "repo" category. For this sample, the following scopes are sufficient: + - `public_repo` (Access public repositories) + - `repo:status` (Access commit statuses) + If you need access to private repositories, also select `repo` (Full control of private repositories). +1. Click "Generate token" +1. Copy the token immediately (you won't be able to see it again) +1. 
async def main() -> None:
    """Serve a GitHub MCP agent over HTTP via the agentserver adapter.

    :raises RuntimeError: If the GITHUB_TOKEN environment variable is unset.
    """
    github_token = os.getenv("GITHUB_TOKEN")
    if not github_token:
        raise RuntimeError(
            "GITHUB_TOKEN environment variable not set. Provide a GitHub token with MCP access."
        )

    # The token is injected as a Bearer Authorization header on the MCP tool.
    github_tool = MCPStreamableHTTPTool(
        name=MCP_TOOL_NAME,
        url=MCP_TOOL_URL,
        headers={
            "Authorization": f"Bearer {github_token}",
        },
    )
    chat_client = AzureOpenAIChatClient(credential=DefaultAzureCredential())
    agent = chat_client.create_agent(
        instructions="You are a helpful assistant that answers GitHub questions. Use only the exposed MCP tools.",
        tools=github_tool,
    )

    # The agent context manager owns the MCP connection lifetime.
    async with agent:
        await from_agent_framework(agent).run_async()


if __name__ == "__main__":
    asyncio.run(main())
+ +## What It Shows +- Creating an Agent Framework `ChatAgent` with an `AzureAIAgentClient` +- Adding an MCP tool via `MCPStreamableHTTPTool` +- Serving the agent over HTTP using the Container Agents Adapter (`from_agent_framework(...).run()`) +- Handling both streaming and non‑streaming response modes (client controlled via the `stream` flag in the request body) + +## File Overview +- `mcp_simple.py` – Agent factory + server bootstrap. Loads `.env` relative to its location. +- `.env` – Local environment file with Azure AI project configuration variables. + +## Prerequisites + +> **Azure sign-in:** Run `az login` before starting the sample so `DefaultAzureCredential` can acquire a CLI token. + +Packages actually imported by `simple-mcp.py`: +- agent-framework-azure-ai (published package with Agent Framework client + MCP support) +- agents_adapter +- azure-identity +- python-dotenv + +Install from PyPI (from the repo root: `container_agents/`): +```bash +pip install agent-framework-azure-ai azure-identity python-dotenv +curl -sS \ +# Agent Framework MCP Simple Python Sample + +This sample demonstrates how to run a Microsoft Agent Framework `ChatAgent` that calls a Model Context Protocol (MCP) HTTP endpoint (Microsoft Learn MCP) using the agentserver adapter and the `AzureOpenAIChatClient` from the `agent-framework` package. + +## What It Shows + +- Creating an Agent Framework `ChatAgent` with an `AzureOpenAIChatClient` +- Adding an MCP tool via `MCPStreamableHTTPTool` +- Serving the agent over HTTP using the agentserver adapter (`from_agent_framework(...).run()`) +- Handling both streaming and non‑streaming response modes (client controlled via the `stream` flag in the request body) + +## File Overview + +- `mcp_simple.py` – Agent factory + server bootstrap. Loads `.env` relative to its location. +- `.env` – Local environment file with Azure AI project configuration variables. 
+ +## Prerequisites + +> **Azure sign-in:** Run `az login` before starting the sample so `DefaultAzureCredential` can acquire a CLI token. + +### Install Dependencies + +Initialize a virtual environment and then install dependencies: + +```bash +pip install ../../wheels/azure_ai_agentshosting-0.0.1+g407f536.d251024-py3-none-any.whl +pip install -r ./requirements.txt --pre +``` + +### Environment Variables + +Copy `.envtemplate` to `.env` and supply: + +``` +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= +``` + +## Running the Server + +From this folder: + +```bash +python mcp_simple.py +``` + +## Making Requests + +Non‑streaming: + +```bash +curl -sS \ + -H "Content-Type: application/json" \ + -X POST http://localhost:8088/responses \ + -d "{\"input\":\"How do I create an Azure Storage Account using the Azure CLI?\",\"stream\":false}" +``` + +Streaming (Server‑Sent Events, keep `-N` to avoid curl buffering): + +```bash +curl -sS \ + -H "Content-Type: application/json" \ + -X POST http://localhost:8088/responses \ + -d "{\"input\":\"What is Microsoft Semantic Kernel in brief?\",\"stream\":true}" +``` + +[comment]: # ( cspell:ignore mult ained ) + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 51, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " traditional"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 52, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " programming"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 53, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "."} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", 
"sequence_number": 54, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " Semantic"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 55, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " Kernel"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 56, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " allows"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 57, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " developers"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 58, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " to"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 59, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " combine"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 60, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " natural"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 61, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " language"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 62, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " processing"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 63, "item_id": 
"7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " capabilities"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 64, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " with"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 65, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " conventional"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 66, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " code"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 67, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " to"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 68, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " create"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 69, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " AI"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 70, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "-based"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 71, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " solutions"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 72, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", 
"output_index": 0, "content_index": 0, "delta": " that"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 73, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " include"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 74, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " memory"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 75, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " management"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 76, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": ","} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 77, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " complex"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 78, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " workflows"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 79, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": ","} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 80, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " embeddings"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 81, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": ","} + 
+event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 82, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " and"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 83, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " intelligent"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 84, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " decision"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 85, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "-making"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 86, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " features"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 87, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "."} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 88, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " Its"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 89, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " extens"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 90, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "ible"} + +event: response.output_text.delta +data: {"type": 
"response.output_text.delta", "sequence_number": 91, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " and"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 92, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " modular"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 93, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " design"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 94, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " supports"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 95, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " the"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 96, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " creation"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 97, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " of"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 98, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " complex"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 99, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": ","} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 100, "item_id": 
"7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " mult"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 101, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "iste"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 102, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "p"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 103, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " pipelines"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 104, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " that"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 105, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " take"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 106, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " advantage"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 107, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " of"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 108, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " the"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 109, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, 
"content_index": 0, "delta": " power"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 110, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " of"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 111, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " L"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 112, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "LM"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 113, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "s"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 114, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " while"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 115, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " allowing"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 116, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " fine"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 117, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "-gr"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 118, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "ained"} + +event: 
response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 119, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " control"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 120, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " for"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 121, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": " developers"} + +event: response.output_text.delta +data: {"type": "response.output_text.delta", "sequence_number": 122, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "delta": "."} + +event: response.output_text.done +data: {"type": "response.output_text.done", "sequence_number": 123, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "text": "Microsoft Semantic Kernel (SK) is an open-source software development kit (SDK) designed to help developers build AI applications by seamlessly integrating large language models (LLMs), such as OpenAI's GPT or Azure OpenAI Service, with traditional programming. Semantic Kernel allows developers to combine natural language processing capabilities with conventional code to create AI-based solutions that include memory management, complex workflows, embeddings, and intelligent decision-making features. 
Its extensible and modular design supports the creation of complex, multistep pipelines that take advantage of the power of LLMs while allowing fine-grained control for developers."} + +event: response.content_part.done +data: {"type": "response.content_part.done", "sequence_number": 124, "item_id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "output_index": 0, "content_index": 0, "part": {"type": "output_text", "text": "Microsoft Semantic Kernel (SK) is an open-source software development kit (SDK) designed to help developers build AI applications by seamlessly integrating large language models (LLMs), such as OpenAI's GPT or Azure OpenAI Service, with traditional programming. Semantic Kernel allows developers to combine natural language processing capabilities with conventional code to create AI-based solutions that include memory management, complex workflows, embeddings, and intelligent decision-making features. Its extensible and modular design supports the creation of complex, multistep pipelines that take advantage of the power of LLMs while allowing fine-grained control for developers.", "annotations": []}} + +event: response.output_item.done +data: {"type": "response.output_item.done", "sequence_number": 125, "output_index": 0, "item": {"type": "message", "role": "assistant", "id": "7c7115b0-b1b2-4682-9acd-5cb05a3c8123", "status": "completed", "content": [{"type": "output_text", "text": "Microsoft Semantic Kernel (SK) is an open-source software development kit (SDK) designed to help developers build AI applications by seamlessly integrating large language models (LLMs), such as OpenAI's GPT or Azure OpenAI Service, with traditional programming. Semantic Kernel allows developers to combine natural language processing capabilities with conventional code to create AI-based solutions that include memory management, complex workflows, embeddings, and intelligent decision-making features. 
Its extensible and modular design supports the creation of complex, multistep pipelines that take advantage of the power of LLMs while allowing fine-grained control for developers.", "annotations": []}]}} + +event: response.completed +data: {"type": "response.completed", "sequence_number": 126, "response": {"metadata": {}, "temperature": 1.0, "top_p": 1.0, "user": "", "id": "41249d4a-f6e4-4a01-950b-b67e9c812a7b", "created_at": 1757651565, "output": [{"id": "08772107-2062-40ed-982e-704d685a84df", "type": "message", "role": "assistant", "status": "completed", "content": [{"type": "output_text", "text": "Microsoft Semantic Kernel (SK) is an open-source software development kit (SDK) designed to help developers build AI applications by seamlessly integrating large language models (LLMs), such as OpenAI's GPT or Azure OpenAI Service, with traditional programming. Semantic Kernel allows developers to combine natural language processing capabilities with conventional code to create AI-based solutions that include memory management, complex workflows, embeddings, and intelligent decision-making features. 
Its extensible and modular design supports the creation of complex, multistep pipelines that take advantage of the power of LLMs while allowing fine-grained control for developers.", "annotations": []}]}], "parallel_tool_calls": true, "status": "completed", "object": "response"}} +``` + +## Customization Ideas +- Add additional MCP tools (multiple `MCPStreamableHTTPTool` instances in a list) +- Combine MCP + local Python tool functions +- Swap `AzureOpenAIChatClient` for a different model provider client supported by Agent Framework + +## Troubleshooting +- 401/403 errors: Check Azure AI project endpoint & deployment values in `.env` and ensure your Azure login or service principal credentials are valid +- Name resolution / network errors: Verify the MCP endpoint URL is reachable (`curl https://learn.microsoft.com/api/mcp`) +- Empty / slow responses: Ensure the Azure AI deployment name matches an active model deployment in the project and that the service has sufficient quota + +## Support +For Agent Framework issues: https://github.com/microsoft/agent-framework + +For adapter issues, open an issue in this repository. diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/mcp_simple/mcp_simple.py b/sdk/ai/azure-ai-agentserver-agentframework/samples/mcp_simple/mcp_simple.py new file mode 100644 index 000000000000..6b59771fe0da --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/samples/mcp_simple/mcp_simple.py @@ -0,0 +1,29 @@ +# Copyright (c) Microsoft. All rights reserved.
+ +import asyncio + +from agent_framework import MCPStreamableHTTPTool +from agent_framework.azure import AzureOpenAIChatClient +from azure.identity import DefaultAzureCredential +from dotenv import load_dotenv + +from azure.ai.agentserver.agentframework import from_agent_framework + +MCP_TOOL_NAME = "Microsoft Learn MCP" +MCP_TOOL_URL = "https://learn.microsoft.com/api/mcp" + +load_dotenv() + + +async def main() -> None: + agent = AzureOpenAIChatClient(credential=DefaultAzureCredential()).create_agent( + instructions="You are a helpful assistant that answers Microsoft documentation questions.", + tools=MCPStreamableHTTPTool(name=MCP_TOOL_NAME, url=MCP_TOOL_URL), + ) + + async with agent: + await from_agent_framework(agent).run_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/mcp_simple/requirements.txt b/sdk/ai/azure-ai-agentserver-agentframework/samples/mcp_simple/requirements.txt new file mode 100644 index 000000000000..c044abf99eb1 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/samples/mcp_simple/requirements.txt @@ -0,0 +1,5 @@ +python-dotenv>=1.0.0 +azure-identity +agent-framework-azure-ai +azure-ai-agentserver-core +azure-ai-agentserver-agentframework diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/.envtemplate b/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/.envtemplate new file mode 100644 index 000000000000..6bbdcc8dda37 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/.envtemplate @@ -0,0 +1,3 @@ +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/README.md b/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/README.md new file mode 100644 index 000000000000..b7124bd8b5da --- /dev/null +++ 
b/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/README.md @@ -0,0 +1,45 @@ +# Agent Framework Async Python Sample + +This sample demonstrates how to use the agents hosting adapter in an async implementation with Microsoft Agent Framework. + +## Prerequisites + +> **Azure sign-in:** Run `az login` before starting the sample so `DefaultAzureCredential` can acquire a CLI token. + +### Environment Variables + +Copy `.envtemplate` to `.env` and supply: + +``` +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= +``` + +## Running the Sample + +Follow these steps from this folder: + +1) Start the agent server (defaults to 0.0.0.0:8088): + +```bash +python minimal_async_example.py +``` + +2) Send a non-streaming request (returns a single JSON response): + +```bash +curl -sS \ + -H "Content-Type: application/json" \ + -X POST http://localhost:8088/responses \ + -d "{\"input\":\"What's the weather like in Seattle?\",\"stream\":false}" +``` + +3) Send a streaming request (server-sent events). Use -N to disable curl buffering: + +```bash +curl -N \ + -H "Content-Type: application/json" \ + -X POST http://localhost:8088/responses \ + -d "{\"input\":\"What's the weather like in New York?\",\"stream\":true}" +``` diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/minimal_async_example.py b/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/minimal_async_example.py new file mode 100644 index 000000000000..4c69c8afa84d --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/minimal_async_example.py @@ -0,0 +1,35 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +from random import randint +from typing import Annotated + +from agent_framework.azure import AzureOpenAIChatClient +from azure.identity import DefaultAzureCredential +from dotenv import load_dotenv + +from azure.ai.agentserver.agentframework import from_agent_framework + +load_dotenv() + + +def get_weather( + location: Annotated[str, "The location to get the weather for."], +) -> str: + """Get the weather for a given location.""" + conditions = ["sunny", "cloudy", "rainy", "stormy"] + return f"The weather in {location} is {conditions[randint(0, 3)]} with a high of {randint(10, 30)}°C." + + +async def main() -> None: + agent = AzureOpenAIChatClient(credential=DefaultAzureCredential()).create_agent( + instructions="You are a helpful weather agent.", + tools=get_weather, + ) + + async with agent: + await from_agent_framework(agent).run_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/requirements.txt b/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/requirements.txt new file mode 100644 index 000000000000..1b446cdc0367 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/samples/simple_async/requirements.txt @@ -0,0 +1,5 @@ +python-dotenv +azure-identity +agent-framework-azure-ai +azure-ai-agentserver-core +azure-ai-agentserver-agentframework \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/.envtemplate b/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/.envtemplate new file mode 100644 index 000000000000..990182342fca --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/.envtemplate @@ -0,0 +1,3 @@ +AZURE_AI_PROJECT_ENDPOINT= +AZURE_AI_MODEL_DEPLOYMENT_NAME= +AGENT_PROJECT_NAME= \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/README.md 
b/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/README.md new file mode 100644 index 000000000000..59bb6b9f19ec --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/README.md @@ -0,0 +1,287 @@ +## Workflow Agent Reflection Sample (Python) + +This sample demonstrates how to wrap an Agent Framework workflow (with iterative review + improvement) as an agent using the agentserver adapter. It implements a "reflection" pattern consisting of two executors: + +- Worker: Produces an initial answer (and revised answers after feedback) +- Reviewer: Evaluates the answer against quality criteria and either approves or returns constructive feedback + +The workflow cycles until the Reviewer approves the response. Only approved content is emitted externally (streamed the same way as a normal agent response). This pattern is useful for quality‑controlled assistance, gated tool use, evaluative chains, or iterative refinement. + +### Key Concepts Shown +- `WorkflowBuilder` + `.as_agent()` to expose a workflow as a standard agent +- Bidirectional edges enabling cyclical review (Worker ↔ Reviewer) +- Structured output parsing (Pydantic model) for review feedback +- Emitting `AgentRunUpdateEvent` to stream only approved messages +- Managing pending requests and re‑submission with incorporated feedback + +File: `workflow_agent_simple.py` + +--- + +## Prerequisites + +> **Azure sign-in:** Run `az login` before starting the sample so `DefaultAzureCredential` can acquire a CLI token.
+ +Dependencies used by `workflow_agent_simple.py`: +- agent-framework-azure-ai (published package with workflow abstractions) +- agents_adapter +- azure-identity (for `DefaultAzureCredential`) +- python-dotenv (loads `.env` for local credentials) +- pydantic (pulled transitively; listed for clarity) + +Install from PyPI (from the repo root: `container_agents/`): +```bash +pip install agent-framework-azure-ai azure-identity python-dotenv + +pip install -e src/adapter/python +``` + +--- + +## Additional Requirements + +1. Azure AI project with a model deployment (supports Microsoft hosted, Azure OpenAI, or custom models exposed via Azure AI Foundry). + +--- + +## Configuration + +Copy `.envtemplate` to `.env` and fill in real values: +``` +AZURE_AI_PROJECT_ENDPOINT= +AZURE_AI_MODEL_DEPLOYMENT_NAME= +AGENT_PROJECT_NAME= +``` +`AGENT_PROJECT_NAME` lets you override the default Azure AI agent project for this workflow; omit it to fall back to the SDK default. + +--- + +## Run the Workflow Agent + +From this folder: + +```bash +python workflow_agent_simple.py +``` +The server (via the adapter) will start on `0.0.0.0:8088` by default. + +--- + +## Send a Non‑Streaming Request + +```bash +curl -sS \ + -H "Content-Type: application/json" \ + -X POST http://localhost:8088/runs \ + -d '{"input":"Explain the concept of reflection in this workflow sample.","stream":false}' +``` + +Sample output (non‑streaming): + +``` +Processing 1 million files in parallel and writing their contents into a sorted output file can be a computationally and resource-intensive task. To handle it effectively, you can use Python with libraries like `concurrent.futures` for parallelism and `heapq` for the sorting and merging. 
+ +Below is an example implementation: + +import os +from concurrent.futures import ThreadPoolExecutor +import heapq + +def read_file(file_path): + """Read the content of a single file and return it as a list of lines.""" + with open(file_path, 'r') as file: + return file.readlines() + +def parallel_read_files(file_paths, max_workers=8): + """ + Read files in parallel and return all the lines in memory. + :param file_paths: List of file paths to read. + :param max_workers: Number of worker threads to use for parallelism. + """ + all_lines = [] + with ThreadPoolExecutor(max_workers=max_workers) as executor: + # Submit tasks to read each file in parallel + results = executor.map(read_file, file_paths) + # Collect the results + for lines in results: + all_lines.extend(lines) + return all_lines + +def write_sorted_output(lines, output_file_path): + """ + Write sorted lines to the output file. + :param lines: List of strings to be sorted and written. + :param output_file_path: File path to write the sorted result. + """ + sorted_lines = sorted(lines) + with open(output_file_path, 'w') as output_file: + output_file.writelines(sorted_lines) + +def main(directory_path, output_file_path): + """ + Main function to read files in parallel and write sorted output. + :param directory_path: Path to the directory containing input files. + :param output_file_path: File path to write the sorted output. + """ + # Get a list of all the file paths in the given directory + file_paths = [os.path.join(directory_path, f) for f in os.listdir(directory_path) if os.path.isfile(os.path.join(directory_path, f))] + + print(f"Found {len(file_paths)} files. Reading files in parallel...") + + # Read all lines from the files in parallel + all_lines = parallel_read_files(file_paths) + + print(f"Total lines read: {len(all_lines)}. 
Sorting and writing to output file...") + + # Write the sorted lines to the output file + write_sorted_output(all_lines, output_file_path) + + print(f"Sorted output written to: {output_file_path}") + +if __name__ == "__main__": + # Replace these paths with the appropriate input directory and output file path + input_directory = "path/to/input/directory" # Directory containing 1 million files + output_file = "path/to/output/sorted_output.txt" # Output file path + + main(input_directory, output_file) + +### Key Features and Steps: + +1. **Parallel Reading with `ThreadPoolExecutor`**: + - Files are read in parallel using threads to improve I/O performance since reading many files is mostly I/O-bound. + +2. **Sorting and Writing**: + - Once all lines are aggregated into memory, they are sorted using Python's `sorted()` function and written to the output file in one go. + +3. **Handles Large Number of Files**: + - The program uses threads to manage the potentially massive number of files in parallel, saving time instead of processing them serially. + +### Considerations: +- **Memory Usage**: This script reads all file contents into memory. If the total size of the files is too large, you may encounter memory issues. In such cases, consider processing the files in smaller chunks. +- **Sorting**: For extremely large data, consider using an external/merge sort technique to handle sorting in smaller chunks. +- **I/O Performance**: Ensure that your I/O subsystem and disk can handle the load. + +Let me know if you'd like an optimized version to handle larger datasets with limited memory! 
+ +Usage (if provided): None +``` + +--- + +## Send a Streaming Request (Server-Sent Events) + +```bash +curl -N \ + -H "Content-Type: application/json" \ + -X POST http://localhost:8088/runs \ + -d '{"input":"How does the reviewer decide to approve?","stream":true}' +``` + +Sample output (streaming): + +``` +Here is a Python script that demonstrates parallel reading of 1 million files using `concurrent.futures` for parallelism and `heapq` to write the outputs to a sorted file. This approach ensures efficiency when dealing with such a large number of files. + + +import os +import heapq +from concurrent.futures import ThreadPoolExecutor + +def read_file(file_path): + """ + Read the content of a single file and return it as a list of lines. + """ + with open(file_path, 'r') as file: + return file.readlines() + +def parallel_read_files(file_paths, max_workers=4): + """ + Read multiple files in parallel. + """ + all_lines = [] + with ThreadPoolExecutor(max_workers=max_workers) as executor: + # Submit reading tasks to the thread pool + futures = [executor.submit(read_file, file_path) for file_path in file_paths] + + # Gather results as they are completed + for future in futures: + all_lines.extend(future.result()) + + return all_lines + +def write_sorted_output(lines, output_file): + """ + Write sorted lines to an output file. 
+ """ + sorted_lines = sorted(lines) + with open(output_file, 'w') as file: + file.writelines(sorted_lines) + +if __name__ == "__main__": + # Set the directory containing your input files + input_directory = 'path_to_your_folder_with_files' + + # Get the list of all input files + file_paths = [os.path.join(input_directory, f) for f in os.listdir(input_directory) if os.path.isfile(os.path.join(input_directory, f))] + + # Specify the number of threads for parallel processing + max_threads = 8 # Adjust according to your system's capabilities + + # Step 1: Read all files in parallel + print("Reading files in parallel...") + all_lines = parallel_read_files(file_paths, max_workers=max_threads) + + # Step 2: Write the sorted data to the output file + output_file = 'sorted_output.txt' + print(f"Writing sorted output to {output_file}...") + write_sorted_output(all_lines, output_file) + + print("Operation complete.") + +[comment]: # ( cspell:ignore pysort ) + +### Key Points: +1. **Parallel Read**: The reading of files is handled using `concurrent.futures.ThreadPoolExecutor`, allowing multiple files to be processed simultaneously. + +2. **Sorted Output**: After collecting all lines from the files, the `sorted()` function is used to sort the content in memory. This ensures that the final output file will have all data in sorted order. + +3. **Adjustable Parallelism**: The `max_threads` parameter can be modified to control the number of threads used for file reading. The value should match your system's capabilities for optimal performance. + +4. **Large Data Handling**: If the data from 1 million files is too large to fit into memory, consider using an external merge sort algorithm or a library like `pysort` for efficient external sorting. + +Let me know if you'd like improvements or adjustments for more specific scenarios! 
+Final usage (if provided): None +``` + +> Only the final approved assistant content is emitted as normal output deltas; intermediate review feedback stays internal. + +--- + +## How the Reflection Loop Works +1. User query enters the workflow (Worker start executor) +2. Worker produces an answer with model call +3. Reviewer evaluates using a structured schema (`feedback`, `approved`) +4. If not approved: Worker augments context with feedback + regeneration instruction, then re‑answers +5. Loop continues until `approved=True` +6. Approved content is emitted as `AgentRunResponseUpdate` (streamed externally) + +--- + +## Troubleshooting +| Issue | Resolution | +|-------|------------| +| `DefaultAzureCredential` errors | Run `az login` or configure a service principal. | +| Empty / no streaming | Confirm `stream` flag in request JSON and that the event loop is healthy. | +| Model 404 / deployment error | Verify `AZURE_AI_MODEL_DEPLOYMENT_NAME` exists in the Azure AI project configured by `AZURE_AI_PROJECT_ENDPOINT`. | +| `.env` not loading | Ensure `.env` sits beside the script (or set `dotenv_path`) and that `python-dotenv` is installed. | + +--- + +## Related Resources +- Agent Framework repo: https://github.com/microsoft/agent-framework +- Basic simple sample README (same folder structure) for installation reference + +--- + +## License & Support +This sample follows the repository's LICENSE. For questions about unreleased Agent Framework features, contact the Agent Framework team via its GitHub repository. 
diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/requirements.txt b/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/requirements.txt new file mode 100644 index 000000000000..bfc51b4deaa3 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/requirements.txt @@ -0,0 +1,6 @@ +python-dotenv>=1.0.0 +pydantic==2.12.2 +azure-identity +agent-framework-azure-ai +azure-ai-agentserver-core +azure-ai-agentserver-agentframework diff --git a/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/workflow_agent_simple.py b/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/workflow_agent_simple.py new file mode 100644 index 000000000000..ce3cca956273 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/samples/workflow_agent_simple/workflow_agent_simple.py @@ -0,0 +1,292 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from dataclasses import dataclass +from uuid import uuid4 + +from agent_framework import ( + AgentRunResponseUpdate, + AgentRunUpdateEvent, + BaseChatClient, + ChatMessage, + Contents, + Executor, + Role as ChatRole, + WorkflowBuilder, + WorkflowContext, + handler, +) +from agent_framework_azure_ai import AzureAIAgentClient +from azure.identity.aio import DefaultAzureCredential +from dotenv import load_dotenv +from pydantic import BaseModel + +from azure.ai.agentserver.agentframework import from_agent_framework + +""" +The following sample demonstrates how to wrap a workflow as an agent using WorkflowAgent. + +This sample shows how to: +1. Create a workflow with a reflection pattern (Worker + Reviewer executors) +2. Wrap the workflow as an agent using the .as_agent() method +3. Stream responses from the workflow agent like a regular agent +4. 
Implement a review-retry mechanism where responses are iteratively improved + +The example implements a quality-controlled AI assistant where: +- Worker executor generates responses to user queries +- Reviewer executor evaluates the responses and provides feedback +- If not approved, the Worker incorporates feedback and regenerates the response +- The cycle continues until the response is approved +- Only approved responses are emitted to the external consumer + +Key concepts demonstrated: +- WorkflowAgent: Wraps a workflow to make it behave as an agent +- Bidirectional workflow with cycles (Worker ↔ Reviewer) +- AgentRunUpdateEvent: How workflows communicate with external consumers +- Structured output parsing for review feedback +- State management with pending requests tracking +""" + + +@dataclass +class ReviewRequest: + request_id: str + user_messages: list[ChatMessage] + agent_messages: list[ChatMessage] + + +@dataclass +class ReviewResponse: + request_id: str + feedback: str + approved: bool + + +load_dotenv() + + +class Reviewer(Executor): + """An executor that reviews messages and provides feedback.""" + + def __init__(self, chat_client: BaseChatClient) -> None: + super().__init__(id="reviewer") + self._chat_client = chat_client + + @handler + async def review( + self, request: ReviewRequest, ctx: WorkflowContext[ReviewResponse] + ) -> None: + print( + f"🔍 Reviewer: Evaluating response for request {request.request_id[:8]}..." + ) + + # Use the chat client to review the message and use structured output. + # NOTE: this can be modified to use an evaluation framework. + + class _Response(BaseModel): + feedback: str + approved: bool + + # Define the system prompt. 
+        messages = [
+            ChatMessage(
+                role=ChatRole.SYSTEM,
+                text="You are a reviewer for an AI agent, please provide feedback on the "
+                "following exchange between a user and the AI agent, "
+                "and indicate if the agent's responses are approved or not.\n"
+                "Use the following criteria for your evaluation:\n"
+                "- Relevance: Does the response address the user's query?\n"
+                "- Accuracy: Is the information provided correct?\n"
+                "- Clarity: Is the response easy to understand?\n"
+                "- Completeness: Does the response cover all aspects of the query?\n"
+                "Be critical in your evaluation and provide constructive feedback.\n"
+                "Do not approve until all criteria are met.",
+            )
+        ]
+
+        # Add user messages to the chat history.
+        messages.extend(request.user_messages)
+
+        # Add agent messages to the chat history.
+        messages.extend(request.agent_messages)
+
+        # Add one more instruction for the assistant to follow.
+        messages.append(
+            ChatMessage(
+                role=ChatRole.USER,
+                text="Please provide a review of the agent's responses to the user.",
+            )
+        )
+
+        print("🔍 Reviewer: Sending review request to LLM...")
+        # Get the response from the chat client.
+        response = await self._chat_client.get_response(
+            messages=messages, response_format=_Response
+        )
+
+        # Parse the response.
+        parsed = _Response.model_validate_json(response.messages[-1].text)
+
+        print(f"🔍 Reviewer: Review complete - Approved: {parsed.approved}")
+        print(f"🔍 Reviewer: Feedback: {parsed.feedback}")
+
+        # Send the review response.
+ await ctx.send_message( + ReviewResponse( + request_id=request.request_id, + feedback=parsed.feedback, + approved=parsed.approved, + ) + ) + + +class Worker(Executor): + """An executor that performs tasks for the user.""" + + def __init__(self, chat_client: BaseChatClient) -> None: + super().__init__(id="worker") + self._chat_client = chat_client + self._pending_requests: dict[str, tuple[ReviewRequest, list[ChatMessage]]] = {} + + @handler + async def handle_user_messages( + self, user_messages: list[ChatMessage], ctx: WorkflowContext[ReviewRequest] + ) -> None: + print("🔧 Worker: Received user messages, generating response...") + + # Handle user messages and prepare a review request for the reviewer. + # Define the system prompt. + messages = [ + ChatMessage(role=ChatRole.SYSTEM, text="You are a helpful assistant.") + ] + + # Add user messages. + messages.extend(user_messages) + + print("🔧 Worker: Calling LLM to generate response...") + # Get the response from the chat client. + response = await self._chat_client.get_response(messages=messages) + print(f"🔧 Worker: Response generated: {response.messages[-1].text}") + + # Add agent messages. + messages.extend(response.messages) + + # Create the review request. + request = ReviewRequest( + request_id=str(uuid4()), + user_messages=user_messages, + agent_messages=response.messages, + ) + + print( + f"🔧 Worker: Generated response, sending to reviewer (ID: {request.request_id[:8]})" + ) + # Send the review request. + await ctx.send_message(request) + + # Add to pending requests. + self._pending_requests[request.request_id] = (request, messages) + + @handler + async def handle_review_response( + self, review: ReviewResponse, ctx: WorkflowContext[ReviewRequest] + ) -> None: + print( + f"🔧 Worker: Received review for request {review.request_id[:8]} - Approved: {review.approved}" + ) + + # Handle the review response. 
Depending on the approval status, + # either emit the approved response as AgentRunUpdateEvent, or + # retry given the feedback. + if review.request_id not in self._pending_requests: + raise ValueError( + f"Received review response for unknown request ID: {review.request_id}" + ) + # Remove the request from pending requests. + request, messages = self._pending_requests.pop(review.request_id) + + if review.approved: + print("✅ Worker: Response approved! Emitting to external consumer...") + # If approved, emit the agent run response update to the workflow's + # external consumer. + contents: list[Contents] = [] + for message in request.agent_messages: + contents.extend(message.contents) + # Emitting an AgentRunUpdateEvent in a workflow wrapped by a WorkflowAgent + # will send the AgentRunResponseUpdate to the WorkflowAgent's + # event stream. + await ctx.add_event( + AgentRunUpdateEvent( + self.id, + data=AgentRunResponseUpdate( + contents=contents, role=ChatRole.ASSISTANT + ), + ) + ) + return + + print(f"❌ Worker: Response not approved. Feedback: {review.feedback}") + print("🔧 Worker: Incorporating feedback and regenerating response...") + + # Construct new messages with feedback. + messages.append(ChatMessage(role=ChatRole.SYSTEM, text=review.feedback)) + + # Add additional instruction to address the feedback. + messages.append( + ChatMessage( + role=ChatRole.SYSTEM, + text="Please incorporate the feedback above, and provide a response to user's next message.", + ) + ) + messages.extend(request.user_messages) + + # Get the new response from the chat client. + response = await self._chat_client.get_response(messages=messages) + print( + f"🔧 Worker: New response generated after feedback: {response.messages[-1].text}" + ) + + # Process the response. + messages.extend(response.messages) + + print( + f"🔧 Worker: Generated improved response, sending for re-review (ID: {review.request_id[:8]})" + ) + # Send an updated review request. 
+ new_request = ReviewRequest( + request_id=review.request_id, + user_messages=request.user_messages, + agent_messages=response.messages, + ) + await ctx.send_message(new_request) + + # Add to pending requests. + self._pending_requests[new_request.request_id] = (new_request, messages) + + +def build_agent(chat_client: BaseChatClient): + reviewer = Reviewer(chat_client=chat_client) + worker = Worker(chat_client=chat_client) + return ( + WorkflowBuilder() + .add_edge( + worker, reviewer + ) # <--- This edge allows the worker to send requests to the reviewer + .add_edge( + reviewer, worker + ) # <--- This edge allows the reviewer to send feedback back to the worker + .set_start_executor(worker) + .build() + .as_agent() # Convert the workflow to an agent. + ) + + +async def main() -> None: + async with DefaultAzureCredential() as credential: + async with AzureAIAgentClient(async_credential=credential) as chat_client: + agent = build_agent(chat_client) + await from_agent_framework(agent).run_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-agentserver-agentframework/tests/__init__.py b/sdk/ai/azure-ai-agentserver-agentframework/tests/__init__.py new file mode 100644 index 000000000000..4a5d26360bce --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/tests/__init__.py @@ -0,0 +1 @@ +# Unit tests package diff --git a/sdk/ai/azure-ai-agentserver-agentframework/tests/conftest.py b/sdk/ai/azure-ai-agentserver-agentframework/tests/conftest.py new file mode 100644 index 000000000000..a56a7164c0a3 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/tests/conftest.py @@ -0,0 +1,15 @@ +""" +Pytest configuration and shared fixtures for unit tests. 
+""" + +import sys +from pathlib import Path + +# Ensure package sources are importable during tests +tests_root = Path(__file__).resolve() +src_root = tests_root.parents[4] +packages_root = tests_root.parents[2] / "packages" + +for path in (packages_root, src_root): + if str(path) not in sys.path: + sys.path.insert(0, str(path)) diff --git a/sdk/ai/azure-ai-agentserver-agentframework/tests/unit_tests/test_agent_framework_input_converter.py b/sdk/ai/azure-ai-agentserver-agentframework/tests/unit_tests/test_agent_framework_input_converter.py new file mode 100644 index 000000000000..3dab36131f8d --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-agentframework/tests/unit_tests/test_agent_framework_input_converter.py @@ -0,0 +1,139 @@ +import importlib + +import pytest + +from agent_framework import ChatMessage, Role as ChatRole + +converter_module = importlib.import_module( + "azure.ai.agentserver.agentframework.models.agent_framework_input_converters" +) +AgentFrameworkInputConverter = converter_module.AgentFrameworkInputConverter + + +@pytest.fixture() +def converter() -> AgentFrameworkInputConverter: + return AgentFrameworkInputConverter() + + +@pytest.mark.unit +def test_transform_none_returns_none(converter: AgentFrameworkInputConverter) -> None: + assert converter.transform_input(None) is None + + +@pytest.mark.unit +def test_transform_string_returns_same(converter: AgentFrameworkInputConverter) -> None: + assert converter.transform_input("hello") == "hello" + + +@pytest.mark.unit +def test_transform_implicit_user_message_with_string(converter: AgentFrameworkInputConverter) -> None: + payload = [{"content": "How are you?"}] + + result = converter.transform_input(payload) + + assert result == "How are you?" 
+ + +@pytest.mark.unit +def test_transform_implicit_user_message_with_input_text_list(converter: AgentFrameworkInputConverter) -> None: + payload = [ + { + "content": [ + {"type": "input_text", "text": "Hello"}, + {"type": "input_text", "text": "world"}, + ] + } + ] + + result = converter.transform_input(payload) + + assert result == "Hello world" + + +@pytest.mark.unit +def test_transform_explicit_message_returns_chat_message(converter: AgentFrameworkInputConverter) -> None: + payload = [ + { + "type": "message", + "role": "assistant", + "content": [ + {"type": "input_text", "text": "Hi there"}, + ], + } + ] + + result = converter.transform_input(payload) + + assert isinstance(result, ChatMessage) + assert result.role == ChatRole.ASSISTANT + assert result.text == "Hi there" + + +@pytest.mark.unit +def test_transform_multiple_explicit_messages_returns_list(converter: AgentFrameworkInputConverter) -> None: + payload = [ + { + "type": "message", + "role": "user", + "content": "Hello", + }, + { + "type": "message", + "role": "assistant", + "content": [ + {"type": "input_text", "text": "Greetings"}, + ], + }, + ] + + result = converter.transform_input(payload) + + assert isinstance(result, list) + assert len(result) == 2 + assert all(isinstance(item, ChatMessage) for item in result) + assert result[0].role == ChatRole.USER + assert result[0].text == "Hello" + assert result[1].role == ChatRole.ASSISTANT + assert result[1].text == "Greetings" + + +@pytest.mark.unit +def test_transform_mixed_messages_coerces_to_strings(converter: AgentFrameworkInputConverter) -> None: + payload = [ + {"content": "First"}, + { + "type": "message", + "role": "assistant", + "content": [ + {"type": "input_text", "text": "Second"}, + ], + }, + ] + + result = converter.transform_input(payload) + + assert result == ["First", "Second"] + + +@pytest.mark.unit +def test_transform_invalid_input_type_raises(converter: AgentFrameworkInputConverter) -> None: + with pytest.raises(Exception) as exc_info: 
+ converter.transform_input({"content": "invalid"}) + + assert "Unsupported input type" in str(exc_info.value) + + +@pytest.mark.unit +def test_transform_skips_non_text_entries(converter: AgentFrameworkInputConverter) -> None: + payload = [ + { + "content": [ + {"type": "input_text", "text": 123}, + {"type": "image", "url": "https://example.com"}, + ] + } + ] + + result = converter.transform_input(payload) + + assert result is None diff --git a/sdk/ai/azure-ai-agentserver-core/CHANGELOG.md b/sdk/ai/azure-ai-agentserver-core/CHANGELOG.md new file mode 100644 index 000000000000..7ce1742693b8 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/CHANGELOG.md @@ -0,0 +1,7 @@ +# Release History + +## 1.0.0a1 (2025-11-06) + +### Features Added + +First version diff --git a/sdk/ai/azure-ai-agentserver-core/LICENSE b/sdk/ai/azure-ai-agentserver-core/LICENSE new file mode 100644 index 000000000000..63447fd8bbbf --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-core/MANIFEST.in b/sdk/ai/azure-ai-agentserver-core/MANIFEST.in new file mode 100644 index 000000000000..eefbfbed7925 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/MANIFEST.in @@ -0,0 +1,9 @@ +include *.md +include LICENSE +recursive-include tests *.py +recursive-include samples *.py *.md +recursive-include doc *.rst *.md +include azure/__init__.py +include azure/ai/__init__.py +include azure/ai/agentserver/__init__.py +include azure/ai/agentserver/core/py.typed diff --git a/sdk/ai/azure-ai-agentserver-core/README.md b/sdk/ai/azure-ai-agentserver-core/README.md new file mode 100644 index 000000000000..723c37ad65d4 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/README.md @@ -0,0 +1,119 @@ +# Azure AI Agent-framework Agent Server Adapter for Python + + +## Getting started + +```bash +pip install azure-ai-agentserver-core +``` + +## Key concepts + +This is the core package for Azure AI Agent server. It hosts your agent as a container on the cloud. + +You can talk to your agent using azure-ai-project sdk. + + +## Examples + +If your agent is not built using a supported framework such as LangGraph and Agent-framework, you can still make it compatible with Microsoft AI Foundry by manually implementing the predefined interface. 
+
+```python
+import datetime
+
+from azure.ai.agentserver.core import FoundryCBAgent
+from azure.ai.agentserver.core.models import (
+    CreateResponse,
+    Response as OpenAIResponse,
+)
+from azure.ai.agentserver.core.models.projects import (
+    ItemContentOutputText,
+    ResponsesAssistantMessageItemResource,
+    ResponseTextDeltaEvent,
+    ResponseTextDoneEvent,
+)
+
+
+def stream_events(text: str):
+    assembled = ""
+    for i, token in enumerate(text.split(" ")):
+        piece = token if i == len(text.split(" ")) - 1 else token + " "
+        assembled += piece
+        yield ResponseTextDeltaEvent(delta=piece)
+    # Done with text
+    yield ResponseTextDoneEvent(text=assembled)
+
+
+async def agent_run(request_body: CreateResponse):
+    agent = request_body.agent
+    print(f"agent:{agent}")
+
+    if request_body.stream:
+        return stream_events("I am mock agent with no intelligence in stream mode.")
+
+    # Build assistant output content
+    output_content = [
+        ItemContentOutputText(
+            text="I am mock agent with no intelligence.",
+            annotations=[],
+        )
+    ]
+
+    response = OpenAIResponse(
+        metadata={},
+        temperature=0.0,
+        top_p=0.0,
+        user="me",
+        id="id",
+        created_at=datetime.datetime.now(),
+        output=[
+            ResponsesAssistantMessageItemResource(
+                status="completed",
+                content=output_content,
+            )
+        ],
+    )
+    return response
+
+
+my_agent = FoundryCBAgent()
+my_agent.agent_run = agent_run
+
+if __name__ == "__main__":
+    my_agent.run()
+
+```
+
+## Troubleshooting
+
+First run your agent with azure-ai-agentserver-core locally.
+
+If it works locally but fails in the cloud, check the logs in the Application Insights resource connected to your Azure AI Foundry project.
+
+
+### Reporting issues
+
+To report an issue with the client library, or request additional features, please open a GitHub issue [here](https://github.com/Azure/azure-sdk-for-python/issues). Mention the package name "azure-ai-agentserver-core" in the title or content.
+
+
+## Next steps
+
+Please visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agentserver-core/samples) folder. It contains several samples that show how to build your agent with azure-ai-agentserver.
+
+
+## Contributing
+
+This project welcomes contributions and suggestions. Most contributions require
+you to agree to a Contributor License Agreement (CLA) declaring that you have
+the right to, and actually do, grant us the rights to use your contribution.
+For details, visit https://cla.microsoft.com.
+
+When you submit a pull request, a CLA-bot will automatically determine whether
+you need to provide a CLA and decorate the PR appropriately (e.g., label,
+comment). Simply follow the instructions provided by the bot. You will only
+need to do this once across all repos using our CLA.
+
+This project has adopted the
+[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information,
+see the Code of Conduct FAQ or contact opencode@microsoft.com with any
+additional questions or comments.
diff --git a/sdk/ai/azure-ai-agentserver-core/azure/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py new file mode 100644 index 000000000000..895074d32ae3 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py @@ -0,0 +1,14 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +__path__ = __import__("pkgutil").extend_path(__path__, __name__) + +from ._version import VERSION +from .logger import configure as config_logging +from .server.base import FoundryCBAgent +from .server.common.agent_run_context import AgentRunContext + +config_logging() + +__all__ = ["FoundryCBAgent", "AgentRunContext"] +__version__ = VERSION diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/_version.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/_version.py new file mode 100644 index 000000000000..44465a1b2f12 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0a1" diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/constants.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/constants.py new file mode 100644 index 000000000000..a13f23aa261e --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/constants.py @@ -0,0 +1,14 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# ---------------------------------------------------------
+class Constants:
+    # well-known environment variables
+    APPLICATION_INSIGHTS_CONNECTION_STRING = "_AGENT_RUNTIME_APP_INSIGHTS_CONNECTION_STRING"
+    AZURE_AI_PROJECT_ENDPOINT = "AZURE_AI_PROJECT_ENDPOINT"
+    AGENT_ID = "AGENT_ID"
+    AGENT_NAME = "AGENT_NAME"
+    AGENT_PROJECT_RESOURCE_ID = "AGENT_PROJECT_NAME"  # NOTE(review): value "AGENT_PROJECT_NAME" does not match the constant's name — confirm the intended environment variable
+    OTEL_EXPORTER_ENDPOINT = "OTEL_EXPORTER_ENDPOINT"
+    AGENT_LOG_LEVEL = "AGENT_LOG_LEVEL"
+    AGENT_DEBUG_ERRORS = "AGENT_DEBUG_ERRORS"
+    ENABLE_APPLICATION_INSIGHTS_LOGGER = "ENABLE_APPLICATION_INSIGHTS_LOGGER"
diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py
new file mode 100644
index 000000000000..cefed9f5ef00
--- /dev/null
+++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py
@@ -0,0 +1,159 @@
+# pylint: disable=broad-exception-caught,dangerous-default-value
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# --------------------------------------------------------- +import contextvars +import logging +import os +from logging import config + +from ._version import VERSION +from .constants import Constants + +default_log_config = { + "version": 1, + "disable_existing_loggers": False, + "loggers": { + "azure.ai.agentshosting": { + "handlers": ["console"], + "level": "INFO", + "propagate": False, + }, + }, + "handlers": { + "console": {"formatter": "std_out", "class": "logging.StreamHandler", "level": "INFO"}, + }, + "formatters": {"std_out": {"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"}}, +} + +request_context = contextvars.ContextVar("request_context", default=None) + + +def get_dimensions(): + env_values = {name: value for name, value in vars(Constants).items() if not name.startswith("_")} + res = {"azure.ai.agentshosting.version": VERSION} + for name, env_name in env_values.items(): + if isinstance(env_name, str) and not env_name.startswith("_"): + runtime_value = os.environ.get(env_name) + if runtime_value: + res[f"azure.ai.agentshosting.{name.lower()}"] = runtime_value + return res + + +def get_project_endpoint(): + project_resource_id = os.environ.get(Constants.AGENT_PROJECT_RESOURCE_ID) + if project_resource_id: + last_part = project_resource_id.split("/")[-1] + + parts = last_part.split("@") + if len(parts) < 2: + print(f"invalid project resource id: {project_resource_id}") + return None + account = parts[0] + project = parts[1] + return f"https://{account}.services.ai.azure.com/api/projects/{project}" + print("environment variable AGENT_PROJECT_RESOURCE_ID not set.") + return None + + +def get_application_insights_connstr(): + try: + conn_str = os.environ.get(Constants.APPLICATION_INSIGHTS_CONNECTION_STRING) + if not conn_str: + print("environment variable APPLICATION_INSIGHTS_CONNECTION_STRING not set.") + project_endpoint = get_project_endpoint() + if project_endpoint: + # try to get the project connected application insights + from 
azure.ai.projects import AIProjectClient + from azure.identity import DefaultAzureCredential + + project_client = AIProjectClient(credential=DefaultAzureCredential(), endpoint=project_endpoint) + conn_str = project_client.telemetry.get_application_insights_connection_string() + if not conn_str: + print(f"no connected application insights found for project:{project_endpoint}") + else: + os.environ[Constants.APPLICATION_INSIGHTS_CONNECTION_STRING] = conn_str + return conn_str + except Exception as e: + print(f"failed to get application insights with error: {e}") + return None + + +class CustomDimensionsFilter(logging.Filter): + def filter(self, record): + # Add custom dimensions to every log record + dimensions = get_dimensions() + for key, value in dimensions.items(): + setattr(record, key, value) + cur_request_context = request_context.get() + if cur_request_context: + for key, value in cur_request_context.items(): + setattr(record, key, value) + return True + + +def configure(log_config: dict = default_log_config): + """ + Configure logging based on the provided configuration dictionary. + The dictionary should contain the logging configuration in a format compatible with `logging.config.dictConfig`. + + :param log_config: A dictionary containing logging configuration. 
+    :type log_config: dict
+    """
+    try:
+        config.dictConfig(log_config)
+
+        # Azure Monitor export is enabled only when a connection string can be
+        # resolved AND the opt-out switch is not set to a non-"true" value.
+        application_insights_connection_string = get_application_insights_connstr()
+        enable_application_insights_logger = (
+            os.environ.get(Constants.ENABLE_APPLICATION_INSIGHTS_LOGGER, "true").lower() == "true"
+        )
+        if application_insights_connection_string and enable_application_insights_logger:
+            # Imported lazily so the package works without the OpenTelemetry
+            # extras when Application Insights export is disabled.
+            from opentelemetry._logs import set_logger_provider
+            from opentelemetry.sdk._logs import (
+                LoggerProvider,
+                LoggingHandler,
+            )
+            from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
+            from opentelemetry.sdk.resources import Resource
+
+            from azure.monitor.opentelemetry.exporter import AzureMonitorLogExporter
+
+            logger_provider = LoggerProvider(resource=Resource.create({"service.name": "azure.ai.agentshosting"}))
+            set_logger_provider(logger_provider)
+
+            exporter = AzureMonitorLogExporter(connection_string=application_insights_connection_string)
+
+            logger_provider.add_log_record_processor(BatchLogRecordProcessor(exporter))
+            handler = LoggingHandler(logger_provider=logger_provider)
+            handler.name = "appinsights_handler"
+
+            # Add custom filter to inject dimensions
+            custom_filter = CustomDimensionsFilter()
+            handler.addFilter(custom_filter)
+
+            # Only add to azure.ai.agentshosting namespace to avoid infrastructure logs
+            app_logger = logging.getLogger("azure.ai.agentshosting")
+            app_logger.setLevel(get_log_level())
+            app_logger.addHandler(handler)
+
+    except Exception as e:
+        # Logging setup is best-effort; a misconfiguration must not crash the host.
+        print(f"Failed to configure logging: {e}")
+
+
+def get_log_level():
+    """Read the desired log level from the ``AGENT_LOG_LEVEL`` environment variable.
+
+    Falls back to ``"INFO"`` (after printing a diagnostic) when the variable
+    holds anything other than a standard level name.
+
+    :return: One of ``DEBUG``, ``INFO``, ``WARNING``, ``ERROR``, ``CRITICAL``.
+    :rtype: str
+    """
+    log_level = os.getenv(Constants.AGENT_LOG_LEVEL, "INFO").upper()
+    valid_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
+    if log_level not in valid_levels:
+        print(f"Invalid log level '{log_level}' specified. Defaulting to 'INFO'.")
+        log_level = "INFO"
+    return log_level
+
+
+def get_logger() -> logging.Logger:
+    """
+    If the logger is not already configured, it will be initialized with default settings.
+
+    :return: Configured logger instance.
+    :rtype: logging.Logger
+    """
+    return logging.getLogger("azure.ai.agentshosting")
diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py
new file mode 100644
index 000000000000..d5622ebe7732
--- /dev/null
+++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py
@@ -0,0 +1,7 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+# Public model surface for azure.ai.agentserver.core.models: the request
+# payload type plus re-exported generated project response types.
+from ._create_response import CreateResponse  # type: ignore
+from .projects import Response, ResponseStreamEvent
+
+__all__ = ["CreateResponse", "Response", "ResponseStreamEvent"]  # type: ignore[var-annotated]
diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py
new file mode 100644
index 000000000000..a38f55408c7f
--- /dev/null
+++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py
@@ -0,0 +1,12 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+# pylint: disable=no-name-in-module
+from typing import Optional
+
+from .openai import response_create_params  # type: ignore
+from 
. import projects as _azure_ai_projects_models
+
+class CreateResponse(response_create_params.ResponseCreateParamsBase, total=False):  # type: ignore
+    """Request payload for creating a response.
+
+    Extends the OpenAI ``ResponseCreateParamsBase`` TypedDict with Azure AI
+    project-specific fields; ``total=False`` keeps every added key optional.
+    """
+
+    # NOTE(review): presumably the target agent that should handle this
+    # request — confirm against the service contract.
+    agent: Optional[_azure_ai_projects_models.AgentReference]
+    # NOTE(review): looks like the standard streaming toggle — confirm.
+    stream: Optional[bool]
diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/openai/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/openai/__init__.py
new file mode 100644
index 000000000000..ecf2179f53b7
--- /dev/null
+++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/openai/__init__.py
@@ -0,0 +1,16 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+"""
+Re-exports of OpenAI SDK response types.
+
+This module re-exports types from the OpenAI SDK for convenience.
+These types are fully documented in the OpenAI SDK documentation.
+
+.. note::
+    This module re-exports OpenAI SDK types. For detailed documentation,
+    please refer to the `OpenAI Python SDK documentation
+    <https://platform.openai.com/docs/api-reference/responses>`_.
+"""
+from openai.types.responses import *  # pylint: disable=unused-wildcard-import
+
+__all__ = [name for name in globals() if not name.startswith("_")]  # type: ignore[var-annotated]
diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/__init__.py
new file mode 100644
index 000000000000..f65ea1133818
--- /dev/null
+++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/__init__.py
@@ -0,0 +1,820 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + A2ATool, + AISearchIndexResource, + AgentClusterInsightResult, + AgentClusterInsightsRequest, + AgentContainerObject, + AgentContainerOperationError, + AgentContainerOperationObject, + AgentDefinition, + AgentId, + AgentObject, + AgentObjectVersions, + AgentReference, + AgentTaxonomyInput, + AgentVersionObject, + AgenticIdentityCredentials, + Annotation, + AnnotationFileCitation, + AnnotationFilePath, + AnnotationUrlCitation, + ApiError, + ApiErrorResponse, + ApiInnerError, + ApiKeyCredentials, + ApproximateLocation, + AzureAIAgentTarget, + AzureAISearchAgentTool, + AzureAISearchIndex, + AzureAISearchToolResource, + AzureFunctionAgentTool, + AzureFunctionBinding, + AzureFunctionDefinition, + AzureFunctionDefinitionFunction, + AzureFunctionStorageQueue, + AzureOpenAIModelConfiguration, + BaseCredentials, + BingCustomSearchAgentTool, + BingCustomSearchConfiguration, + BingCustomSearchToolParameters, + BingGroundingAgentTool, + BingGroundingSearchConfiguration, + BingGroundingSearchToolParameters, + BlobReference, + BlobReferenceSasCredential, + BrowserAutomationAgentTool, + BrowserAutomationToolConnectionParameters, + BrowserAutomationToolParameters, + CaptureStructuredOutputsTool, + ChartCoordinate, + ChatSummaryMemoryItem, + ClusterInsightResult, + ClusterTokenUsage, + CodeBasedEvaluatorDefinition, + CodeInterpreterOutput, + CodeInterpreterOutputImage, + CodeInterpreterOutputLogs, + CodeInterpreterTool, + CodeInterpreterToolAuto, + CodeInterpreterToolCallItemParam, + CodeInterpreterToolCallItemResource, + ComparisonFilter, + CompoundFilter, + ComputerAction, + ComputerActionClick, + 
ComputerActionDoubleClick, + ComputerActionDrag, + ComputerActionKeyPress, + ComputerActionMove, + ComputerActionScreenshot, + ComputerActionScroll, + ComputerActionTypeKeys, + ComputerActionWait, + ComputerToolCallItemParam, + ComputerToolCallItemResource, + ComputerToolCallOutputItemOutput, + ComputerToolCallOutputItemOutputComputerScreenshot, + ComputerToolCallOutputItemParam, + ComputerToolCallOutputItemResource, + ComputerToolCallSafetyCheck, + ComputerUsePreviewTool, + Connection, + ContainerAppAgentDefinition, + ContinuousEvaluationRuleAction, + Coordinate, + CosmosDBIndex, + CreatedBy, + CronTrigger, + CustomCredential, + DailyRecurrenceSchedule, + DatasetCredential, + DatasetVersion, + DeleteAgentResponse, + DeleteAgentVersionResponse, + DeleteMemoryStoreResponse, + Deployment, + EmbeddingConfiguration, + EntraIDCredentials, + EvalCompareReport, + EvalResult, + EvalRunResultCompareItem, + EvalRunResultComparison, + EvalRunResultSummary, + EvaluationComparisonRequest, + EvaluationResultSample, + EvaluationRule, + EvaluationRuleAction, + EvaluationRuleFilter, + EvaluationRunClusterInsightResult, + EvaluationRunClusterInsightsRequest, + EvaluationScheduleTask, + EvaluationTaxonomy, + EvaluationTaxonomyInput, + EvaluatorDefinition, + EvaluatorMetric, + EvaluatorVersion, + FabricDataAgentToolParameters, + FieldMapping, + FileDatasetVersion, + FileSearchTool, + FileSearchToolCallItemParam, + FileSearchToolCallItemParamResult, + FileSearchToolCallItemResource, + FolderDatasetVersion, + FunctionTool, + FunctionToolCallItemParam, + FunctionToolCallItemResource, + FunctionToolCallOutputItemParam, + FunctionToolCallOutputItemResource, + HostedAgentDefinition, + HourlyRecurrenceSchedule, + HumanEvaluationRuleAction, + ImageBasedHostedAgentDefinition, + ImageGenTool, + ImageGenToolCallItemParam, + ImageGenToolCallItemResource, + ImageGenToolInputImageMask, + Index, + Insight, + InsightCluster, + InsightModelConfiguration, + InsightRequest, + InsightResult, + 
InsightSample, + InsightScheduleTask, + InsightSummary, + InsightsMetadata, + InvokeAzureAgentWorkflowActionOutputItemResource, + ItemContent, + ItemContentInputAudio, + ItemContentInputFile, + ItemContentInputImage, + ItemContentInputText, + ItemContentOutputAudio, + ItemContentOutputText, + ItemContentRefusal, + ItemParam, + ItemReferenceItemParam, + ItemResource, + LocalShellExecAction, + LocalShellTool, + LocalShellToolCallItemParam, + LocalShellToolCallItemResource, + LocalShellToolCallOutputItemParam, + LocalShellToolCallOutputItemResource, + Location, + LogProb, + MCPApprovalRequestItemParam, + MCPApprovalRequestItemResource, + MCPApprovalResponseItemParam, + MCPApprovalResponseItemResource, + MCPCallItemParam, + MCPCallItemResource, + MCPListToolsItemParam, + MCPListToolsItemResource, + MCPListToolsTool, + MCPTool, + MCPToolAllowedTools1, + MCPToolRequireApproval1, + MCPToolRequireApprovalAlways, + MCPToolRequireApprovalNever, + ManagedAzureAISearchIndex, + MemoryItem, + MemoryOperation, + MemorySearchItem, + MemorySearchOptions, + MemorySearchTool, + MemorySearchToolCallItemParam, + MemorySearchToolCallItemResource, + MemoryStoreDefaultDefinition, + MemoryStoreDefaultOptions, + MemoryStoreDefinition, + MemoryStoreDeleteScopeResponse, + MemoryStoreObject, + MemoryStoreOperationUsage, + MemoryStoreOperationUsageInputTokensDetails, + MemoryStoreOperationUsageOutputTokensDetails, + MemoryStoreSearchResponse, + MemoryStoreUpdateResponse, + MemoryStoreUpdateResult, + MicrosoftFabricAgentTool, + ModelDeployment, + ModelDeploymentSku, + MonthlyRecurrenceSchedule, + NoAuthenticationCredentials, + OAuthConsentRequestItemResource, + OneTimeTrigger, + OpenApiAgentTool, + OpenApiAnonymousAuthDetails, + OpenApiAuthDetails, + OpenApiFunctionDefinition, + OpenApiFunctionDefinitionFunction, + OpenApiManagedAuthDetails, + OpenApiManagedSecurityScheme, + OpenApiProjectConnectionAuthDetails, + OpenApiProjectConnectionSecurityScheme, + PagedScheduleRun, + PendingUploadRequest, 
+ PendingUploadResponse, + Prompt, + PromptAgentDefinition, + PromptAgentDefinitionText, + PromptBasedEvaluatorDefinition, + ProtocolVersionRecord, + RaiConfig, + RankingOptions, + Reasoning, + ReasoningItemParam, + ReasoningItemResource, + ReasoningItemSummaryPart, + ReasoningItemSummaryTextPart, + RecurrenceSchedule, + RecurrenceTrigger, + RedTeam, + Response, + ResponseCodeInterpreterCallCodeDeltaEvent, + ResponseCodeInterpreterCallCodeDoneEvent, + ResponseCodeInterpreterCallCompletedEvent, + ResponseCodeInterpreterCallInProgressEvent, + ResponseCodeInterpreterCallInterpretingEvent, + ResponseCompletedEvent, + ResponseContentPartAddedEvent, + ResponseContentPartDoneEvent, + ResponseConversation1, + ResponseCreatedEvent, + ResponseError, + ResponseErrorEvent, + ResponseFailedEvent, + ResponseFileSearchCallCompletedEvent, + ResponseFileSearchCallInProgressEvent, + ResponseFileSearchCallSearchingEvent, + ResponseFormatJsonSchemaSchema, + ResponseFunctionCallArgumentsDeltaEvent, + ResponseFunctionCallArgumentsDoneEvent, + ResponseImageGenCallCompletedEvent, + ResponseImageGenCallGeneratingEvent, + ResponseImageGenCallInProgressEvent, + ResponseImageGenCallPartialImageEvent, + ResponseInProgressEvent, + ResponseIncompleteDetails1, + ResponseIncompleteEvent, + ResponseMCPCallArgumentsDeltaEvent, + ResponseMCPCallArgumentsDoneEvent, + ResponseMCPCallCompletedEvent, + ResponseMCPCallFailedEvent, + ResponseMCPCallInProgressEvent, + ResponseMCPListToolsCompletedEvent, + ResponseMCPListToolsFailedEvent, + ResponseMCPListToolsInProgressEvent, + ResponseOutputItemAddedEvent, + ResponseOutputItemDoneEvent, + ResponsePromptVariables, + ResponseQueuedEvent, + ResponseReasoningDeltaEvent, + ResponseReasoningDoneEvent, + ResponseReasoningSummaryDeltaEvent, + ResponseReasoningSummaryDoneEvent, + ResponseReasoningSummaryPartAddedEvent, + ResponseReasoningSummaryPartDoneEvent, + ResponseReasoningSummaryTextDeltaEvent, + ResponseReasoningSummaryTextDoneEvent, + 
ResponseRefusalDeltaEvent, + ResponseRefusalDoneEvent, + ResponseStreamEvent, + ResponseText, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, + ResponseTextFormatConfiguration, + ResponseTextFormatConfigurationJsonObject, + ResponseTextFormatConfigurationJsonSchema, + ResponseTextFormatConfigurationText, + ResponseUsage, + ResponseWebSearchCallCompletedEvent, + ResponseWebSearchCallInProgressEvent, + ResponseWebSearchCallSearchingEvent, + ResponsesAssistantMessageItemParam, + ResponsesAssistantMessageItemResource, + ResponsesDeveloperMessageItemParam, + ResponsesDeveloperMessageItemResource, + ResponsesMessageItemParam, + ResponsesMessageItemResource, + ResponsesSystemMessageItemParam, + ResponsesSystemMessageItemResource, + ResponsesUserMessageItemParam, + ResponsesUserMessageItemResource, + SASCredentials, + Schedule, + ScheduleRun, + ScheduleTask, + SharepointAgentTool, + SharepointGroundingToolParameters, + StructuredInputDefinition, + StructuredOutputDefinition, + StructuredOutputsItemResource, + Target, + TargetConfig, + TaxonomyCategory, + TaxonomySubCategory, + Tool, + ToolArgumentBinding, + ToolChoiceObject, + ToolChoiceObjectCodeInterpreter, + ToolChoiceObjectComputer, + ToolChoiceObjectFileSearch, + ToolChoiceObjectFunction, + ToolChoiceObjectImageGen, + ToolChoiceObjectMCP, + ToolChoiceObjectWebSearch, + ToolDescription, + ToolProjectConnection, + ToolProjectConnectionList, + TopLogProb, + Trigger, + UserProfileMemoryItem, + VectorStoreFileAttributes, + WebSearchAction, + WebSearchActionFind, + WebSearchActionOpenPage, + WebSearchActionSearch, + WebSearchPreviewTool, + WebSearchToolCallItemParam, + WebSearchToolCallItemResource, + WeeklyRecurrenceSchedule, + WorkflowActionOutputItemResource, + WorkflowDefinition, +) + +from ._enums import ( # type: ignore + AgentContainerOperationStatus, + AgentContainerStatus, + AgentKind, + AgentProtocol, + AnnotationType, + AttackStrategy, + AzureAISearchQueryType, + CodeInterpreterOutputType, + 
ComputerActionType, + ComputerToolCallOutputItemOutputType, + ConnectionType, + CredentialType, + DatasetType, + DayOfWeek, + DeploymentType, + EvaluationRuleActionType, + EvaluationRuleEventType, + EvaluationTaxonomyInputType, + EvaluatorCategory, + EvaluatorDefinitionType, + EvaluatorMetricDirection, + EvaluatorMetricType, + EvaluatorType, + IndexType, + InsightType, + ItemContentType, + ItemType, + LocationType, + MemoryItemKind, + MemoryOperationKind, + MemoryStoreKind, + MemoryStoreUpdateStatus, + OpenApiAuthType, + OperationState, + PendingUploadType, + ReasoningEffort, + ReasoningItemSummaryPartType, + RecurrenceType, + ResponseErrorCode, + ResponseStreamEventType, + ResponseTextFormatConfigurationType, + ResponsesMessageRole, + RiskCategory, + SampleType, + ScheduleProvisioningStatus, + ScheduleTaskType, + ServiceTier, + ToolChoiceObjectType, + ToolChoiceOptions, + ToolType, + TreatmentEffectType, + TriggerType, + WebSearchActionType, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "A2ATool", + "AISearchIndexResource", + "AgentClusterInsightResult", + "AgentClusterInsightsRequest", + "AgentContainerObject", + "AgentContainerOperationError", + "AgentContainerOperationObject", + "AgentDefinition", + "AgentId", + "AgentObject", + "AgentObjectVersions", + "AgentReference", + "AgentTaxonomyInput", + "AgentVersionObject", + "AgenticIdentityCredentials", + "Annotation", + "AnnotationFileCitation", + "AnnotationFilePath", + "AnnotationUrlCitation", + "ApiError", + "ApiErrorResponse", + "ApiInnerError", + "ApiKeyCredentials", + "ApproximateLocation", + "AzureAIAgentTarget", + "AzureAISearchAgentTool", + "AzureAISearchIndex", + "AzureAISearchToolResource", + "AzureFunctionAgentTool", + "AzureFunctionBinding", + "AzureFunctionDefinition", + "AzureFunctionDefinitionFunction", + "AzureFunctionStorageQueue", + "AzureOpenAIModelConfiguration", + "BaseCredentials", + 
"BingCustomSearchAgentTool", + "BingCustomSearchConfiguration", + "BingCustomSearchToolParameters", + "BingGroundingAgentTool", + "BingGroundingSearchConfiguration", + "BingGroundingSearchToolParameters", + "BlobReference", + "BlobReferenceSasCredential", + "BrowserAutomationAgentTool", + "BrowserAutomationToolConnectionParameters", + "BrowserAutomationToolParameters", + "CaptureStructuredOutputsTool", + "ChartCoordinate", + "ChatSummaryMemoryItem", + "ClusterInsightResult", + "ClusterTokenUsage", + "CodeBasedEvaluatorDefinition", + "CodeInterpreterOutput", + "CodeInterpreterOutputImage", + "CodeInterpreterOutputLogs", + "CodeInterpreterTool", + "CodeInterpreterToolAuto", + "CodeInterpreterToolCallItemParam", + "CodeInterpreterToolCallItemResource", + "ComparisonFilter", + "CompoundFilter", + "ComputerAction", + "ComputerActionClick", + "ComputerActionDoubleClick", + "ComputerActionDrag", + "ComputerActionKeyPress", + "ComputerActionMove", + "ComputerActionScreenshot", + "ComputerActionScroll", + "ComputerActionTypeKeys", + "ComputerActionWait", + "ComputerToolCallItemParam", + "ComputerToolCallItemResource", + "ComputerToolCallOutputItemOutput", + "ComputerToolCallOutputItemOutputComputerScreenshot", + "ComputerToolCallOutputItemParam", + "ComputerToolCallOutputItemResource", + "ComputerToolCallSafetyCheck", + "ComputerUsePreviewTool", + "Connection", + "ContainerAppAgentDefinition", + "ContinuousEvaluationRuleAction", + "Coordinate", + "CosmosDBIndex", + "CreatedBy", + "CronTrigger", + "CustomCredential", + "DailyRecurrenceSchedule", + "DatasetCredential", + "DatasetVersion", + "DeleteAgentResponse", + "DeleteAgentVersionResponse", + "DeleteMemoryStoreResponse", + "Deployment", + "EmbeddingConfiguration", + "EntraIDCredentials", + "EvalCompareReport", + "EvalResult", + "EvalRunResultCompareItem", + "EvalRunResultComparison", + "EvalRunResultSummary", + "EvaluationComparisonRequest", + "EvaluationResultSample", + "EvaluationRule", + "EvaluationRuleAction", + 
"EvaluationRuleFilter", + "EvaluationRunClusterInsightResult", + "EvaluationRunClusterInsightsRequest", + "EvaluationScheduleTask", + "EvaluationTaxonomy", + "EvaluationTaxonomyInput", + "EvaluatorDefinition", + "EvaluatorMetric", + "EvaluatorVersion", + "FabricDataAgentToolParameters", + "FieldMapping", + "FileDatasetVersion", + "FileSearchTool", + "FileSearchToolCallItemParam", + "FileSearchToolCallItemParamResult", + "FileSearchToolCallItemResource", + "FolderDatasetVersion", + "FunctionTool", + "FunctionToolCallItemParam", + "FunctionToolCallItemResource", + "FunctionToolCallOutputItemParam", + "FunctionToolCallOutputItemResource", + "HostedAgentDefinition", + "HourlyRecurrenceSchedule", + "HumanEvaluationRuleAction", + "ImageBasedHostedAgentDefinition", + "ImageGenTool", + "ImageGenToolCallItemParam", + "ImageGenToolCallItemResource", + "ImageGenToolInputImageMask", + "Index", + "Insight", + "InsightCluster", + "InsightModelConfiguration", + "InsightRequest", + "InsightResult", + "InsightSample", + "InsightScheduleTask", + "InsightSummary", + "InsightsMetadata", + "InvokeAzureAgentWorkflowActionOutputItemResource", + "ItemContent", + "ItemContentInputAudio", + "ItemContentInputFile", + "ItemContentInputImage", + "ItemContentInputText", + "ItemContentOutputAudio", + "ItemContentOutputText", + "ItemContentRefusal", + "ItemParam", + "ItemReferenceItemParam", + "ItemResource", + "LocalShellExecAction", + "LocalShellTool", + "LocalShellToolCallItemParam", + "LocalShellToolCallItemResource", + "LocalShellToolCallOutputItemParam", + "LocalShellToolCallOutputItemResource", + "Location", + "LogProb", + "MCPApprovalRequestItemParam", + "MCPApprovalRequestItemResource", + "MCPApprovalResponseItemParam", + "MCPApprovalResponseItemResource", + "MCPCallItemParam", + "MCPCallItemResource", + "MCPListToolsItemParam", + "MCPListToolsItemResource", + "MCPListToolsTool", + "MCPTool", + "MCPToolAllowedTools1", + "MCPToolRequireApproval1", + "MCPToolRequireApprovalAlways", + 
"MCPToolRequireApprovalNever", + "ManagedAzureAISearchIndex", + "MemoryItem", + "MemoryOperation", + "MemorySearchItem", + "MemorySearchOptions", + "MemorySearchTool", + "MemorySearchToolCallItemParam", + "MemorySearchToolCallItemResource", + "MemoryStoreDefaultDefinition", + "MemoryStoreDefaultOptions", + "MemoryStoreDefinition", + "MemoryStoreDeleteScopeResponse", + "MemoryStoreObject", + "MemoryStoreOperationUsage", + "MemoryStoreOperationUsageInputTokensDetails", + "MemoryStoreOperationUsageOutputTokensDetails", + "MemoryStoreSearchResponse", + "MemoryStoreUpdateResponse", + "MemoryStoreUpdateResult", + "MicrosoftFabricAgentTool", + "ModelDeployment", + "ModelDeploymentSku", + "MonthlyRecurrenceSchedule", + "NoAuthenticationCredentials", + "OAuthConsentRequestItemResource", + "OneTimeTrigger", + "OpenApiAgentTool", + "OpenApiAnonymousAuthDetails", + "OpenApiAuthDetails", + "OpenApiFunctionDefinition", + "OpenApiFunctionDefinitionFunction", + "OpenApiManagedAuthDetails", + "OpenApiManagedSecurityScheme", + "OpenApiProjectConnectionAuthDetails", + "OpenApiProjectConnectionSecurityScheme", + "PagedScheduleRun", + "PendingUploadRequest", + "PendingUploadResponse", + "Prompt", + "PromptAgentDefinition", + "PromptAgentDefinitionText", + "PromptBasedEvaluatorDefinition", + "ProtocolVersionRecord", + "RaiConfig", + "RankingOptions", + "Reasoning", + "ReasoningItemParam", + "ReasoningItemResource", + "ReasoningItemSummaryPart", + "ReasoningItemSummaryTextPart", + "RecurrenceSchedule", + "RecurrenceTrigger", + "RedTeam", + "Response", + "ResponseCodeInterpreterCallCodeDeltaEvent", + "ResponseCodeInterpreterCallCodeDoneEvent", + "ResponseCodeInterpreterCallCompletedEvent", + "ResponseCodeInterpreterCallInProgressEvent", + "ResponseCodeInterpreterCallInterpretingEvent", + "ResponseCompletedEvent", + "ResponseContentPartAddedEvent", + "ResponseContentPartDoneEvent", + "ResponseConversation1", + "ResponseCreatedEvent", + "ResponseError", + "ResponseErrorEvent", + 
"ResponseFailedEvent", + "ResponseFileSearchCallCompletedEvent", + "ResponseFileSearchCallInProgressEvent", + "ResponseFileSearchCallSearchingEvent", + "ResponseFormatJsonSchemaSchema", + "ResponseFunctionCallArgumentsDeltaEvent", + "ResponseFunctionCallArgumentsDoneEvent", + "ResponseImageGenCallCompletedEvent", + "ResponseImageGenCallGeneratingEvent", + "ResponseImageGenCallInProgressEvent", + "ResponseImageGenCallPartialImageEvent", + "ResponseInProgressEvent", + "ResponseIncompleteDetails1", + "ResponseIncompleteEvent", + "ResponseMCPCallArgumentsDeltaEvent", + "ResponseMCPCallArgumentsDoneEvent", + "ResponseMCPCallCompletedEvent", + "ResponseMCPCallFailedEvent", + "ResponseMCPCallInProgressEvent", + "ResponseMCPListToolsCompletedEvent", + "ResponseMCPListToolsFailedEvent", + "ResponseMCPListToolsInProgressEvent", + "ResponseOutputItemAddedEvent", + "ResponseOutputItemDoneEvent", + "ResponsePromptVariables", + "ResponseQueuedEvent", + "ResponseReasoningDeltaEvent", + "ResponseReasoningDoneEvent", + "ResponseReasoningSummaryDeltaEvent", + "ResponseReasoningSummaryDoneEvent", + "ResponseReasoningSummaryPartAddedEvent", + "ResponseReasoningSummaryPartDoneEvent", + "ResponseReasoningSummaryTextDeltaEvent", + "ResponseReasoningSummaryTextDoneEvent", + "ResponseRefusalDeltaEvent", + "ResponseRefusalDoneEvent", + "ResponseStreamEvent", + "ResponseText", + "ResponseTextDeltaEvent", + "ResponseTextDoneEvent", + "ResponseTextFormatConfiguration", + "ResponseTextFormatConfigurationJsonObject", + "ResponseTextFormatConfigurationJsonSchema", + "ResponseTextFormatConfigurationText", + "ResponseUsage", + "ResponseWebSearchCallCompletedEvent", + "ResponseWebSearchCallInProgressEvent", + "ResponseWebSearchCallSearchingEvent", + "ResponsesAssistantMessageItemParam", + "ResponsesAssistantMessageItemResource", + "ResponsesDeveloperMessageItemParam", + "ResponsesDeveloperMessageItemResource", + "ResponsesMessageItemParam", + "ResponsesMessageItemResource", + 
"ResponsesSystemMessageItemParam", + "ResponsesSystemMessageItemResource", + "ResponsesUserMessageItemParam", + "ResponsesUserMessageItemResource", + "SASCredentials", + "Schedule", + "ScheduleRun", + "ScheduleTask", + "SharepointAgentTool", + "SharepointGroundingToolParameters", + "StructuredInputDefinition", + "StructuredOutputDefinition", + "StructuredOutputsItemResource", + "Target", + "TargetConfig", + "TaxonomyCategory", + "TaxonomySubCategory", + "Tool", + "ToolArgumentBinding", + "ToolChoiceObject", + "ToolChoiceObjectCodeInterpreter", + "ToolChoiceObjectComputer", + "ToolChoiceObjectFileSearch", + "ToolChoiceObjectFunction", + "ToolChoiceObjectImageGen", + "ToolChoiceObjectMCP", + "ToolChoiceObjectWebSearch", + "ToolDescription", + "ToolProjectConnection", + "ToolProjectConnectionList", + "TopLogProb", + "Trigger", + "UserProfileMemoryItem", + "VectorStoreFileAttributes", + "WebSearchAction", + "WebSearchActionFind", + "WebSearchActionOpenPage", + "WebSearchActionSearch", + "WebSearchPreviewTool", + "WebSearchToolCallItemParam", + "WebSearchToolCallItemResource", + "WeeklyRecurrenceSchedule", + "WorkflowActionOutputItemResource", + "WorkflowDefinition", + "AgentContainerOperationStatus", + "AgentContainerStatus", + "AgentKind", + "AgentProtocol", + "AnnotationType", + "AttackStrategy", + "AzureAISearchQueryType", + "CodeInterpreterOutputType", + "ComputerActionType", + "ComputerToolCallOutputItemOutputType", + "ConnectionType", + "CredentialType", + "DatasetType", + "DayOfWeek", + "DeploymentType", + "EvaluationRuleActionType", + "EvaluationRuleEventType", + "EvaluationTaxonomyInputType", + "EvaluatorCategory", + "EvaluatorDefinitionType", + "EvaluatorMetricDirection", + "EvaluatorMetricType", + "EvaluatorType", + "IndexType", + "InsightType", + "ItemContentType", + "ItemType", + "LocationType", + "MemoryItemKind", + "MemoryOperationKind", + "MemoryStoreKind", + "MemoryStoreUpdateStatus", + "OpenApiAuthType", + "OperationState", + "PendingUploadType", + 
"ReasoningEffort", + "ReasoningItemSummaryPartType", + "RecurrenceType", + "ResponseErrorCode", + "ResponseStreamEventType", + "ResponseTextFormatConfigurationType", + "ResponsesMessageRole", + "RiskCategory", + "SampleType", + "ScheduleProvisioningStatus", + "ScheduleTaskType", + "ServiceTier", + "ToolChoiceObjectType", + "ToolChoiceOptions", + "ToolType", + "TreatmentEffectType", + "TriggerType", + "WebSearchActionType", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_enums.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_enums.py new file mode 100644 index 000000000000..ea4ebc59efd7 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_enums.py @@ -0,0 +1,767 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# --------------------------------------------------------------------------
+
+from enum import Enum
+from azure.core import CaseInsensitiveEnumMeta
+
+# NOTE: CaseInsensitiveEnumMeta (azure.core) makes value lookup case-insensitive,
+# e.g. AgentContainerStatus("running") resolves to AgentContainerStatus.RUNNING.
+
+
+class AgentContainerOperationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Status of the container operation for a specific version of an agent."""
+
+    NOT_STARTED = "NotStarted"
+    """The container operation is not started."""
+    IN_PROGRESS = "InProgress"
+    """The container operation is in progress."""
+    SUCCEEDED = "Succeeded"
+    """The container operation has succeeded."""
+    FAILED = "Failed"
+    """The container operation has failed."""
+
+
+class AgentContainerStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Status of the container of a specific version of an agent."""
+
+    STARTING = "Starting"
+    """The container is starting."""
+    RUNNING = "Running"
+    """The container is running."""
+    STOPPING = "Stopping"
+    """The container is stopping."""
+    STOPPED = "Stopped"
+    """The container is stopped."""
+    FAILED = "Failed"
+    """The container has failed."""
+    DELETING = "Deleting"
+    """The container is deleting."""
+    DELETED = "Deleted"
+    """The container is deleted."""
+    UPDATING = "Updating"
+    """The container is updating."""
+
+
+class AgentKind(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Type of AgentKind."""
+
+    PROMPT = "prompt"
+    HOSTED = "hosted"
+    CONTAINER_APP = "container_app"
+    WORKFLOW = "workflow"
+
+
+class AgentProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Type of AgentProtocol."""
+
+    ACTIVITY_PROTOCOL = "activity_protocol"
+    RESPONSES = "responses"
+
+
+class AnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Type of AnnotationType."""
+
+    FILE_CITATION = "file_citation"
+    URL_CITATION = "url_citation"
+    FILE_PATH = "file_path"
+    CONTAINER_FILE_CITATION = "container_file_citation"
+
+
+class AttackStrategy(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Strategies for attacks."""
+
+    EASY = "easy"
+    """Represents a default set of easy complexity attacks. Easy complexity attacks require less
+    effort, such as translation of a prompt into some encoding, and does not require any Large
+    Language Model to convert or orchestrate."""
+    MODERATE = "moderate"
+    """Represents a default set of moderate complexity attacks. Moderate complexity attacks require
+    having access to resources such as another generative AI model."""
+    DIFFICULT = "difficult"
+    """Represents a default set of difficult complexity attacks. Difficult complexity attacks include
+    attacks that require access to significant resources and effort to execute an attack such as
+    knowledge of search-based algorithms in addition to a generative AI model."""
+    ASCII_ART = "ascii_art"
+    """Generates visual art using ASCII characters, often used for creative or obfuscation purposes."""
+    ASCII_SMUGGLER = "ascii_smuggler"
+    """Conceals data within ASCII characters, making it harder to detect."""
+    ATBASH = "atbash"
+    """Implements the Atbash cipher, a simple substitution cipher where each letter is mapped to its
+    reverse."""
+    BASE64 = "base64"
+    """Encodes binary data into a text format using Base64, commonly used for data transmission."""
+    BINARY = "binary"
+    """Converts text into binary code, representing data in a series of 0s and 1s."""
+    CAESAR = "caesar"
+    """Applies the Caesar cipher, a substitution cipher that shifts characters by a fixed number of
+    positions."""
+    CHARACTER_SPACE = "character_space"
+    """Alters text by adding spaces between characters, often used for obfuscation."""
+    JAILBREAK = "jailbreak"
+    """Injects specially crafted prompts to bypass AI safeguards, known as User Injected Prompt
+    Attacks (UPIA)."""
+    ANSII_ATTACK = "ansii_attack"
+    """Utilizes ANSI escape sequences to manipulate text appearance and behavior."""
+    CHARACTER_SWAP = "character_swap"
+    """Swaps characters within text to create variations or obfuscate the original content."""
+    SUFFIX_APPEND = "suffix_append"
+    """Appends an adversarial suffix to the prompt."""
+    STRING_JOIN = "string_join"
+    """Joins multiple strings together, often used for concatenation or obfuscation."""
+    UNICODE_CONFUSABLE = "unicode_confusable"
+    """Uses Unicode characters that look similar to standard characters, creating visual confusion."""
+    UNICODE_SUBSTITUTION = "unicode_substitution"
+    """Substitutes standard characters with Unicode equivalents, often for obfuscation."""
+    DIACRITIC = "diacritic"
+    """Adds diacritical marks to characters, changing their appearance and sometimes their meaning."""
+    FLIP = "flip"
+    """Flips characters from front to back, creating a mirrored effect."""
+    LEETSPEAK = "leetspeak"
+    """Transforms text into Leetspeak, a form of encoding that replaces letters with similar-looking
+    numbers or symbols."""
+    ROT13 = "rot13"
+    """Applies the ROT13 cipher, a simple substitution cipher that shifts characters by 13 positions."""
+    MORSE = "morse"
+    """Encodes text into Morse code, using dots and dashes to represent characters."""
+    URL = "url"
+    """Encodes text into URL format."""
+    BASELINE = "baseline"
+    """Represents the baseline direct adversarial probing, which is used by attack strategies as the
+    attack objective."""
+
+
+class AzureAISearchQueryType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Available query types for Azure AI Search tool."""
+
+    SIMPLE = "simple"
+    """Query type ``simple``"""
+    SEMANTIC = "semantic"
+    """Query type ``semantic``"""
+    VECTOR = "vector"
+    """Query type ``vector``"""
+    VECTOR_SIMPLE_HYBRID = "vector_simple_hybrid"
+    """Query type ``vector_simple_hybrid``"""
+    VECTOR_SEMANTIC_HYBRID = "vector_semantic_hybrid"
+    """Query type ``vector_semantic_hybrid``"""
+
+
+class CodeInterpreterOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Type of CodeInterpreterOutputType."""
+
+    LOGS = "logs"
+    IMAGE = "image"
+
+
+class ComputerActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Type of ComputerActionType."""
+
+    SCREENSHOT = "screenshot"
+    CLICK
= "click" + DOUBLE_CLICK = "double_click" + SCROLL = "scroll" + TYPE = "type" + WAIT = "wait" + KEYPRESS = "keypress" + DRAG = "drag" + MOVE = "move" + + +class ComputerToolCallOutputItemOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """A computer screenshot image used with the computer use tool.""" + + SCREENSHOT = "computer_screenshot" + + +class ConnectionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The Type (or category) of the connection.""" + + AZURE_OPEN_AI = "AzureOpenAI" + """Azure OpenAI Service""" + AZURE_BLOB_STORAGE = "AzureBlob" + """Azure Blob Storage, with specified container""" + AZURE_STORAGE_ACCOUNT = "AzureStorageAccount" + """Azure Blob Storage, with container not specified (used by Agents)""" + AZURE_AI_SEARCH = "CognitiveSearch" + """Azure AI Search""" + COSMOS_DB = "CosmosDB" + """CosmosDB""" + API_KEY = "ApiKey" + """Generic connection that uses API Key authentication""" + APPLICATION_CONFIGURATION = "AppConfig" + """Application Configuration""" + APPLICATION_INSIGHTS = "AppInsights" + """Application Insights""" + CUSTOM = "CustomKeys" + """Custom Keys""" + REMOTE_TOOL = "RemoteTool" + """Remote tool""" + + +class CredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The credential type used by the connection.""" + + API_KEY = "ApiKey" + """API Key credential""" + ENTRA_ID = "AAD" + """Entra ID credential (formerly known as AAD)""" + SAS = "SAS" + """Shared Access Signature (SAS) credential""" + CUSTOM = "CustomKeys" + """Custom credential""" + NONE = "None" + """No credential""" + AGENTIC_IDENTITY = "AgenticIdentityToken" + """Agentic identity credential""" + + +class DatasetType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the type of data.""" + + URI_FILE = "uri_file" + """URI file.""" + URI_FOLDER = "uri_folder" + """URI folder.""" + + +class DayOfWeek(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Days of the week for recurrence schedule.""" + + SUNDAY = "Sunday" + 
"""Sunday.""" + MONDAY = "Monday" + """Monday.""" + TUESDAY = "Tuesday" + """Tuesday.""" + WEDNESDAY = "Wednesday" + """Wednesday.""" + THURSDAY = "Thursday" + """Thursday.""" + FRIDAY = "Friday" + """Friday.""" + SATURDAY = "Saturday" + """Saturday.""" + + +class DeploymentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of DeploymentType.""" + + MODEL_DEPLOYMENT = "ModelDeployment" + """Model deployment""" + + +class EvaluationRuleActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the evaluation action.""" + + CONTINUOUS_EVALUATION = "continuousEvaluation" + """Continuous evaluation.""" + HUMAN_EVALUATION = "humanEvaluation" + """Human evaluation.""" + + +class EvaluationRuleEventType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the evaluation rule event.""" + + RESPONSE_COMPLETED = "response.completed" + """Response completed.""" + MANUAL = "manual" + """Manual trigger.""" + + +class EvaluationTaxonomyInputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the evaluation taxonomy input.""" + + AGENT = "agent" + """Agent""" + POLICY = "policy" + """Policy.""" + + +class EvaluatorCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The category of the evaluator.""" + + QUALITY = "quality" + """Quality""" + SAFETY = "safety" + """Risk & Safety""" + AGENTS = "agents" + """Agents""" + + +class EvaluatorDefinitionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of evaluator definition.""" + + PROMPT = "prompt" + """Prompt-based definition""" + CODE = "code" + """Code-based definition""" + PROMPT_AND_CODE = "prompt_and_code" + """Prompt & Code Based definition""" + SERVICE = "service" + """Service-based evaluator""" + OPENAI_GRADERS = "openai_graders" + """OpenAI graders""" + + +class EvaluatorMetricDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The direction of the metric indicating whether a higher value is better, a lower value is + better, or neutral. 
+ """ + + INCREASE = "increase" + """It indicates a higher value is better for this metric""" + DECREASE = "decrease" + """It indicates a lower value is better for this metric""" + NEUTRAL = "neutral" + """It indicates no preference for this metric direction""" + + +class EvaluatorMetricType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of the evaluator.""" + + ORDINAL = "ordinal" + """Ordinal metric representing categories that can be ordered or ranked.""" + CONTINUOUS = "continuous" + """Continuous metric representing values in a continuous range.""" + BOOLEAN = "boolean" + """Boolean metric representing true/false values""" + + +class EvaluatorType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of the evaluator.""" + + BUILT_IN = "builtin" + """Built-in evaluator (Microsoft provided)""" + CUSTOM = "custom" + """Custom evaluator""" + + +class IndexType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of IndexType.""" + + AZURE_SEARCH = "AzureSearch" + """Azure search""" + COSMOS_DB = "CosmosDBNoSqlVectorStore" + """CosmosDB""" + MANAGED_AZURE_SEARCH = "ManagedAzureSearch" + """Managed Azure Search""" + + +class InsightType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The request of the insights.""" + + EVALUATION_RUN_CLUSTER_INSIGHT = "EvaluationRunClusterInsight" + """Insights on an Evaluation run result.""" + AGENT_CLUSTER_INSIGHT = "AgentClusterInsight" + """Cluster Insight on an Agent.""" + EVALUATION_COMPARISON = "EvaluationComparison" + """Evaluation Comparison.""" + + +class ItemContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Multi-modal input and output contents.""" + + INPUT_TEXT = "input_text" + INPUT_AUDIO = "input_audio" + INPUT_IMAGE = "input_image" + INPUT_FILE = "input_file" + OUTPUT_TEXT = "output_text" + OUTPUT_AUDIO = "output_audio" + REFUSAL = "refusal" + + +class ItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ItemType.""" + + MESSAGE = "message" + 
FILE_SEARCH_CALL = "file_search_call" + FUNCTION_CALL = "function_call" + FUNCTION_CALL_OUTPUT = "function_call_output" + COMPUTER_CALL = "computer_call" + COMPUTER_CALL_OUTPUT = "computer_call_output" + WEB_SEARCH_CALL = "web_search_call" + REASONING = "reasoning" + ITEM_REFERENCE = "item_reference" + IMAGE_GENERATION_CALL = "image_generation_call" + CODE_INTERPRETER_CALL = "code_interpreter_call" + LOCAL_SHELL_CALL = "local_shell_call" + LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output" + MCP_LIST_TOOLS = "mcp_list_tools" + MCP_APPROVAL_REQUEST = "mcp_approval_request" + MCP_APPROVAL_RESPONSE = "mcp_approval_response" + MCP_CALL = "mcp_call" + STRUCTURED_OUTPUTS = "structured_outputs" + WORKFLOW_ACTION = "workflow_action" + MEMORY_SEARCH_CALL = "memory_search_call" + OAUTH_CONSENT_REQUEST = "oauth_consent_request" + + +class LocationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of LocationType.""" + + APPROXIMATE = "approximate" + + +class MemoryItemKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Memory item kind.""" + + USER_PROFILE = "user_profile" + """User profile information extracted from conversations.""" + CHAT_SUMMARY = "chat_summary" + """Summary of chat conversations.""" + + +class MemoryOperationKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Memory operation kind.""" + + CREATE = "create" + """Create a new memory item.""" + UPDATE = "update" + """Update an existing memory item.""" + DELETE = "delete" + """Delete an existing memory item.""" + + +class MemoryStoreKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of memory store implementation to use.""" + + DEFAULT = "default" + """The default memory store implementation.""" + + +class MemoryStoreUpdateStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Status of a memory store update operation.""" + + QUEUED = "queued" + IN_PROGRESS = "in_progress" + COMPLETED = "completed" + FAILED = "failed" + SUPERSEDED = "superseded" + + +class 
OpenApiAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Authentication type for OpenApi endpoint. Allowed types are: + * Anonymous (no authentication required) + * Project Connection (requires project_connection_id to endpoint, as setup in AI Foundry) + * Managed_Identity (requires audience for identity based auth). + """ + + ANONYMOUS = "anonymous" + PROJECT_CONNECTION = "project_connection" + MANAGED_IDENTITY = "managed_identity" + + +class OperationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum describing allowed operation states.""" + + NOT_STARTED = "NotStarted" + """The operation has not started.""" + RUNNING = "Running" + """The operation is in progress.""" + SUCCEEDED = "Succeeded" + """The operation has completed successfully.""" + FAILED = "Failed" + """The operation has failed.""" + CANCELED = "Canceled" + """The operation has been canceled by the user.""" + + +class PendingUploadType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of pending upload.""" + + NONE = "None" + """No pending upload.""" + BLOB_REFERENCE = "BlobReference" + """Blob Reference is the only supported type.""" + + +class ReasoningEffort(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """**o-series models only** + Constrains effort on reasoning for + `reasoning models `_. + Currently supported values are ``low``, ``medium``, and ``high``. Reducing + reasoning effort can result in faster responses and fewer tokens used + on reasoning in a response. 
+ """ + + LOW = "low" + MEDIUM = "medium" + HIGH = "high" + + +class ReasoningItemSummaryPartType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ReasoningItemSummaryPartType.""" + + SUMMARY_TEXT = "summary_text" + + +class RecurrenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Recurrence type.""" + + HOURLY = "Hourly" + """Hourly recurrence pattern.""" + DAILY = "Daily" + """Daily recurrence pattern.""" + WEEKLY = "Weekly" + """Weekly recurrence pattern.""" + MONTHLY = "Monthly" + """Monthly recurrence pattern.""" + + +class ResponseErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The error code for the response.""" + + SERVER_ERROR = "server_error" + RATE_LIMIT_EXCEEDED = "rate_limit_exceeded" + INVALID_PROMPT = "invalid_prompt" + VECTOR_STORE_TIMEOUT = "vector_store_timeout" + INVALID_IMAGE = "invalid_image" + INVALID_IMAGE_FORMAT = "invalid_image_format" + INVALID_BASE64_IMAGE = "invalid_base64_image" + INVALID_IMAGE_URL = "invalid_image_url" + IMAGE_TOO_LARGE = "image_too_large" + IMAGE_TOO_SMALL = "image_too_small" + IMAGE_PARSE_ERROR = "image_parse_error" + IMAGE_CONTENT_POLICY_VIOLATION = "image_content_policy_violation" + INVALID_IMAGE_MODE = "invalid_image_mode" + IMAGE_FILE_TOO_LARGE = "image_file_too_large" + UNSUPPORTED_IMAGE_MEDIA_TYPE = "unsupported_image_media_type" + EMPTY_IMAGE_FILE = "empty_image_file" + FAILED_TO_DOWNLOAD_IMAGE = "failed_to_download_image" + IMAGE_FILE_NOT_FOUND = "image_file_not_found" + + +class ResponsesMessageRole(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The collection of valid roles for responses message items.""" + + SYSTEM = "system" + DEVELOPER = "developer" + USER = "user" + ASSISTANT = "assistant" + + +class ResponseStreamEventType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ResponseStreamEventType.""" + + RESPONSE_AUDIO_DELTA = "response.audio.delta" + RESPONSE_AUDIO_DONE = "response.audio.done" + RESPONSE_AUDIO_TRANSCRIPT_DELTA = 
"response.audio_transcript.delta" + RESPONSE_AUDIO_TRANSCRIPT_DONE = "response.audio_transcript.done" + RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA = "response.code_interpreter_call_code.delta" + RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE = "response.code_interpreter_call_code.done" + RESPONSE_CODE_INTERPRETER_CALL_COMPLETED = "response.code_interpreter_call.completed" + RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS = "response.code_interpreter_call.in_progress" + RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING = "response.code_interpreter_call.interpreting" + RESPONSE_COMPLETED = "response.completed" + RESPONSE_CONTENT_PART_ADDED = "response.content_part.added" + RESPONSE_CONTENT_PART_DONE = "response.content_part.done" + RESPONSE_CREATED = "response.created" + ERROR = "error" + RESPONSE_FILE_SEARCH_CALL_COMPLETED = "response.file_search_call.completed" + RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS = "response.file_search_call.in_progress" + RESPONSE_FILE_SEARCH_CALL_SEARCHING = "response.file_search_call.searching" + RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA = "response.function_call_arguments.delta" + RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE = "response.function_call_arguments.done" + RESPONSE_IN_PROGRESS = "response.in_progress" + RESPONSE_FAILED = "response.failed" + RESPONSE_INCOMPLETE = "response.incomplete" + RESPONSE_OUTPUT_ITEM_ADDED = "response.output_item.added" + RESPONSE_OUTPUT_ITEM_DONE = "response.output_item.done" + RESPONSE_REFUSAL_DELTA = "response.refusal.delta" + RESPONSE_REFUSAL_DONE = "response.refusal.done" + RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED = "response.output_text.annotation.added" + RESPONSE_OUTPUT_TEXT_DELTA = "response.output_text.delta" + RESPONSE_OUTPUT_TEXT_DONE = "response.output_text.done" + RESPONSE_REASONING_SUMMARY_PART_ADDED = "response.reasoning_summary_part.added" + RESPONSE_REASONING_SUMMARY_PART_DONE = "response.reasoning_summary_part.done" + RESPONSE_REASONING_SUMMARY_TEXT_DELTA = "response.reasoning_summary_text.delta" + 
RESPONSE_REASONING_SUMMARY_TEXT_DONE = "response.reasoning_summary_text.done" + RESPONSE_WEB_SEARCH_CALL_COMPLETED = "response.web_search_call.completed" + RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS = "response.web_search_call.in_progress" + RESPONSE_WEB_SEARCH_CALL_SEARCHING = "response.web_search_call.searching" + RESPONSE_IMAGE_GENERATION_CALL_COMPLETED = "response.image_generation_call.completed" + RESPONSE_IMAGE_GENERATION_CALL_GENERATING = "response.image_generation_call.generating" + RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS = "response.image_generation_call.in_progress" + RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE = "response.image_generation_call.partial_image" + RESPONSE_MCP_CALL_ARGUMENTS_DELTA = "response.mcp_call.arguments_delta" + RESPONSE_MCP_CALL_ARGUMENTS_DONE = "response.mcp_call.arguments_done" + RESPONSE_MCP_CALL_COMPLETED = "response.mcp_call.completed" + RESPONSE_MCP_CALL_FAILED = "response.mcp_call.failed" + RESPONSE_MCP_CALL_IN_PROGRESS = "response.mcp_call.in_progress" + RESPONSE_MCP_LIST_TOOLS_COMPLETED = "response.mcp_list_tools.completed" + RESPONSE_MCP_LIST_TOOLS_FAILED = "response.mcp_list_tools.failed" + RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS = "response.mcp_list_tools.in_progress" + RESPONSE_QUEUED = "response.queued" + RESPONSE_REASONING_DELTA = "response.reasoning.delta" + RESPONSE_REASONING_DONE = "response.reasoning.done" + RESPONSE_REASONING_SUMMARY_DELTA = "response.reasoning_summary.delta" + RESPONSE_REASONING_SUMMARY_DONE = "response.reasoning_summary.done" + + +class ResponseTextFormatConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """An object specifying the format that the model must output. + Configuring ``{ "type": "json_schema" }`` enables Structured Outputs, + which ensures the model will match your supplied JSON schema. Learn more in the + `Structured Outputs guide `_. + The default format is ``{ "type": "text" }`` with no additional options. 
+ **Not recommended for gpt-4o and newer models:** + Setting to ``{ "type": "json_object" }`` enables the older JSON mode, which + ensures the message the model generates is valid JSON. Using ``json_schema`` + is preferred for models that support it. + """ + + TEXT = "text" + JSON_SCHEMA = "json_schema" + JSON_OBJECT = "json_object" + + +class RiskCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Risk category for the attack objective.""" + + HATE_UNFAIRNESS = "HateUnfairness" + """Represents content related to hate or unfairness.""" + VIOLENCE = "Violence" + """Represents content related to violence.""" + SEXUAL = "Sexual" + """Represents content of a sexual nature.""" + SELF_HARM = "SelfHarm" + """Represents content related to self-harm.""" + + +class SampleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of sample used in the analysis.""" + + EVALUATION_RESULT_SAMPLE = "EvaluationResultSample" + """A sample from the evaluation result.""" + + +class ScheduleProvisioningStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Schedule provisioning status.""" + + CREATING = "Creating" + """Represents the creation status of the schedule.""" + UPDATING = "Updating" + """Represents the updating status of the schedule.""" + DELETING = "Deleting" + """Represents the deleting status of the schedule.""" + SUCCEEDED = "Succeeded" + """Represents the succeeded status of the schedule.""" + FAILED = "Failed" + """Represents the failed status of the schedule.""" + + +class ScheduleTaskType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the task.""" + + EVALUATION = "Evaluation" + """Evaluation task.""" + INSIGHT = "Insight" + """Insight task.""" + + +class ServiceTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Specifies the processing type used for serving the request. + * If set to 'auto', then the request will be processed with the service tier configured in the + Project settings. 
Unless otherwise configured, the Project will use 'default'. + * If set to 'default', then the request will be processed with the standard pricing and + performance for the selected model. + * If set to '[flex](/docs/guides/flex-processing)' or 'priority', then the request will be + processed with the corresponding service tier. [Contact + sales](https://openai.com/contact-sales) to learn more about Priority processing. + * When not set, the default behavior is 'auto'. + When the ``service_tier`` parameter is set, the response body will include the + ``service_tier`` value based on the processing mode actually used to serve the request. This + response value may be different from the value set in the parameter. + """ + + AUTO = "auto" + DEFAULT = "default" + FLEX = "flex" + SCALE = "scale" + PRIORITY = "priority" + + +class ToolChoiceObjectType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Indicates that the model should use a built-in tool to generate a response. + `Learn more about built-in tools `_. + """ + + FILE_SEARCH = "file_search" + FUNCTION = "function" + COMPUTER = "computer_use_preview" + WEB_SEARCH = "web_search_preview" + IMAGE_GENERATION = "image_generation" + CODE_INTERPRETER = "code_interpreter" + MCP = "mcp" + + +class ToolChoiceOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Controls which (if any) tool is called by the model. + ``none`` means the model will not call any tool and instead generates a message. + ``auto`` means the model can pick between generating a message or calling one or + more tools. + ``required`` means the model must call one or more tools. 
+ """ + + NONE = "none" + AUTO = "auto" + REQUIRED = "required" + + +class ToolType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """A tool that can be used to generate a response.""" + + FILE_SEARCH = "file_search" + FUNCTION = "function" + COMPUTER_USE_PREVIEW = "computer_use_preview" + WEB_SEARCH_PREVIEW = "web_search_preview" + MCP = "mcp" + CODE_INTERPRETER = "code_interpreter" + IMAGE_GENERATION = "image_generation" + LOCAL_SHELL = "local_shell" + BING_GROUNDING = "bing_grounding" + BROWSER_AUTOMATION_PREVIEW = "browser_automation_preview" + FABRIC_DATAAGENT_PREVIEW = "fabric_dataagent_preview" + SHAREPOINT_GROUNDING_PREVIEW = "sharepoint_grounding_preview" + AZURE_AI_SEARCH = "azure_ai_search" + OPENAPI = "openapi" + BING_CUSTOM_SEARCH_PREVIEW = "bing_custom_search_preview" + CAPTURE_STRUCTURED_OUTPUTS = "capture_structured_outputs" + A2_A_PREVIEW = "a2a_preview" + AZURE_FUNCTION = "azure_function" + MEMORY_SEARCH = "memory_search" + + +class TreatmentEffectType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Treatment Effect Type.""" + + TOO_FEW_SAMPLES = "TooFewSamples" + """Not enough samples to determine treatment effect.""" + INCONCLUSIVE = "Inconclusive" + """No significant difference between treatment and baseline.""" + CHANGED = "Changed" + """Indicates the metric changed with statistical significance, but the direction is neutral.""" + IMPROVED = "Improved" + """Indicates the treatment significantly improved the metric compared to baseline.""" + DEGRADED = "Degraded" + """Indicates the treatment significantly degraded the metric compared to baseline.""" + + +class TriggerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the trigger.""" + + CRON = "Cron" + """Cron based trigger.""" + RECURRENCE = "Recurrence" + """Recurrence based trigger.""" + ONE_TIME = "OneTime" + """One-time trigger.""" + + +class WebSearchActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of WebSearchActionType.""" + + SEARCH = "search" + 
OPEN_PAGE = "open_page" + FIND = "find" diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_models.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_models.py new file mode 100644 index 000000000000..a810ddc805c3 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_models.py @@ -0,0 +1,15049 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +import datetime +from typing import Any, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload + +from ._utils.model_base import Model as _Model, rest_discriminator, rest_field +from ._enums import ( + AgentKind, + AnnotationType, + CodeInterpreterOutputType, + ComputerActionType, + ComputerToolCallOutputItemOutputType, + CredentialType, + DatasetType, + DeploymentType, + EvaluationRuleActionType, + EvaluationTaxonomyInputType, + EvaluatorDefinitionType, + IndexType, + InsightType, + ItemContentType, + ItemType, + LocationType, + MemoryItemKind, + MemoryStoreKind, + OpenApiAuthType, + PendingUploadType, + ReasoningItemSummaryPartType, + RecurrenceType, + ResponseStreamEventType, + ResponseTextFormatConfigurationType, + ResponsesMessageRole, + SampleType, + ScheduleTaskType, + ToolChoiceObjectType, + ToolType, + TriggerType, + WebSearchActionType, +) + +if TYPE_CHECKING: + from .. import _types, models as _models # type: ignore + + +class Tool(_Model): + """Tool. 
+ + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + A2ATool, AzureAISearchAgentTool, AzureFunctionAgentTool, BingCustomSearchAgentTool, + BingGroundingAgentTool, BrowserAutomationAgentTool, CaptureStructuredOutputsTool, + CodeInterpreterTool, ComputerUsePreviewTool, MicrosoftFabricAgentTool, FileSearchTool, + FunctionTool, ImageGenTool, LocalShellTool, MCPTool, MemorySearchTool, OpenApiAgentTool, + SharepointAgentTool, WebSearchPreviewTool + + :ivar type: Required. Known values are: "file_search", "function", "computer_use_preview", + "web_search_preview", "mcp", "code_interpreter", "image_generation", "local_shell", + "bing_grounding", "browser_automation_preview", "fabric_dataagent_preview", + "sharepoint_grounding_preview", "azure_ai_search", "openapi", "bing_custom_search_preview", + "capture_structured_outputs", "a2a_preview", "azure_function", and "memory_search". + :vartype type: str or ~azure.ai.projects.models.ToolType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"file_search\", \"function\", \"computer_use_preview\", + \"web_search_preview\", \"mcp\", \"code_interpreter\", \"image_generation\", \"local_shell\", + \"bing_grounding\", \"browser_automation_preview\", \"fabric_dataagent_preview\", + \"sharepoint_grounding_preview\", \"azure_ai_search\", \"openapi\", + \"bing_custom_search_preview\", \"capture_structured_outputs\", \"a2a_preview\", + \"azure_function\", and \"memory_search\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class A2ATool(Tool, discriminator="a2a_preview"): + """An agent implementing the A2A protocol. + + :ivar type: The type of the tool. Always ``a2a``. Required. + :vartype type: str or ~azure.ai.projects.models.A2_A_PREVIEW + :ivar base_url: Base URL of the agent. + :vartype base_url: str + :ivar agent_card_path: The path to the agent card relative to the ``base_url``. + If not provided, defaults to ``/.well-known/agent-card.json``. + :vartype agent_card_path: str + :ivar project_connection_id: The connection ID in the project for the A2A server. + The connection stores authentication and other connection details needed to connect to the A2A + server. + :vartype project_connection_id: str + """ + + type: Literal[ToolType.A2_A_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the tool. Always ``a2a``. Required.""" + base_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Base URL of the agent.""" + agent_card_path: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The path to the agent card relative to the ``base_url``. + If not provided, defaults to ``/.well-known/agent-card.json``.""" + project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The connection ID in the project for the A2A server. + The connection stores authentication and other connection details needed to connect to the A2A + server.""" + + @overload + def __init__( + self, + *, + base_url: Optional[str] = None, + agent_card_path: Optional[str] = None, + project_connection_id: Optional[str] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.A2_A_PREVIEW # type: ignore + + +class InsightResult(_Model): + """The result of the insights. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AgentClusterInsightResult, EvalCompareReport, EvaluationRunClusterInsightResult + + :ivar type: The type of insights result. Required. Known values are: + "EvaluationRunClusterInsight", "AgentClusterInsight", and "EvaluationComparison". + :vartype type: str or ~azure.ai.projects.models.InsightType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of insights result. Required. Known values are: \"EvaluationRunClusterInsight\", + \"AgentClusterInsight\", and \"EvaluationComparison\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentClusterInsightResult(InsightResult, discriminator="AgentClusterInsight"): + """Insights from the agent cluster analysis. + + :ivar type: The type of insights result. Required. Cluster Insight on an Agent. + :vartype type: str or ~azure.ai.projects.models.AGENT_CLUSTER_INSIGHT + :ivar cluster_insight: Required. 
+ :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult + """ + + type: Literal[InsightType.AGENT_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of insights result. Required. Cluster Insight on an Agent.""" + cluster_insight: "_models.ClusterInsightResult" = rest_field( + name="clusterInsight", visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" + + @overload + def __init__( + self, + *, + cluster_insight: "_models.ClusterInsightResult", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.AGENT_CLUSTER_INSIGHT # type: ignore + + +class InsightRequest(_Model): + """The request of the insights report. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AgentClusterInsightsRequest, EvaluationComparisonRequest, EvaluationRunClusterInsightsRequest + + :ivar type: The type of request. Required. Known values are: "EvaluationRunClusterInsight", + "AgentClusterInsight", and "EvaluationComparison". + :vartype type: str or ~azure.ai.projects.models.InsightType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of request. Required. Known values are: \"EvaluationRunClusterInsight\", + \"AgentClusterInsight\", and \"EvaluationComparison\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentClusterInsightsRequest(InsightRequest, discriminator="AgentClusterInsight"): + """Insights on set of Agent Evaluation Results. + + :ivar type: The type of request. Required. Cluster Insight on an Agent. + :vartype type: str or ~azure.ai.projects.models.AGENT_CLUSTER_INSIGHT + :ivar agent_name: Identifier for the agent. Required. + :vartype agent_name: str + :ivar model_configuration: Configuration of the model used in the insight generation. + :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration + """ + + type: Literal[InsightType.AGENT_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of request. Required. Cluster Insight on an Agent.""" + agent_name: str = rest_field(name="agentName", visibility=["read", "create", "update", "delete", "query"]) + """Identifier for the agent. Required.""" + model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( + name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """Configuration of the model used in the insight generation.""" + + @overload + def __init__( + self, + *, + agent_name: str, + model_configuration: Optional["_models.InsightModelConfiguration"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.AGENT_CLUSTER_INSIGHT # type: ignore + + +class AgentContainerObject(_Model): + """The details of the container of a specific version of an agent. + + :ivar object: The object type, which is always 'agent.container'. Required. 
Default value is + "agent.container". + :vartype object: str + :ivar status: The status of the container of a specific version of an agent. Required. Known + values are: "Starting", "Running", "Stopping", "Stopped", "Failed", "Deleting", "Deleted", and + "Updating". + :vartype status: str or ~azure.ai.projects.models.AgentContainerStatus + :ivar max_replicas: The maximum number of replicas for the container. Default is 1. + :vartype max_replicas: int + :ivar min_replicas: The minimum number of replicas for the container. Default is 1. + :vartype min_replicas: int + :ivar error_message: The error message if the container failed to operate, if any. + :vartype error_message: str + :ivar created_at: The creation time of the container. Required. + :vartype created_at: ~datetime.datetime + :ivar updated_at: The last update time of the container. Required. + :vartype updated_at: ~datetime.datetime + """ + + object: Literal["agent.container"] = rest_field(visibility=["read"]) + """The object type, which is always 'agent.container'. Required. Default value is + \"agent.container\".""" + status: Union[str, "_models.AgentContainerStatus"] = rest_field(visibility=["read"]) + """The status of the container of a specific version of an agent. Required. Known values are: + \"Starting\", \"Running\", \"Stopping\", \"Stopped\", \"Failed\", \"Deleting\", \"Deleted\", + and \"Updating\".""" + max_replicas: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The maximum number of replicas for the container. Default is 1.""" + min_replicas: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The minimum number of replicas for the container. 
Default is 1.""" + error_message: Optional[str] = rest_field(visibility=["read"]) + """The error message if the container failed to operate, if any.""" + created_at: datetime.datetime = rest_field(visibility=["read"], format="rfc3339") + """The creation time of the container. Required.""" + updated_at: datetime.datetime = rest_field(visibility=["read"], format="rfc3339") + """The last update time of the container. Required.""" + + @overload + def __init__( + self, + *, + max_replicas: Optional[int] = None, + min_replicas: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["agent.container"] = "agent.container" + + +class AgentContainerOperationError(_Model): + """The error details of the container operation, if any. + + :ivar code: The error code of the container operation, if any. Required. + :vartype code: str + :ivar type: The error type of the container operation, if any. Required. + :vartype type: str + :ivar message: The error message of the container operation, if any. Required. + :vartype message: str + """ + + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error code of the container operation, if any. Required.""" + type: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error type of the container operation, if any. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error message of the container operation, if any. Required.""" + + @overload + def __init__( + self, + *, + code: str, + type: str, + message: str, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentContainerOperationObject(_Model): + """The container operation for a specific version of an agent. + + :ivar id: The ID of the container operation. This id is unique identifier across the system. + Required. + :vartype id: str + :ivar agent_id: The ID of the agent. Required. + :vartype agent_id: str + :ivar agent_version_id: The ID of the agent version. Required. + :vartype agent_version_id: str + :ivar status: The status of the container operation. Required. Known values are: "NotStarted", + "InProgress", "Succeeded", and "Failed". + :vartype status: str or ~azure.ai.projects.models.AgentContainerOperationStatus + :ivar error: The error of the container operation, if any. + :vartype error: ~azure.ai.projects.models.AgentContainerOperationError + :ivar container: The container of the specific version of an agent. + :vartype container: ~azure.ai.projects.models.AgentContainerObject + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the container operation. This id is unique identifier across the system. Required.""" + agent_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the agent. Required.""" + agent_version_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the agent version. Required.""" + status: Union[str, "_models.AgentContainerOperationStatus"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the container operation. Required. 
Known values are: \"NotStarted\", + \"InProgress\", \"Succeeded\", and \"Failed\".""" + error: Optional["_models.AgentContainerOperationError"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The error of the container operation, if any.""" + container: Optional["_models.AgentContainerObject"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The container of the specific version of an agent.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + agent_id: str, + agent_version_id: str, + status: Union[str, "_models.AgentContainerOperationStatus"], + error: Optional["_models.AgentContainerOperationError"] = None, + container: Optional["_models.AgentContainerObject"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentDefinition(_Model): + """AgentDefinition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ContainerAppAgentDefinition, HostedAgentDefinition, PromptAgentDefinition, WorkflowDefinition + + :ivar kind: Required. Known values are: "prompt", "hosted", "container_app", and "workflow". + :vartype kind: str or ~azure.ai.projects.models.AgentKind + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + """ + + __mapping__: dict[str, _Model] = {} + kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) + """Required. 
Known values are: \"prompt\", \"hosted\", \"container_app\", and \"workflow\".""" + rai_config: Optional["_models.RaiConfig"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Configuration for Responsible AI (RAI) content filtering and safety features.""" + + @overload + def __init__( + self, + *, + kind: str, + rai_config: Optional["_models.RaiConfig"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BaseCredentials(_Model): + """A base class for connection credentials. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + EntraIDCredentials, AgenticIdentityCredentials, ApiKeyCredentials, CustomCredential, + NoAuthenticationCredentials, SASCredentials + + :ivar type: The type of credential used by the connection. Required. Known values are: + "ApiKey", "AAD", "SAS", "CustomKeys", "None", and "AgenticIdentityToken". + :vartype type: str or ~azure.ai.projects.models.CredentialType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read"]) + """The type of credential used by the connection. Required. Known values are: \"ApiKey\", \"AAD\", + \"SAS\", \"CustomKeys\", \"None\", and \"AgenticIdentityToken\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgenticIdentityCredentials(BaseCredentials, discriminator="AgenticIdentityToken"): + """Agentic identity credential definition. 
+ + :ivar type: The credential type. Required. Agentic identity credential + :vartype type: str or ~azure.ai.projects.models.AGENTIC_IDENTITY + """ + + type: Literal[CredentialType.AGENTIC_IDENTITY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. Agentic identity credential""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.AGENTIC_IDENTITY # type: ignore + + +class AgentId(_Model): + """AgentId. + + :ivar type: Required. Default value is "agent_id". + :vartype type: str + :ivar name: The name of the agent. Required. + :vartype name: str + :ivar version: The version identifier of the agent. Required. + :vartype version: str + """ + + type: Literal["agent_id"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required. Default value is \"agent_id\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. Required.""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version identifier of the agent. Required.""" + + @overload + def __init__( + self, + *, + name: str, + version: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type: Literal["agent_id"] = "agent_id" + + +class AgentObject(_Model): + """AgentObject. + + :ivar object: The object type, which is always 'agent'. Required. Default value is "agent". 
+ :vartype object: str + :ivar id: The unique identifier of the agent. Required. + :vartype id: str + :ivar name: The name of the agent. Required. + :vartype name: str + :ivar versions: The latest version of the agent. Required. + :vartype versions: ~azure.ai.projects.models.AgentObjectVersions + """ + + object: Literal["agent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type, which is always 'agent'. Required. Default value is \"agent\".""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the agent. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. Required.""" + versions: "_models.AgentObjectVersions" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The latest version of the agent. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + name: str, + versions: "_models.AgentObjectVersions", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["agent"] = "agent" + + +class AgentObjectVersions(_Model): + """AgentObjectVersions. + + :ivar latest: Required. + :vartype latest: ~azure.ai.projects.models.AgentVersionObject + """ + + latest: "_models.AgentVersionObject" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + latest: "_models.AgentVersionObject", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentReference(_Model): + """AgentReference. + + :ivar type: Required. Default value is "agent_reference". + :vartype type: str + :ivar name: The name of the agent. Required. + :vartype name: str + :ivar version: The version identifier of the agent. + :vartype version: str + """ + + type: Literal["agent_reference"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required. Default value is \"agent_reference\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. Required.""" + version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version identifier of the agent.""" + + @overload + def __init__( + self, + *, + name: str, + version: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type: Literal["agent_reference"] = "agent_reference" + + +class EvaluationTaxonomyInput(_Model): + """Input configuration for the evaluation taxonomy. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AgentTaxonomyInput + + :ivar type: Input type of the evaluation taxonomy. Required. Known values are: "agent" and + "policy". + :vartype type: str or ~azure.ai.projects.models.EvaluationTaxonomyInputType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Input type of the evaluation taxonomy. Required. 
Known values are: \"agent\" and \"policy\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentTaxonomyInput(EvaluationTaxonomyInput, discriminator="agent"): + """Input configuration for the evaluation taxonomy when the input type is agent. + + :ivar type: Input type of the evaluation taxonomy. Required. Agent + :vartype type: str or ~azure.ai.projects.models.AGENT + :ivar target: Target configuration for the agent. Required. + :vartype target: ~azure.ai.projects.models.AzureAIAgentTarget + :ivar risk_categories: List of risk categories to evaluate against. Required. + :vartype risk_categories: list[str or ~azure.ai.projects.models.RiskCategory] + """ + + type: Literal[EvaluationTaxonomyInputType.AGENT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Input type of the evaluation taxonomy. Required. Agent""" + target: "_models.AzureAIAgentTarget" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Target configuration for the agent. Required.""" + risk_categories: list[Union[str, "_models.RiskCategory"]] = rest_field( + name="riskCategories", visibility=["read", "create", "update", "delete", "query"] + ) + """List of risk categories to evaluate against. Required.""" + + @overload + def __init__( + self, + *, + target: "_models.AzureAIAgentTarget", + risk_categories: list[Union[str, "_models.RiskCategory"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = EvaluationTaxonomyInputType.AGENT # type: ignore + + +class AgentVersionObject(_Model): + """AgentVersionObject. + + :ivar metadata: Set of 16 key-value pairs that can be attached to an object. This can be + useful for storing additional information about the object in a structured + format, and querying for objects via API or the dashboard. + Keys are strings with a maximum length of 64 characters. Values are strings + with a maximum length of 512 characters. Required. + :vartype metadata: dict[str, str] + :ivar object: The object type, which is always 'agent.version'. Required. Default value is + "agent.version". + :vartype object: str + :ivar id: The unique identifier of the agent version. Required. + :vartype id: str + :ivar name: The name of the agent. Name can be used to retrieve/update/delete the agent. + Required. + :vartype name: str + :ivar version: The version identifier of the agent. Agents are immutable and every update + creates a new version while keeping the name same. Required. + :vartype version: str + :ivar description: A human-readable description of the agent. + :vartype description: str + :ivar created_at: The Unix timestamp (seconds) when the agent was created. Required. + :vartype created_at: ~datetime.datetime + :ivar definition: Required. + :vartype definition: ~azure.ai.projects.models.AgentDefinition + """ + + metadata: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Set of 16 key-value pairs that can be attached to an object. This can be + useful for storing additional information about the object in a structured + format, and querying for objects via API or the dashboard. + Keys are strings with a maximum length of 64 characters. Values are strings + with a maximum length of 512 characters. 
Required.""" + object: Literal["agent.version"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type, which is always 'agent.version'. Required. Default value is \"agent.version\".""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the agent version. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. Name can be used to retrieve/update/delete the agent. Required.""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version identifier of the agent. Agents are immutable and every update creates a new + version while keeping the name same. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable description of the agent.""" + created_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """The Unix timestamp (seconds) when the agent was created. Required.""" + definition: "_models.AgentDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + metadata: dict[str, str], + id: str, # pylint: disable=redefined-builtin + name: str, + version: str, + created_at: datetime.datetime, + definition: "_models.AgentDefinition", + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["agent.version"] = "agent.version" + + +class AISearchIndexResource(_Model): + """A AI Search Index resource. 
+ + :ivar project_connection_id: An index connection ID in an IndexResource attached to this agent. + Required. + :vartype project_connection_id: str + :ivar index_name: The name of an index in an IndexResource attached to this agent. + :vartype index_name: str + :ivar query_type: Type of query in an AIIndexResource attached to this agent. Known values are: + "simple", "semantic", "vector", "vector_simple_hybrid", and "vector_semantic_hybrid". + :vartype query_type: str or ~azure.ai.projects.models.AzureAISearchQueryType + :ivar top_k: Number of documents to retrieve from search and present to the model. + :vartype top_k: int + :ivar filter: filter string for search resource. Learn more from here: + `https://learn.microsoft.com/azure/search/search-filters + `_. + :vartype filter: str + :ivar index_asset_id: Index asset id for search resource. + :vartype index_asset_id: str + """ + + project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An index connection ID in an IndexResource attached to this agent. Required.""" + index_name: Optional[str] = rest_field(name="indexName", visibility=["read", "create", "update", "delete", "query"]) + """The name of an index in an IndexResource attached to this agent.""" + query_type: Optional[Union[str, "_models.AzureAISearchQueryType"]] = rest_field( + name="queryType", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of query in an AIIndexResource attached to this agent. Known values are: \"simple\", + \"semantic\", \"vector\", \"vector_simple_hybrid\", and \"vector_semantic_hybrid\".""" + top_k: Optional[int] = rest_field(name="topK", visibility=["read", "create", "update", "delete", "query"]) + """Number of documents to retrieve from search and present to the model.""" + filter: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """filter string for search resource. 
Learn more from here: + `https://learn.microsoft.com/azure/search/search-filters + `_.""" + index_asset_id: Optional[str] = rest_field( + name="indexAssetId", visibility=["read", "create", "update", "delete", "query"] + ) + """Index asset id for search resource.""" + + @overload + def __init__( + self, + *, + project_connection_id: str, + index_name: Optional[str] = None, + query_type: Optional[Union[str, "_models.AzureAISearchQueryType"]] = None, + top_k: Optional[int] = None, + filter: Optional[str] = None, # pylint: disable=redefined-builtin + index_asset_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Annotation(_Model): + """Annotation. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AnnotationFileCitation, AnnotationFilePath, AnnotationUrlCitation + + :ivar type: Required. Known values are: "file_citation", "url_citation", "file_path", and + "container_file_citation". + :vartype type: str or ~azure.ai.projects.models.AnnotationType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"file_citation\", \"url_citation\", \"file_path\", and + \"container_file_citation\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AnnotationFileCitation(Annotation, discriminator="file_citation"): + """A citation to a file. 
+ + :ivar type: The type of the file citation. Always ``file_citation``. Required. + :vartype type: str or ~azure.ai.projects.models.FILE_CITATION + :ivar file_id: The ID of the file. Required. + :vartype file_id: str + :ivar index: The index of the file in the list of files. Required. + :vartype index: int + :ivar filename: The filename of the file cited. Required. + :vartype filename: str + """ + + type: Literal[AnnotationType.FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the file citation. Always ``file_citation``. Required.""" + file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the file. Required.""" + index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the file in the list of files. Required.""" + filename: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The filename of the file cited. Required.""" + + @overload + def __init__( + self, + *, + file_id: str, + index: int, + filename: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = AnnotationType.FILE_CITATION # type: ignore + + +class AnnotationFilePath(Annotation, discriminator="file_path"): + """A path to a file. + + :ivar type: The type of the file path. Always ``file_path``. Required. + :vartype type: str or ~azure.ai.projects.models.FILE_PATH + :ivar file_id: The ID of the file. Required. + :vartype file_id: str + :ivar index: The index of the file in the list of files. Required. 
+ :vartype index: int + """ + + type: Literal[AnnotationType.FILE_PATH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the file path. Always ``file_path``. Required.""" + file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the file. Required.""" + index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the file in the list of files. Required.""" + + @overload + def __init__( + self, + *, + file_id: str, + index: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = AnnotationType.FILE_PATH # type: ignore + + +class AnnotationUrlCitation(Annotation, discriminator="url_citation"): + """A citation for a web resource used to generate a model response. + + :ivar type: The type of the URL citation. Always ``url_citation``. Required. + :vartype type: str or ~azure.ai.projects.models.URL_CITATION + :ivar url: The URL of the web resource. Required. + :vartype url: str + :ivar start_index: The index of the first character of the URL citation in the message. + Required. + :vartype start_index: int + :ivar end_index: The index of the last character of the URL citation in the message. Required. + :vartype end_index: int + :ivar title: The title of the web resource. Required. + :vartype title: str + """ + + type: Literal[AnnotationType.URL_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the URL citation. Always ``url_citation``. Required.""" + url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the web resource. 
Required.""" + start_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the first character of the URL citation in the message. Required.""" + end_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the last character of the URL citation in the message. Required.""" + title: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The title of the web resource. Required.""" + + @overload + def __init__( + self, + *, + url: str, + start_index: int, + end_index: int, + title: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = AnnotationType.URL_CITATION # type: ignore + + +class ApiError(_Model): + """ApiError. + + :ivar code: The error code. Required. + :vartype code: str + :ivar message: A human-readable description of the error. Required. + :vartype message: str + :ivar target: The target of the error, if applicable. + :vartype target: str + :ivar details: Additional details about the error. Required. + :vartype details: list[~azure.ai.projects.models.ApiError] + :ivar innererror: The inner error, if any. + :vartype innererror: ~azure.ai.projects.models.ApiInnerError + """ + + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error code. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable description of the error. 
Required.""" + target: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The target of the error, if applicable.""" + details: list["_models.ApiError"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional details about the error. Required.""" + innererror: Optional["_models.ApiInnerError"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The inner error, if any.""" + + @overload + def __init__( + self, + *, + code: str, + message: str, + details: list["_models.ApiError"], + target: Optional[str] = None, + innererror: Optional["_models.ApiInnerError"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ApiErrorResponse(_Model): + """Error response for API failures. + + :ivar error: Required. + :vartype error: ~azure.ai.projects.models.ApiError + """ + + error: "_models.ApiError" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + error: "_models.ApiError", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ApiInnerError(_Model): + """ApiInnerError. + + :ivar code: The error code. Required. + :vartype code: str + :ivar innererror: The inner error, if any. + :vartype innererror: ~azure.ai.projects.models.ApiInnerError + """ + + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error code. 
Required.""" + innererror: Optional["_models.ApiInnerError"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The inner error, if any.""" + + @overload + def __init__( + self, + *, + code: str, + innererror: Optional["_models.ApiInnerError"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ApiKeyCredentials(BaseCredentials, discriminator="ApiKey"): + """API Key Credential definition. + + :ivar type: The credential type. Required. API Key credential + :vartype type: str or ~azure.ai.projects.models.API_KEY + :ivar api_key: API Key. + :vartype api_key: str + """ + + type: Literal[CredentialType.API_KEY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. API Key credential""" + api_key: Optional[str] = rest_field(name="key", visibility=["read"]) + """API Key.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.API_KEY # type: ignore + + +class Location(_Model): + """Location. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ApproximateLocation + + :ivar type: Required. "approximate" + :vartype type: str or ~azure.ai.projects.models.LocationType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. 
\"approximate\"""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ApproximateLocation(Location, discriminator="approximate"): + """ApproximateLocation. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.APPROXIMATE + :ivar country: + :vartype country: str + :ivar region: + :vartype region: str + :ivar city: + :vartype city: str + :ivar timezone: + :vartype timezone: str + """ + + type: Literal[LocationType.APPROXIMATE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + country: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + region: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + city: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + timezone: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + + @overload + def __init__( + self, + *, + country: Optional[str] = None, + region: Optional[str] = None, + city: Optional[str] = None, + timezone: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = LocationType.APPROXIMATE # type: ignore + + +class Target(_Model): + """Base class for targets with discriminator support. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are:
+    AzureAIAgentTarget, AzureAIAssistantTarget, AzureAIModelTarget
+
+    :ivar type: The type of target. Required. Default value is None.
+    :vartype type: str
+    """
+
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """The type of target. Required. Default value is None."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class AzureAIAgentTarget(Target, discriminator="azure_ai_agent"):
+    """Represents a target specifying an Azure AI agent.
+
+    :ivar type: The type of target, always ``azure_ai_agent``. Required. Default value is
+     "azure_ai_agent".
+    :vartype type: str
+    :ivar name: The unique identifier of the Azure AI agent. Required.
+    :vartype name: str
+    :ivar version: The version of the Azure AI agent.
+    :vartype version: str
+    :ivar tool_descriptions: The descriptions of the tools available to the Azure AI agent.
+    :vartype tool_descriptions: list[~azure.ai.projects.models.ToolDescription]
+    """
+
+    type: Literal["azure_ai_agent"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of target, always ``azure_ai_agent``. Required. Default value is \"azure_ai_agent\"."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the Azure AI agent. 
Required."""
+    version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The version of the Azure AI agent."""
+    tool_descriptions: Optional[list["_models.ToolDescription"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The descriptions of the tools available to the Azure AI agent."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        name: str,
+        version: Optional[str] = None,
+        tool_descriptions: Optional[list["_models.ToolDescription"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = "azure_ai_agent"  # type: ignore
+
+
+class AzureAISearchAgentTool(Tool, discriminator="azure_ai_search"):
+    """The input definition information for an Azure AI search tool as used to configure an agent.
+
+    :ivar type: The object type, which is always 'azure_ai_search'. Required.
+    :vartype type: str or ~azure.ai.projects.models.AZURE_AI_SEARCH
+    :ivar azure_ai_search: The azure ai search index resource. Required.
+    :vartype azure_ai_search: ~azure.ai.projects.models.AzureAISearchToolResource
+    """
+
+    type: Literal[ToolType.AZURE_AI_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The object type, which is always 'azure_ai_search'. Required."""
+    azure_ai_search: "_models.AzureAISearchToolResource" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The azure ai search index resource. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        azure_ai_search: "_models.AzureAISearchToolResource",
+    ) -> None: ...
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.AZURE_AI_SEARCH # type: ignore + + +class Index(_Model): + """Index resource Definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureAISearchIndex, CosmosDBIndex, ManagedAzureAISearchIndex + + :ivar type: Type of index. Required. Known values are: "AzureSearch", + "CosmosDBNoSqlVectorStore", and "ManagedAzureSearch". + :vartype type: str or ~azure.ai.projects.models.IndexType + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of index. Required. Known values are: \"AzureSearch\", \"CosmosDBNoSqlVectorStore\", and + \"ManagedAzureSearch\".""" + id: Optional[str] = rest_field(visibility=["read"]) + """Asset ID, a unique identifier for the asset.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["create", "update"]) + """The asset description text.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) + """Tag dictionary. 
Tags can be added, removed, and updated.""" + + @overload + def __init__( + self, + *, + type: str, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AzureAISearchIndex(Index, discriminator="AzureSearch"): + """Azure AI Search Index Definition. + + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Type of index. Required. Azure search + :vartype type: str or ~azure.ai.projects.models.AZURE_SEARCH + :ivar connection_name: Name of connection to Azure AI Search. Required. + :vartype connection_name: str + :ivar index_name: Name of index in Azure AI Search resource to attach. Required. + :vartype index_name: str + :ivar field_mapping: Field mapping configuration. + :vartype field_mapping: ~azure.ai.projects.models.FieldMapping + """ + + type: Literal[IndexType.AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of index. Required. Azure search""" + connection_name: str = rest_field(name="connectionName", visibility=["create"]) + """Name of connection to Azure AI Search. Required.""" + index_name: str = rest_field(name="indexName", visibility=["create"]) + """Name of index in Azure AI Search resource to attach. 
Required."""
+    field_mapping: Optional["_models.FieldMapping"] = rest_field(name="fieldMapping", visibility=["create"])
+    """Field mapping configuration."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        connection_name: str,
+        index_name: str,
+        description: Optional[str] = None,
+        tags: Optional[dict[str, str]] = None,
+        field_mapping: Optional["_models.FieldMapping"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = IndexType.AZURE_SEARCH  # type: ignore
+
+
+class AzureAISearchToolResource(_Model):
+    """A set of index resources used by the ``azure_ai_search`` tool.
+
+    :ivar index_list: The indices attached to this agent. There can be a maximum of 1 index
+     resource attached to the agent.
+    :vartype index_list: list[~azure.ai.projects.models.AISearchIndexResource]
+    """
+
+    index_list: Optional[list["_models.AISearchIndexResource"]] = rest_field(
+        name="indexList", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The indices attached to this agent. There can be a maximum of 1 index
+    resource attached to the agent."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        index_list: Optional[list["_models.AISearchIndexResource"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class AzureFunctionAgentTool(Tool, discriminator="azure_function"):
+    """The input definition information for an Azure Function Tool, as used to configure an Agent.
+
+    :ivar type: The object type, which is always 'azure_function'. Required. 
+    :vartype type: str or ~azure.ai.projects.models.AZURE_FUNCTION
+    :ivar azure_function: The Azure Function Tool definition. Required.
+    :vartype azure_function: ~azure.ai.projects.models.AzureFunctionDefinition
+    """
+
+    type: Literal[ToolType.AZURE_FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The object type, which is always 'azure_function'. Required."""
+    azure_function: "_models.AzureFunctionDefinition" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The Azure Function Tool definition. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        azure_function: "_models.AzureFunctionDefinition",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ToolType.AZURE_FUNCTION  # type: ignore
+
+
+class AzureFunctionBinding(_Model):
+    """The structure for keeping storage queue name and URI.
+
+    :ivar type: The type of binding, which is always 'storage_queue'. Required. Default value is
+     "storage_queue".
+    :vartype type: str
+    :ivar storage_queue: Storage queue. Required.
+    :vartype storage_queue: ~azure.ai.projects.models.AzureFunctionStorageQueue
+    """
+
+    type: Literal["storage_queue"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The type of binding, which is always 'storage_queue'. Required. Default value is
+    \"storage_queue\"."""
+    storage_queue: "_models.AzureFunctionStorageQueue" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Storage queue. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        storage_queue: "_models.AzureFunctionStorageQueue",
+    ) -> None: ...
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type: Literal["storage_queue"] = "storage_queue" + + +class AzureFunctionDefinition(_Model): + """The definition of Azure function. + + :ivar function: The definition of azure function and its parameters. Required. + :vartype function: ~azure.ai.projects.models.AzureFunctionDefinitionFunction + :ivar input_binding: Input storage queue. The queue storage trigger runs a function as messages + are added to it. Required. + :vartype input_binding: ~azure.ai.projects.models.AzureFunctionBinding + :ivar output_binding: Output storage queue. The function writes output to this queue when the + input items are processed. Required. + :vartype output_binding: ~azure.ai.projects.models.AzureFunctionBinding + """ + + function: "_models.AzureFunctionDefinitionFunction" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The definition of azure function and its parameters. Required.""" + input_binding: "_models.AzureFunctionBinding" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Input storage queue. The queue storage trigger runs a function as messages are added to it. + Required.""" + output_binding: "_models.AzureFunctionBinding" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Output storage queue. The function writes output to this queue when the input items are + processed. Required.""" + + @overload + def __init__( + self, + *, + function: "_models.AzureFunctionDefinitionFunction", + input_binding: "_models.AzureFunctionBinding", + output_binding: "_models.AzureFunctionBinding", + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AzureFunctionDefinitionFunction(_Model): + """AzureFunctionDefinitionFunction. + + :ivar name: The name of the function to be called. Required. + :vartype name: str + :ivar description: A description of what the function does, used by the model to choose when + and how to call the function. + :vartype description: str + :ivar parameters: The parameters the functions accepts, described as a JSON Schema object. + Required. + :vartype parameters: any + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to be called. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the function does, used by the model to choose when and how to call the + function.""" + parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The parameters the functions accepts, described as a JSON Schema object. Required.""" + + @overload + def __init__( + self, + *, + name: str, + parameters: Any, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AzureFunctionStorageQueue(_Model): + """The structure for keeping storage queue name and URI. + + :ivar queue_service_endpoint: URI to the Azure Storage Queue service allowing you to manipulate + a queue. Required. 
+ :vartype queue_service_endpoint: str + :ivar queue_name: The name of an Azure function storage queue. Required. + :vartype queue_name: str + """ + + queue_service_endpoint: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """URI to the Azure Storage Queue service allowing you to manipulate a queue. Required.""" + queue_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of an Azure function storage queue. Required.""" + + @overload + def __init__( + self, + *, + queue_service_endpoint: str, + queue_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TargetConfig(_Model): + """Abstract class for target configuration. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureOpenAIModelConfiguration + + :ivar type: Type of the model configuration. Required. Default value is None. + :vartype type: str + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the model configuration. Required. Default value is None.""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AzureOpenAIModelConfiguration(TargetConfig, discriminator="AzureOpenAIModel"): + """Azure OpenAI model configuration. The API version would be selected by the service for querying + the model. + + :ivar type: Required. 
Default value is "AzureOpenAIModel". + :vartype type: str + :ivar model_deployment_name: Deployment name for AOAI model. Example: gpt-4o if in AIServices + or connection based ``connection_name/deployment_name`` (e.g. ``my-aoai-connection/gpt-4o``). + Required. + :vartype model_deployment_name: str + """ + + type: Literal["AzureOpenAIModel"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Default value is \"AzureOpenAIModel\".""" + model_deployment_name: str = rest_field( + name="modelDeploymentName", visibility=["read", "create", "update", "delete", "query"] + ) + """Deployment name for AOAI model. Example: gpt-4o if in AIServices or connection based + ``connection_name/deployment_name`` (e.g. ``my-aoai-connection/gpt-4o``). Required.""" + + @overload + def __init__( + self, + *, + model_deployment_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = "AzureOpenAIModel" # type: ignore + + +class BingCustomSearchAgentTool(Tool, discriminator="bing_custom_search_preview"): + """The input definition information for a Bing custom search tool as used to configure an agent. + + :ivar type: The object type, which is always 'bing_custom_search'. Required. + :vartype type: str or ~azure.ai.projects.models.BING_CUSTOM_SEARCH_PREVIEW + :ivar bing_custom_search_preview: The bing custom search tool parameters. Required. + :vartype bing_custom_search_preview: ~azure.ai.projects.models.BingCustomSearchToolParameters + """ + + type: Literal[ToolType.BING_CUSTOM_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_custom_search'. 
Required.""" + bing_custom_search_preview: "_models.BingCustomSearchToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The bing custom search tool parameters. Required.""" + + @overload + def __init__( + self, + *, + bing_custom_search_preview: "_models.BingCustomSearchToolParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.BING_CUSTOM_SEARCH_PREVIEW # type: ignore + + +class BingCustomSearchConfiguration(_Model): + """A bing custom search configuration. + + :ivar project_connection_id: Project connection id for grounding with bing search. Required. + :vartype project_connection_id: str + :ivar instance_name: Name of the custom configuration instance given to config. Required. + :vartype instance_name: str + :ivar market: The market where the results come from. + :vartype market: str + :ivar set_lang: The language to use for user interface strings when calling Bing API. + :vartype set_lang: str + :ivar count: The number of search results to return in the bing api response. + :vartype count: int + :ivar freshness: Filter search results by a specific time range. Accepted values: + `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters + `_. + :vartype freshness: str + """ + + project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Project connection id for grounding with bing search. Required.""" + instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the custom configuration instance given to config. 
Required.""" + market: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The market where the results come from.""" + set_lang: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The language to use for user interface strings when calling Bing API.""" + count: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of search results to return in the bing api response.""" + freshness: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Filter search results by a specific time range. Accepted values: + `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters + `_.""" + + @overload + def __init__( + self, + *, + project_connection_id: str, + instance_name: str, + market: Optional[str] = None, + set_lang: Optional[str] = None, + count: Optional[int] = None, + freshness: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BingCustomSearchToolParameters(_Model): + """The bing custom search tool parameters. + + :ivar search_configurations: The project connections attached to this tool. There can be a + maximum of 1 connection + resource attached to the tool. Required. + :vartype search_configurations: list[~azure.ai.projects.models.BingCustomSearchConfiguration] + """ + + search_configurations: list["_models.BingCustomSearchConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool. 
Required.""" + + @overload + def __init__( + self, + *, + search_configurations: list["_models.BingCustomSearchConfiguration"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BingGroundingAgentTool(Tool, discriminator="bing_grounding"): + """The input definition information for a bing grounding search tool as used to configure an + agent. + + :ivar type: The object type, which is always 'bing_grounding'. Required. + :vartype type: str or ~azure.ai.projects.models.BING_GROUNDING + :ivar bing_grounding: The bing grounding search tool parameters. Required. + :vartype bing_grounding: ~azure.ai.projects.models.BingGroundingSearchToolParameters + """ + + type: Literal[ToolType.BING_GROUNDING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_grounding'. Required.""" + bing_grounding: "_models.BingGroundingSearchToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The bing grounding search tool parameters. Required.""" + + @overload + def __init__( + self, + *, + bing_grounding: "_models.BingGroundingSearchToolParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.BING_GROUNDING # type: ignore + + +class BingGroundingSearchConfiguration(_Model): + """Search configuration for Bing Grounding. + + :ivar project_connection_id: Project connection id for grounding with bing search. Required. 
+ :vartype project_connection_id: str + :ivar market: The market where the results come from. + :vartype market: str + :ivar set_lang: The language to use for user interface strings when calling Bing API. + :vartype set_lang: str + :ivar count: The number of search results to return in the bing api response. + :vartype count: int + :ivar freshness: Filter search results by a specific time range. Accepted values: + `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters + `_. + :vartype freshness: str + """ + + project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Project connection id for grounding with bing search. Required.""" + market: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The market where the results come from.""" + set_lang: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The language to use for user interface strings when calling Bing API.""" + count: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of search results to return in the bing api response.""" + freshness: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Filter search results by a specific time range. Accepted values: + `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters + `_.""" + + @overload + def __init__( + self, + *, + project_connection_id: str, + market: Optional[str] = None, + set_lang: Optional[str] = None, + count: Optional[int] = None, + freshness: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class BingGroundingSearchToolParameters(_Model):
+ """The bing grounding search tool parameters.
+
+ :ivar project_connections: The project connections attached to this tool. There can be a
+ maximum of 1 connection
+ resource attached to the tool. Required.
+ :vartype project_connections: ~azure.ai.projects.models.ToolProjectConnectionList
+ :ivar search_configurations: The search configurations attached to this tool. There can be a
+ maximum of 1
+ search configuration resource attached to the tool. Required.
+ :vartype search_configurations:
+ list[~azure.ai.projects.models.BingGroundingSearchConfiguration]
+ """
+
+ project_connections: "_models.ToolProjectConnectionList" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The project connections attached to this tool. There can be a maximum of 1 connection
+ resource attached to the tool. Required."""
+ search_configurations: list["_models.BingGroundingSearchConfiguration"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The search configurations attached to this tool. There can be a maximum of 1
+ search configuration resource attached to the tool. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ project_connections: "_models.ToolProjectConnectionList",
+ search_configurations: list["_models.BingGroundingSearchConfiguration"],
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class BlobReference(_Model):
+ """Blob reference details.
+
+ :ivar blob_uri: Blob URI path for client to upload data. Example:
+ `https://blob.core.windows.net/Container/Path `_.
+ Required.
+ :vartype blob_uri: str
+ :ivar storage_account_arm_id: ARM ID of the storage account to use. Required.
+ :vartype storage_account_arm_id: str
+ :ivar credential: Credential info to access the storage account. Required.
+ :vartype credential: ~azure.ai.projects.models.BlobReferenceSasCredential
+ """
+
+ blob_uri: str = rest_field(name="blobUri", visibility=["read", "create", "update", "delete", "query"])
+ """Blob URI path for client to upload data. Example: `https://blob.core.windows.net/Container/Path
+ `_. Required."""
+ storage_account_arm_id: str = rest_field(
+ name="storageAccountArmId", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """ARM ID of the storage account to use. Required."""
+ credential: "_models.BlobReferenceSasCredential" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Credential info to access the storage account. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ blob_uri: str,
+ storage_account_arm_id: str,
+ credential: "_models.BlobReferenceSasCredential",
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class BlobReferenceSasCredential(_Model):
+ """SAS Credential definition.
+
+ :ivar sas_uri: SAS uri. Required.
+ :vartype sas_uri: str
+ :ivar type: Type of credential. Required. Default value is "SAS".
+ :vartype type: str
+ """
+
+ sas_uri: str = rest_field(name="sasUri", visibility=["read"])
+ """SAS uri. Required."""
+ type: Literal["SAS"] = rest_field(visibility=["read"])
+ """Type of credential. Required.
Default value is \"SAS\"."""
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type: Literal["SAS"] = "SAS"
+
+
+class BrowserAutomationAgentTool(Tool, discriminator="browser_automation_preview"):
+ """The input definition information for a Browser Automation Tool, as used to configure an Agent.
+
+ :ivar type: The object type, which is always 'browser_automation_preview'. Required.
+ :vartype type: str or ~azure.ai.projects.models.BROWSER_AUTOMATION_PREVIEW
+ :ivar browser_automation_preview: The Browser Automation Tool parameters. Required.
+ :vartype browser_automation_preview: ~azure.ai.projects.models.BrowserAutomationToolParameters
+ """
+
+ type: Literal[ToolType.BROWSER_AUTOMATION_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+ """The object type, which is always 'browser_automation_preview'. Required."""
+ browser_automation_preview: "_models.BrowserAutomationToolParameters" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The Browser Automation Tool parameters. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ browser_automation_preview: "_models.BrowserAutomationToolParameters",
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolType.BROWSER_AUTOMATION_PREVIEW  # type: ignore
+
+
+class BrowserAutomationToolConnectionParameters(_Model):  # pylint: disable=name-too-long
+ """Definition of input parameters for the connection used by the Browser Automation Tool.
+
+ :ivar id: The ID of the project connection to your Azure Playwright resource. Required.
+ :vartype id: str + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the project connection to your Azure Playwright resource. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrowserAutomationToolParameters(_Model): + """Definition of input parameters for the Browser Automation Tool. + + :ivar project_connection: The project connection parameters associated with the Browser + Automation Tool. Required. + :vartype project_connection: + ~azure.ai.projects.models.BrowserAutomationToolConnectionParameters + """ + + project_connection: "_models.BrowserAutomationToolConnectionParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connection parameters associated with the Browser Automation Tool. Required.""" + + @overload + def __init__( + self, + *, + project_connection: "_models.BrowserAutomationToolConnectionParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CaptureStructuredOutputsTool(Tool, discriminator="capture_structured_outputs"): + """A tool for capturing structured outputs. + + :ivar type: The type of the tool. Always ``capture_structured_outputs``. Required. + :vartype type: str or ~azure.ai.projects.models.CAPTURE_STRUCTURED_OUTPUTS + :ivar outputs: The structured outputs to capture from the model. Required. 
+ :vartype outputs: ~azure.ai.projects.models.StructuredOutputDefinition + """ + + type: Literal[ToolType.CAPTURE_STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the tool. Always ``capture_structured_outputs``. Required.""" + outputs: "_models.StructuredOutputDefinition" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The structured outputs to capture from the model. Required.""" + + @overload + def __init__( + self, + *, + outputs: "_models.StructuredOutputDefinition", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.CAPTURE_STRUCTURED_OUTPUTS # type: ignore + + +class ChartCoordinate(_Model): + """Coordinates for the analysis chart. + + :ivar x: X-axis coordinate. Required. + :vartype x: int + :ivar y: Y-axis coordinate. Required. + :vartype y: int + :ivar size: Size of the chart element. Required. + :vartype size: int + """ + + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """X-axis coordinate. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Y-axis coordinate. Required.""" + size: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Size of the chart element. Required.""" + + @overload + def __init__( + self, + *, + x: int, + y: int, + size: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryItem(_Model): + """A single memory item stored in the memory store, containing content and metadata. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ChatSummaryMemoryItem, UserProfileMemoryItem + + :ivar memory_id: The unique ID of the memory item. Required. + :vartype memory_id: str + :ivar updated_at: The last update time of the memory item. Required. + :vartype updated_at: ~datetime.datetime + :ivar scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :vartype scope: str + :ivar content: The content of the memory. Required. + :vartype content: str + :ivar kind: The kind of the memory item. Required. Known values are: "user_profile" and + "chat_summary". + :vartype kind: str or ~azure.ai.projects.models.MemoryItemKind + """ + + __mapping__: dict[str, _Model] = {} + memory_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the memory item. Required.""" + updated_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """The last update time of the memory item. Required.""" + scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The namespace that logically groups and isolates memories, such as a user ID. Required.""" + content: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content of the memory. Required.""" + kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) + """The kind of the memory item. Required. 
Known values are: \"user_profile\" and \"chat_summary\".""" + + @overload + def __init__( + self, + *, + memory_id: str, + updated_at: datetime.datetime, + scope: str, + content: str, + kind: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ChatSummaryMemoryItem(MemoryItem, discriminator="chat_summary"): + """A memory item containing a summary extracted from conversations. + + :ivar memory_id: The unique ID of the memory item. Required. + :vartype memory_id: str + :ivar updated_at: The last update time of the memory item. Required. + :vartype updated_at: ~datetime.datetime + :ivar scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :vartype scope: str + :ivar content: The content of the memory. Required. + :vartype content: str + :ivar kind: The kind of the memory item. Required. Summary of chat conversations. + :vartype kind: str or ~azure.ai.projects.models.CHAT_SUMMARY + """ + + kind: Literal[MemoryItemKind.CHAT_SUMMARY] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The kind of the memory item. Required. Summary of chat conversations.""" + + @overload + def __init__( + self, + *, + memory_id: str, + updated_at: datetime.datetime, + scope: str, + content: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = MemoryItemKind.CHAT_SUMMARY # type: ignore + + +class ClusterInsightResult(_Model): + """Insights from the cluster analysis. 
+ + :ivar summary: Summary of the insights report. Required. + :vartype summary: ~azure.ai.projects.models.InsightSummary + :ivar clusters: List of clusters identified in the insights. Required. + :vartype clusters: list[~azure.ai.projects.models.InsightCluster] + :ivar coordinates: Optional mapping of IDs to 2D coordinates used by the UX for + visualization. + The map keys are string identifiers (for example, a cluster id or a sample id) + and the values are the coordinates and visual size for rendering on a 2D chart. + This property is omitted unless the client requests coordinates (for example, + by passing ``includeCoordinates=true`` as a query parameter). + Example: + { + "cluster-1": { "x": 12, "y": 34, "size": 8 }, + "sample-123": { "x": 18, "y": 22, "size": 4 } + } + Coordinates are intended only for client-side visualization and do not + modify the canonical insights results. + :vartype coordinates: dict[str, ~azure.ai.projects.models.ChartCoordinate] + """ + + summary: "_models.InsightSummary" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Summary of the insights report. Required.""" + clusters: list["_models.InsightCluster"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of clusters identified in the insights. Required.""" + coordinates: Optional[dict[str, "_models.ChartCoordinate"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """ Optional mapping of IDs to 2D coordinates used by the UX for visualization. + The map keys are string identifiers (for example, a cluster id or a sample id) + and the values are the coordinates and visual size for rendering on a 2D chart. + This property is omitted unless the client requests coordinates (for example, + by passing ``includeCoordinates=true`` as a query parameter). 
+ Example: + { + \"cluster-1\": { \"x\": 12, \"y\": 34, \"size\": 8 }, + \"sample-123\": { \"x\": 18, \"y\": 22, \"size\": 4 } + } + Coordinates are intended only for client-side visualization and do not + modify the canonical insights results.""" + + @overload + def __init__( + self, + *, + summary: "_models.InsightSummary", + clusters: list["_models.InsightCluster"], + coordinates: Optional[dict[str, "_models.ChartCoordinate"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ClusterTokenUsage(_Model): + """Token usage for cluster analysis. + + :ivar input_token_usage: input token usage. Required. + :vartype input_token_usage: int + :ivar output_token_usage: output token usage. Required. + :vartype output_token_usage: int + :ivar total_token_usage: total token usage. Required. + :vartype total_token_usage: int + """ + + input_token_usage: int = rest_field( + name="inputTokenUsage", visibility=["read", "create", "update", "delete", "query"] + ) + """input token usage. Required.""" + output_token_usage: int = rest_field( + name="outputTokenUsage", visibility=["read", "create", "update", "delete", "query"] + ) + """output token usage. Required.""" + total_token_usage: int = rest_field( + name="totalTokenUsage", visibility=["read", "create", "update", "delete", "query"] + ) + """total token usage. Required.""" + + @overload + def __init__( + self, + *, + input_token_usage: int, + output_token_usage: int, + total_token_usage: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluatorDefinition(_Model): + """Base evaluator configuration with discriminator. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CodeBasedEvaluatorDefinition, PromptBasedEvaluatorDefinition + + :ivar type: The type of evaluator definition. Required. Known values are: "prompt", "code", + "prompt_and_code", "service", and "openai_graders". + :vartype type: str or ~azure.ai.projects.models.EvaluatorDefinitionType + :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. + This includes parameters like type, properties, required. + :vartype init_parameters: any + :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This + includes parameters like type, properties, required. + :vartype data_schema: any + :ivar metrics: List of output metrics produced by this evaluator. + :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of evaluator definition. Required. Known values are: \"prompt\", \"code\", + \"prompt_and_code\", \"service\", and \"openai_graders\".""" + init_parameters: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The JSON schema (Draft 2020-12) for the evaluator's input parameters. This includes parameters + like type, properties, required.""" + data_schema: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The JSON schema (Draft 2020-12) for the evaluator's input data. 
This includes parameters like + type, properties, required.""" + metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of output metrics produced by this evaluator.""" + + @overload + def __init__( + self, + *, + type: str, + init_parameters: Optional[Any] = None, + data_schema: Optional[Any] = None, + metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CodeBasedEvaluatorDefinition(EvaluatorDefinition, discriminator="code"): + """Code-based evaluator definition using python code. + + :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. + This includes parameters like type, properties, required. + :vartype init_parameters: any + :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This + includes parameters like type, properties, required. + :vartype data_schema: any + :ivar metrics: List of output metrics produced by this evaluator. + :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] + :ivar type: Required. Code-based definition + :vartype type: str or ~azure.ai.projects.models.CODE + :ivar code_text: Inline code text for the evaluator. Required. + :vartype code_text: str + """ + + type: Literal[EvaluatorDefinitionType.CODE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Code-based definition""" + code_text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Inline code text for the evaluator. 
Required.""" + + @overload + def __init__( + self, + *, + code_text: str, + init_parameters: Optional[Any] = None, + data_schema: Optional[Any] = None, + metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = EvaluatorDefinitionType.CODE # type: ignore + + +class CodeInterpreterOutput(_Model): + """CodeInterpreterOutput. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CodeInterpreterOutputImage, CodeInterpreterOutputLogs + + :ivar type: Required. Known values are: "logs" and "image". + :vartype type: str or ~azure.ai.projects.models.CodeInterpreterOutputType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"logs\" and \"image\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CodeInterpreterOutputImage(CodeInterpreterOutput, discriminator="image"): + """The image output from the code interpreter. + + :ivar type: The type of the output. Always 'image'. Required. + :vartype type: str or ~azure.ai.projects.models.IMAGE + :ivar url: The URL of the image output from the code interpreter. Required. 
+ :vartype url: str + """ + + type: Literal[CodeInterpreterOutputType.IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the output. Always 'image'. Required.""" + url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the image output from the code interpreter. Required.""" + + @overload + def __init__( + self, + *, + url: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CodeInterpreterOutputType.IMAGE # type: ignore + + +class CodeInterpreterOutputLogs(CodeInterpreterOutput, discriminator="logs"): + """The logs output from the code interpreter. + + :ivar type: The type of the output. Always 'logs'. Required. + :vartype type: str or ~azure.ai.projects.models.LOGS + :ivar logs: The logs output from the code interpreter. Required. + :vartype logs: str + """ + + type: Literal[CodeInterpreterOutputType.LOGS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the output. Always 'logs'. Required.""" + logs: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The logs output from the code interpreter. Required.""" + + @overload + def __init__( + self, + *, + logs: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CodeInterpreterOutputType.LOGS # type: ignore + + +class CodeInterpreterTool(Tool, discriminator="code_interpreter"): + """A tool that runs Python code to help generate a response to a prompt. + + :ivar type: The type of the code interpreter tool. Always ``code_interpreter``. Required. + :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER + :ivar container: The code interpreter container. Can be a container ID or an object that + specifies uploaded file IDs to make available to your code. Required. Is either a str type or a + CodeInterpreterToolAuto type. + :vartype container: str or ~azure.ai.projects.models.CodeInterpreterToolAuto + """ + + type: Literal[ToolType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the code interpreter tool. Always ``code_interpreter``. Required.""" + container: Union[str, "_models.CodeInterpreterToolAuto"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The code interpreter container. Can be a container ID or an object that + specifies uploaded file IDs to make available to your code. Required. Is either a str type or a + CodeInterpreterToolAuto type.""" + + @overload + def __init__( + self, + *, + container: Union[str, "_models.CodeInterpreterToolAuto"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.CODE_INTERPRETER # type: ignore + + +class CodeInterpreterToolAuto(_Model): + """Configuration for a code interpreter container. Optionally specify the IDs + of the files to run the code on. 
+ + :ivar type: Always ``auto``. Required. Default value is "auto". + :vartype type: str + :ivar file_ids: An optional list of uploaded files to make available to your code. + :vartype file_ids: list[str] + """ + + type: Literal["auto"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Always ``auto``. Required. Default value is \"auto\".""" + file_ids: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An optional list of uploaded files to make available to your code.""" + + @overload + def __init__( + self, + *, + file_ids: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type: Literal["auto"] = "auto" + + +class ItemParam(_Model): + """Content item used to generate a response. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CodeInterpreterToolCallItemParam, ComputerToolCallItemParam, ComputerToolCallOutputItemParam, + FileSearchToolCallItemParam, FunctionToolCallItemParam, FunctionToolCallOutputItemParam, + ImageGenToolCallItemParam, ItemReferenceItemParam, LocalShellToolCallItemParam, + LocalShellToolCallOutputItemParam, MCPApprovalRequestItemParam, MCPApprovalResponseItemParam, + MCPCallItemParam, MCPListToolsItemParam, MemorySearchToolCallItemParam, + ResponsesMessageItemParam, ReasoningItemParam, WebSearchToolCallItemParam + + :ivar type: Required. 
Known values are: "message", "file_search_call", "function_call", + "function_call_output", "computer_call", "computer_call_output", "web_search_call", + "reasoning", "item_reference", "image_generation_call", "code_interpreter_call", + "local_shell_call", "local_shell_call_output", "mcp_list_tools", "mcp_approval_request", + "mcp_approval_response", "mcp_call", "structured_outputs", "workflow_action", + "memory_search_call", and "oauth_consent_request". + :vartype type: str or ~azure.ai.projects.models.ItemType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"message\", \"file_search_call\", \"function_call\", + \"function_call_output\", \"computer_call\", \"computer_call_output\", \"web_search_call\", + \"reasoning\", \"item_reference\", \"image_generation_call\", \"code_interpreter_call\", + \"local_shell_call\", \"local_shell_call_output\", \"mcp_list_tools\", + \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"structured_outputs\", + \"workflow_action\", \"memory_search_call\", and \"oauth_consent_request\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CodeInterpreterToolCallItemParam(ItemParam, discriminator="code_interpreter_call"): + """A tool call to run code. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL + :ivar container_id: The ID of the container used to run the code. Required. + :vartype container_id: str + :ivar code: The code to run, or null if not available. Required. 
+ :vartype code: str + :ivar outputs: The outputs generated by the code interpreter, such as logs or images. + Can be null if no outputs are available. Required. + :vartype outputs: list[~azure.ai.projects.models.CodeInterpreterOutput] + """ + + type: Literal[ItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the container used to run the code. Required.""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The code to run, or null if not available. Required.""" + outputs: list["_models.CodeInterpreterOutput"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The outputs generated by the code interpreter, such as logs or images. + Can be null if no outputs are available. Required.""" + + @overload + def __init__( + self, + *, + container_id: str, + code: str, + outputs: list["_models.CodeInterpreterOutput"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.CODE_INTERPRETER_CALL # type: ignore + + +class ItemResource(_Model): + """Content item used to generate a response. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + CodeInterpreterToolCallItemResource, ComputerToolCallItemResource, + ComputerToolCallOutputItemResource, FileSearchToolCallItemResource, + FunctionToolCallItemResource, FunctionToolCallOutputItemResource, ImageGenToolCallItemResource, + LocalShellToolCallItemResource, LocalShellToolCallOutputItemResource, + MCPApprovalRequestItemResource, MCPApprovalResponseItemResource, MCPCallItemResource, + MCPListToolsItemResource, MemorySearchToolCallItemResource, ResponsesMessageItemResource, + OAuthConsentRequestItemResource, ReasoningItemResource, StructuredOutputsItemResource, + WebSearchToolCallItemResource, WorkflowActionOutputItemResource + + :ivar type: Required. Known values are: "message", "file_search_call", "function_call", + "function_call_output", "computer_call", "computer_call_output", "web_search_call", + "reasoning", "item_reference", "image_generation_call", "code_interpreter_call", + "local_shell_call", "local_shell_call_output", "mcp_list_tools", "mcp_approval_request", + "mcp_approval_response", "mcp_call", "structured_outputs", "workflow_action", + "memory_search_call", and "oauth_consent_request". + :vartype type: str or ~azure.ai.projects.models.ItemType + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. 
Known values are: \"message\", \"file_search_call\", \"function_call\", + \"function_call_output\", \"computer_call\", \"computer_call_output\", \"web_search_call\", + \"reasoning\", \"item_reference\", \"image_generation_call\", \"code_interpreter_call\", + \"local_shell_call\", \"local_shell_call_output\", \"mcp_list_tools\", + \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"structured_outputs\", + \"workflow_action\", \"memory_search_call\", and \"oauth_consent_request\".""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + created_by: Optional["_models.CreatedBy"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The information about the creator of the item.""" + + @overload + def __init__( + self, + *, + type: str, + id: str, # pylint: disable=redefined-builtin + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CodeInterpreterToolCallItemResource(ItemResource, discriminator="code_interpreter_call"): + """A tool call to run code. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL + :ivar status: Required. Is one of the following types: Literal["in_progress"], + Literal["completed"], Literal["incomplete"], Literal["interpreting"], Literal["failed"] + :vartype status: str or str or str or str or str + :ivar container_id: The ID of the container used to run the code. Required. + :vartype container_id: str + :ivar code: The code to run, or null if not available. 
Required. + :vartype code: str + :ivar outputs: The outputs generated by the code interpreter, such as logs or images. + Can be null if no outputs are available. Required. + :vartype outputs: list[~azure.ai.projects.models.CodeInterpreterOutput] + """ + + type: Literal[ItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required. Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], + Literal[\"incomplete\"], Literal[\"interpreting\"], Literal[\"failed\"]""" + container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the container used to run the code. Required.""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The code to run, or null if not available. Required.""" + outputs: list["_models.CodeInterpreterOutput"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The outputs generated by the code interpreter, such as logs or images. + Can be null if no outputs are available. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"], + container_id: str, + code: str, + outputs: list["_models.CodeInterpreterOutput"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.CODE_INTERPRETER_CALL # type: ignore + + +class ComparisonFilter(_Model): + """A filter used to compare a specified attribute key to a given value using a defined comparison + operation. + + :ivar type: Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``, + ``lte``. + * `eq`: equals + * `ne`: not equal + * `gt`: greater than + * `gte`: greater than or equal + * `lt`: less than + * `lte`: less than or equal. Required. Is one of the following types: Literal["eq"], + Literal["ne"], Literal["gt"], Literal["gte"], Literal["lt"], Literal["lte"] + :vartype type: str or str or str or str or str or str + :ivar key: The key to compare against the value. Required. + :vartype key: str + :ivar value: The value to compare against the attribute key; supports string, number, or + boolean types. Required. Is one of the following types: str, float, bool + :vartype value: str or float or bool + """ + + type: Literal["eq", "ne", "gt", "gte", "lt", "lte"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``, ``lte``. + * `eq`: equals + * `ne`: not equal + * `gt`: greater than + * `gte`: greater than or equal + * `lt`: less than + * `lte`: less than or equal. Required. Is one of the following types: Literal[\"eq\"], + Literal[\"ne\"], Literal[\"gt\"], Literal[\"gte\"], Literal[\"lt\"], Literal[\"lte\"]""" + key: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The key to compare against the value. Required.""" + value: Union[str, float, bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The value to compare against the attribute key; supports string, number, or boolean types. + Required. 
Is one of the following types: str, float, bool""" + + @overload + def __init__( + self, + *, + type: Literal["eq", "ne", "gt", "gte", "lt", "lte"], + key: str, + value: Union[str, float, bool], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CompoundFilter(_Model): + """Combine multiple filters using ``and`` or ``or``. + + :ivar type: Type of operation: ``and`` or ``or``. Required. Is either a Literal["and"] type or + a Literal["or"] type. + :vartype type: str or str + :ivar filters: Array of filters to combine. Items can be ``ComparisonFilter`` or + ``CompoundFilter``. Required. + :vartype filters: list[~azure.ai.projects.models.ComparisonFilter or + ~azure.ai.projects.models.CompoundFilter] + """ + + type: Literal["and", "or"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Type of operation: ``and`` or ``or``. Required. Is either a Literal[\"and\"] type or a + Literal[\"or\"] type.""" + filters: list[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Array of filters to combine. Items can be ``ComparisonFilter`` or ``CompoundFilter``. Required.""" + + @overload + def __init__( + self, + *, + type: Literal["and", "or"], + filters: list[Union["_models.ComparisonFilter", "_models.CompoundFilter"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ComputerAction(_Model): + """ComputerAction. 
+ + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ComputerActionClick, ComputerActionDoubleClick, ComputerActionDrag, ComputerActionKeyPress, + ComputerActionMove, ComputerActionScreenshot, ComputerActionScroll, ComputerActionTypeKeys, + ComputerActionWait + + :ivar type: Required. Known values are: "screenshot", "click", "double_click", "scroll", + "type", "wait", "keypress", "drag", and "move". + :vartype type: str or ~azure.ai.projects.models.ComputerActionType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"screenshot\", \"click\", \"double_click\", \"scroll\", \"type\", + \"wait\", \"keypress\", \"drag\", and \"move\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ComputerActionClick(ComputerAction, discriminator="click"): + """A click action. + + :ivar type: Specifies the event type. For a click action, this property is + always set to ``click``. Required. + :vartype type: str or ~azure.ai.projects.models.CLICK + :ivar button: Indicates which mouse button was pressed during the click. One of ``left``, + ``right``, ``wheel``, ``back``, or ``forward``. Required. Is one of the following types: + Literal["left"], Literal["right"], Literal["wheel"], Literal["back"], Literal["forward"] + :vartype button: str or str or str or str or str + :ivar x: The x-coordinate where the click occurred. Required. + :vartype x: int + :ivar y: The y-coordinate where the click occurred. Required. 
+ :vartype y: int + """ + + type: Literal[ComputerActionType.CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a click action, this property is + always set to ``click``. Required.""" + button: Literal["left", "right", "wheel", "back", "forward"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates which mouse button was pressed during the click. One of ``left``, ``right``, + ``wheel``, ``back``, or ``forward``. Required. Is one of the following types: + Literal[\"left\"], Literal[\"right\"], Literal[\"wheel\"], Literal[\"back\"], + Literal[\"forward\"]""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate where the click occurred. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate where the click occurred. Required.""" + + @overload + def __init__( + self, + *, + button: Literal["left", "right", "wheel", "back", "forward"], + x: int, + y: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.CLICK # type: ignore + + +class ComputerActionDoubleClick(ComputerAction, discriminator="double_click"): + """A double click action. + + :ivar type: Specifies the event type. For a double click action, this property is + always set to ``double_click``. Required. + :vartype type: str or ~azure.ai.projects.models.DOUBLE_CLICK + :ivar x: The x-coordinate where the double click occurred. Required. + :vartype x: int + :ivar y: The y-coordinate where the double click occurred. Required. 
+ :vartype y: int + """ + + type: Literal[ComputerActionType.DOUBLE_CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a double click action, this property is + always set to ``double_click``. Required.""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate where the double click occurred. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate where the double click occurred. Required.""" + + @overload + def __init__( + self, + *, + x: int, + y: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.DOUBLE_CLICK # type: ignore + + +class ComputerActionDrag(ComputerAction, discriminator="drag"): + """A drag action. + + :ivar type: Specifies the event type. For a drag action, this property is + always set to ``drag``. Required. + :vartype type: str or ~azure.ai.projects.models.DRAG + :ivar path: An array of coordinates representing the path of the drag action. Coordinates will + appear as an array + of objects, eg + .. code-block:: + [ + { x: 100, y: 200 }, + { x: 200, y: 300 } + ]. Required. + :vartype path: list[~azure.ai.projects.models.Coordinate] + """ + + type: Literal[ComputerActionType.DRAG] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a drag action, this property is + always set to ``drag``. Required.""" + path: list["_models.Coordinate"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of coordinates representing the path of the drag action. 
Coordinates will appear as an + array + of objects, eg + .. code-block:: + [ + { x: 100, y: 200 }, + { x: 200, y: 300 } + ]. Required.""" + + @overload + def __init__( + self, + *, + path: list["_models.Coordinate"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.DRAG # type: ignore + + +class ComputerActionKeyPress(ComputerAction, discriminator="keypress"): + """A collection of keypresses the model would like to perform. + + :ivar type: Specifies the event type. For a keypress action, this property is + always set to ``keypress``. Required. + :vartype type: str or ~azure.ai.projects.models.KEYPRESS + :ivar keys_property: The combination of keys the model is requesting to be pressed. This is an + array of strings, each representing a key. Required. + :vartype keys_property: list[str] + """ + + type: Literal[ComputerActionType.KEYPRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a keypress action, this property is + always set to ``keypress``. Required.""" + keys_property: list[str] = rest_field(name="keys", visibility=["read", "create", "update", "delete", "query"]) + """The combination of keys the model is requesting to be pressed. This is an + array of strings, each representing a key. Required.""" + + @overload + def __init__( + self, + *, + keys_property: list[str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.KEYPRESS # type: ignore + + +class ComputerActionMove(ComputerAction, discriminator="move"): + """A mouse move action. + + :ivar type: Specifies the event type. For a move action, this property is + always set to ``move``. Required. + :vartype type: str or ~azure.ai.projects.models.MOVE + :ivar x: The x-coordinate to move to. Required. + :vartype x: int + :ivar y: The y-coordinate to move to. Required. + :vartype y: int + """ + + type: Literal[ComputerActionType.MOVE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a move action, this property is + always set to ``move``. Required.""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate to move to. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate to move to. Required.""" + + @overload + def __init__( + self, + *, + x: int, + y: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.MOVE # type: ignore + + +class ComputerActionScreenshot(ComputerAction, discriminator="screenshot"): + """A screenshot action. + + :ivar type: Specifies the event type. For a screenshot action, this property is + always set to ``screenshot``. Required. 
+ :vartype type: str or ~azure.ai.projects.models.SCREENSHOT + """ + + type: Literal[ComputerActionType.SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a screenshot action, this property is + always set to ``screenshot``. Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.SCREENSHOT # type: ignore + + +class ComputerActionScroll(ComputerAction, discriminator="scroll"): + """A scroll action. + + :ivar type: Specifies the event type. For a scroll action, this property is + always set to ``scroll``. Required. + :vartype type: str or ~azure.ai.projects.models.SCROLL + :ivar x: The x-coordinate where the scroll occurred. Required. + :vartype x: int + :ivar y: The y-coordinate where the scroll occurred. Required. + :vartype y: int + :ivar scroll_x: The horizontal scroll distance. Required. + :vartype scroll_x: int + :ivar scroll_y: The vertical scroll distance. Required. + :vartype scroll_y: int + """ + + type: Literal[ComputerActionType.SCROLL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a scroll action, this property is + always set to ``scroll``. Required.""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate where the scroll occurred. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate where the scroll occurred. 
Required.""" + scroll_x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The horizontal scroll distance. Required.""" + scroll_y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The vertical scroll distance. Required.""" + + @overload + def __init__( + self, + *, + x: int, + y: int, + scroll_x: int, + scroll_y: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.SCROLL # type: ignore + + +class ComputerActionTypeKeys(ComputerAction, discriminator="type"): + """An action to type in text. + + :ivar type: Specifies the event type. For a type action, this property is + always set to ``type``. Required. + :vartype type: str or ~azure.ai.projects.models.TYPE + :ivar text: The text to type. Required. + :vartype text: str + """ + + type: Literal[ComputerActionType.TYPE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a type action, this property is + always set to ``type``. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text to type. Required.""" + + @overload + def __init__( + self, + *, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.TYPE # type: ignore + + +class ComputerActionWait(ComputerAction, discriminator="wait"): + """A wait action. + + :ivar type: Specifies the event type. 
For a wait action, this property is + always set to ``wait``. Required. + :vartype type: str or ~azure.ai.projects.models.WAIT + """ + + type: Literal[ComputerActionType.WAIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a wait action, this property is + always set to ``wait``. Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.WAIT # type: ignore + + +class ComputerToolCallItemParam(ItemParam, discriminator="computer_call"): + """A tool call to a computer use tool. See the + `computer use guide `_ for more information. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL + :ivar call_id: An identifier used when responding to the tool call with output. Required. + :vartype call_id: str + :ivar action: Required. + :vartype action: ~azure.ai.projects.models.ComputerAction + :ivar pending_safety_checks: The pending safety checks for the computer call. Required. + :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] + """ + + type: Literal[ItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An identifier used when responding to the tool call with output. 
Required.""" + action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The pending safety checks for the computer call. Required.""" + + @overload + def __init__( + self, + *, + call_id: str, + action: "_models.ComputerAction", + pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.COMPUTER_CALL # type: ignore + + +class ComputerToolCallItemResource(ItemResource, discriminator="computer_call"): + """A tool call to a computer use tool. See the + `computer use guide `_ for more information. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar call_id: An identifier used when responding to the tool call with output. Required. + :vartype call_id: str + :ivar action: Required. + :vartype action: ~azure.ai.projects.models.ComputerAction + :ivar pending_safety_checks: The pending safety checks for the computer call. Required. 
+ :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] + """ + + type: Literal[ItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An identifier used when responding to the tool call with output. Required.""" + action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The pending safety checks for the computer call. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + call_id: str, + action: "_models.ComputerAction", + pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.COMPUTER_CALL # type: ignore + + +class ComputerToolCallOutputItemOutput(_Model): + """ComputerToolCallOutputItemOutput. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + ComputerToolCallOutputItemOutputComputerScreenshot + + :ivar type: Required. "computer_screenshot" + :vartype type: str or ~azure.ai.projects.models.ComputerToolCallOutputItemOutputType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. \"computer_screenshot\"""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ComputerToolCallOutputItemOutputComputerScreenshot( + ComputerToolCallOutputItemOutput, discriminator="computer_screenshot" +): # pylint: disable=name-too-long + """ComputerToolCallOutputItemOutputComputerScreenshot. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.SCREENSHOT + :ivar image_url: + :vartype image_url: str + :ivar file_id: + :vartype file_id: str + """ + + type: Literal[ComputerToolCallOutputItemOutputType.SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + + @overload + def __init__( + self, + *, + image_url: Optional[str] = None, + file_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerToolCallOutputItemOutputType.SCREENSHOT # type: ignore + + +class ComputerToolCallOutputItemParam(ItemParam, discriminator="computer_call_output"): + """The output of a computer tool call. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT + :ivar call_id: The ID of the computer tool call that produced the output. Required. + :vartype call_id: str + :ivar acknowledged_safety_checks: The safety checks reported by the API that have been + acknowledged by the + developer. + :vartype acknowledged_safety_checks: + list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] + :ivar output: Required. + :vartype output: ~azure.ai.projects.models.ComputerToolCallOutputItemOutput + """ + + type: Literal[ItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the computer tool call that produced the output. Required.""" + acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The safety checks reported by the API that have been acknowledged by the + developer.""" + output: "_models.ComputerToolCallOutputItemOutput" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" + + @overload + def __init__( + self, + *, + call_id: str, + output: "_models.ComputerToolCallOutputItemOutput", + acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.COMPUTER_CALL_OUTPUT # type: ignore + + +class ComputerToolCallOutputItemResource(ItemResource, discriminator="computer_call_output"): + """The output of a computer tool call. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar call_id: The ID of the computer tool call that produced the output. Required. + :vartype call_id: str + :ivar acknowledged_safety_checks: The safety checks reported by the API that have been + acknowledged by the + developer. + :vartype acknowledged_safety_checks: + list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] + :ivar output: Required. + :vartype output: ~azure.ai.projects.models.ComputerToolCallOutputItemOutput + """ + + type: Literal[ItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. 
Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the computer tool call that produced the output. Required.""" + acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The safety checks reported by the API that have been acknowledged by the + developer.""" + output: "_models.ComputerToolCallOutputItemOutput" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + call_id: str, + output: "_models.ComputerToolCallOutputItemOutput", + created_by: Optional["_models.CreatedBy"] = None, + acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.COMPUTER_CALL_OUTPUT # type: ignore + + +class ComputerToolCallSafetyCheck(_Model): + """A pending safety check for the computer call. + + :ivar id: The ID of the pending safety check. Required. + :vartype id: str + :ivar code: The type of the pending safety check. Required. + :vartype code: str + :ivar message: Details about the pending safety check. Required. + :vartype message: str + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the pending safety check. 
Required.""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the pending safety check. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Details about the pending safety check. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + code: str, + message: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ComputerUsePreviewTool(Tool, discriminator="computer_use_preview"): + """A tool that controls a virtual computer. Learn more about the `computer tool + `_. + + :ivar type: The type of the computer use tool. Always ``computer_use_preview``. Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_USE_PREVIEW + :ivar environment: The type of computer environment to control. Required. Is one of the + following types: Literal["windows"], Literal["mac"], Literal["linux"], Literal["ubuntu"], + Literal["browser"] + :vartype environment: str or str or str or str or str + :ivar display_width: The width of the computer display. Required. + :vartype display_width: int + :ivar display_height: The height of the computer display. Required. + :vartype display_height: int + """ + + type: Literal[ToolType.COMPUTER_USE_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the computer use tool. Always ``computer_use_preview``. Required.""" + environment: Literal["windows", "mac", "linux", "ubuntu", "browser"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The type of computer environment to control. Required. 
Is one of the following types: + Literal[\"windows\"], Literal[\"mac\"], Literal[\"linux\"], Literal[\"ubuntu\"], + Literal[\"browser\"]""" + display_width: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The width of the computer display. Required.""" + display_height: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The height of the computer display. Required.""" + + @overload + def __init__( + self, + *, + environment: Literal["windows", "mac", "linux", "ubuntu", "browser"], + display_width: int, + display_height: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.COMPUTER_USE_PREVIEW # type: ignore + + +class Connection(_Model): + """Response from the list and get connections operations. + + :ivar name: The friendly name of the connection, provided by the user. Required. + :vartype name: str + :ivar id: A unique identifier for the connection, generated by the service. Required. + :vartype id: str + :ivar type: Category of the connection. Required. Known values are: "AzureOpenAI", "AzureBlob", + "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", "AppConfig", "AppInsights", + "CustomKeys", and "RemoteTool". + :vartype type: str or ~azure.ai.projects.models.ConnectionType + :ivar target: The connection URL to be used for this service. Required. + :vartype target: str + :ivar is_default: Whether the connection is tagged as the default connection of its type. + Required. + :vartype is_default: bool + :ivar credentials: The credentials used by the connection. Required. + :vartype credentials: ~azure.ai.projects.models.BaseCredentials + :ivar metadata: Metadata of the connection. Required. 
+ :vartype metadata: dict[str, str] + """ + + name: str = rest_field(visibility=["read"]) + """The friendly name of the connection, provided by the user. Required.""" + id: str = rest_field(visibility=["read"]) + """A unique identifier for the connection, generated by the service. Required.""" + type: Union[str, "_models.ConnectionType"] = rest_field(visibility=["read"]) + """Category of the connection. Required. Known values are: \"AzureOpenAI\", \"AzureBlob\", + \"AzureStorageAccount\", \"CognitiveSearch\", \"CosmosDB\", \"ApiKey\", \"AppConfig\", + \"AppInsights\", \"CustomKeys\", and \"RemoteTool\".""" + target: str = rest_field(visibility=["read"]) + """The connection URL to be used for this service. Required.""" + is_default: bool = rest_field(name="isDefault", visibility=["read"]) + """Whether the connection is tagged as the default connection of its type. Required.""" + credentials: "_models.BaseCredentials" = rest_field(visibility=["read"]) + """The credentials used by the connection. Required.""" + metadata: dict[str, str] = rest_field(visibility=["read"]) + """Metadata of the connection. Required.""" + + +class ContainerAppAgentDefinition(AgentDefinition, discriminator="container_app"): + """The container app agent definition. + + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.CONTAINER_APP + :ivar container_protocol_versions: The protocols that the agent supports for ingress + communication of the containers. Required. + :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] + :ivar container_app_resource_id: The resource ID of the Azure Container App that hosts this + agent. Not mutable across versions. Required. 
+ :vartype container_app_resource_id: str + :ivar ingress_subdomain_suffix: The suffix to apply to the app subdomain when sending ingress + to the agent. This can be a label (e.g., '---current'), a specific revision (e.g., + '--0000001'), or empty to use the default endpoint for the container app. Required. + :vartype ingress_subdomain_suffix: str + """ + + kind: Literal[AgentKind.CONTAINER_APP] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The protocols that the agent supports for ingress communication of the containers. Required.""" + container_app_resource_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The resource ID of the Azure Container App that hosts this agent. Not mutable across versions. + Required.""" + ingress_subdomain_suffix: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The suffix to apply to the app subdomain when sending ingress to the agent. This can be a label + (e.g., '---current'), a specific revision (e.g., '--0000001'), or empty to use the default + endpoint for the container app. Required.""" + + @overload + def __init__( + self, + *, + container_protocol_versions: list["_models.ProtocolVersionRecord"], + container_app_resource_id: str, + ingress_subdomain_suffix: str, + rai_config: Optional["_models.RaiConfig"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = AgentKind.CONTAINER_APP # type: ignore + + +class EvaluationRuleAction(_Model): + """Evaluation action model. 
+ + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ContinuousEvaluationRuleAction, HumanEvaluationRuleAction + + :ivar type: Type of the evaluation action. Required. Known values are: "continuousEvaluation" + and "humanEvaluation". + :vartype type: str or ~azure.ai.projects.models.EvaluationRuleActionType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the evaluation action. Required. Known values are: \"continuousEvaluation\" and + \"humanEvaluation\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ContinuousEvaluationRuleAction(EvaluationRuleAction, discriminator="continuousEvaluation"): + """Evaluation rule action for continuous evaluation. + + :ivar type: Required. Continuous evaluation. + :vartype type: str or ~azure.ai.projects.models.CONTINUOUS_EVALUATION + :ivar eval_id: Eval Id to add continuous evaluation runs to. Required. + :vartype eval_id: str + :ivar max_hourly_runs: Maximum number of evaluation runs allowed per hour. + :vartype max_hourly_runs: int + """ + + type: Literal[EvaluationRuleActionType.CONTINUOUS_EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Continuous evaluation.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Eval Id to add continuous evaluation runs to. 
Required.""" + max_hourly_runs: Optional[int] = rest_field( + name="maxHourlyRuns", visibility=["read", "create", "update", "delete", "query"] + ) + """Maximum number of evaluation runs allowed per hour.""" + + @overload + def __init__( + self, + *, + eval_id: str, + max_hourly_runs: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = EvaluationRuleActionType.CONTINUOUS_EVALUATION # type: ignore + + +class Coordinate(_Model): + """An x/y coordinate pair, e.g. ``{ x: 100, y: 200 }``. + + :ivar x: The x-coordinate. Required. + :vartype x: int + :ivar y: The y-coordinate. Required. + :vartype y: int + """ + + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate. Required.""" + + @overload + def __init__( + self, + *, + x: int, + y: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CosmosDBIndex(Index, discriminator="CosmosDBNoSqlVectorStore"): + """CosmosDB Vector Store Index Definition. + + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. 
+ :vartype tags: dict[str, str] + :ivar type: Type of index. Required. CosmosDB + :vartype type: str or ~azure.ai.projects.models.COSMOS_DB + :ivar connection_name: Name of connection to CosmosDB. Required. + :vartype connection_name: str + :ivar database_name: Name of the CosmosDB Database. Required. + :vartype database_name: str + :ivar container_name: Name of CosmosDB Container. Required. + :vartype container_name: str + :ivar embedding_configuration: Embedding model configuration. Required. + :vartype embedding_configuration: ~azure.ai.projects.models.EmbeddingConfiguration + :ivar field_mapping: Field mapping configuration. Required. + :vartype field_mapping: ~azure.ai.projects.models.FieldMapping + """ + + type: Literal[IndexType.COSMOS_DB] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of index. Required. CosmosDB""" + connection_name: str = rest_field(name="connectionName", visibility=["create"]) + """Name of connection to CosmosDB. Required.""" + database_name: str = rest_field(name="databaseName", visibility=["create"]) + """Name of the CosmosDB Database. Required.""" + container_name: str = rest_field(name="containerName", visibility=["create"]) + """Name of CosmosDB Container. Required.""" + embedding_configuration: "_models.EmbeddingConfiguration" = rest_field( + name="embeddingConfiguration", visibility=["create"] + ) + """Embedding model configuration. Required.""" + field_mapping: "_models.FieldMapping" = rest_field(name="fieldMapping", visibility=["create"]) + """Field mapping configuration. Required.""" + + @overload + def __init__( + self, + *, + connection_name: str, + database_name: str, + container_name: str, + embedding_configuration: "_models.EmbeddingConfiguration", + field_mapping: "_models.FieldMapping", + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = IndexType.COSMOS_DB # type: ignore + + +class CreatedBy(_Model): + """CreatedBy. + + :ivar agent: The agent that created the item. + :vartype agent: ~azure.ai.projects.models.AgentId + :ivar response_id: The response on which the item is created. + :vartype response_id: str + """ + + agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The agent that created the item.""" + response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response on which the item is created.""" + + @overload + def __init__( + self, + *, + agent: Optional["_models.AgentId"] = None, + response_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Trigger(_Model): + """Base model for Trigger of the schedule. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CronTrigger, OneTimeTrigger, RecurrenceTrigger + + :ivar type: Type of the trigger. Required. Known values are: "Cron", "Recurrence", and + "OneTime". + :vartype type: str or ~azure.ai.projects.models.TriggerType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the trigger. Required. Known values are: \"Cron\", \"Recurrence\", and \"OneTime\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CronTrigger(Trigger, discriminator="Cron"): + """Cron based trigger. + + :ivar type: Required. Cron based trigger. + :vartype type: str or ~azure.ai.projects.models.CRON + :ivar expression: Cron expression that defines the schedule frequency. Required. + :vartype expression: str + :ivar time_zone: Time zone for the cron schedule. + :vartype time_zone: str + :ivar start_time: Start time for the cron schedule in ISO 8601 format. + :vartype start_time: str + :ivar end_time: End time for the cron schedule in ISO 8601 format. + :vartype end_time: str + """ + + type: Literal[TriggerType.CRON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Cron based trigger.""" + expression: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Cron expression that defines the schedule frequency. Required.""" + time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) + """Time zone for the cron schedule.""" + start_time: Optional[str] = rest_field(name="startTime", visibility=["read", "create", "update", "delete", "query"]) + """Start time for the cron schedule in ISO 8601 format.""" + end_time: Optional[str] = rest_field(name="endTime", visibility=["read", "create", "update", "delete", "query"]) + """End time for the cron schedule in ISO 8601 format.""" + + @overload + def __init__( + self, + *, + expression: str, + time_zone: Optional[str] = None, + start_time: Optional[str] = None, + end_time: Optional[str] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = TriggerType.CRON # type: ignore + + +class CustomCredential(BaseCredentials, discriminator="CustomKeys"): + """Custom credential definition. + + :ivar type: The credential type. Required. Custom credential + :vartype type: str or ~azure.ai.projects.models.CUSTOM + """ + + type: Literal[CredentialType.CUSTOM] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. Custom credential""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.CUSTOM # type: ignore + + +class RecurrenceSchedule(_Model): + """Recurrence schedule model. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + DailyRecurrenceSchedule, HourlyRecurrenceSchedule, MonthlyRecurrenceSchedule, + WeeklyRecurrenceSchedule + + :ivar type: Recurrence type for the recurrence schedule. Required. Known values are: "Hourly", + "Daily", "Weekly", and "Monthly". + :vartype type: str or ~azure.ai.projects.models.RecurrenceType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Recurrence type for the recurrence schedule. Required. Known values are: \"Hourly\", \"Daily\", + \"Weekly\", and \"Monthly\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DailyRecurrenceSchedule(RecurrenceSchedule, discriminator="Daily"): + """Daily recurrence schedule. + + :ivar type: Daily recurrence type. Required. Daily recurrence pattern. + :vartype type: str or ~azure.ai.projects.models.DAILY + :ivar hours: Hours for the recurrence schedule. Required. + :vartype hours: list[int] + """ + + type: Literal[RecurrenceType.DAILY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Daily recurrence type. Required. Daily recurrence pattern.""" + hours: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Hours for the recurrence schedule. Required.""" + + @overload + def __init__( + self, + *, + hours: list[int], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = RecurrenceType.DAILY # type: ignore + + +class DatasetCredential(_Model): + """Represents a reference to a blob for consumption. + + :ivar blob_reference: Credential info to access the storage account. Required. + :vartype blob_reference: ~azure.ai.projects.models.BlobReference + """ + + blob_reference: "_models.BlobReference" = rest_field( + name="blobReference", visibility=["read", "create", "update", "delete", "query"] + ) + """Credential info to access the storage account. Required.""" + + @overload + def __init__( + self, + *, + blob_reference: "_models.BlobReference", + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DatasetVersion(_Model): + """DatasetVersion Definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FileDatasetVersion, FolderDatasetVersion + + :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required. + :vartype data_uri: str + :ivar type: Dataset type. Required. Known values are: "uri_file" and "uri_folder". + :vartype type: str or ~azure.ai.projects.models.DatasetType + :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset + manages storage itself. If true, the underlying data will not be deleted when the dataset + version is deleted. + :vartype is_reference: bool + :ivar connection_name: The Azure Storage Account connection name. Required if + startPendingUploadVersion was not called before creating the Dataset. + :vartype connection_name: str + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + """ + + __mapping__: dict[str, _Model] = {} + data_uri: str = rest_field(name="dataUri", visibility=["read", "create"]) + """URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required.""" + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Dataset type. Required. 
Known values are: \"uri_file\" and \"uri_folder\".""" + is_reference: Optional[bool] = rest_field(name="isReference", visibility=["read"]) + """Indicates if the dataset holds a reference to the storage, or the dataset manages storage + itself. If true, the underlying data will not be deleted when the dataset version is deleted.""" + connection_name: Optional[str] = rest_field(name="connectionName", visibility=["read", "create"]) + """The Azure Storage Account connection name. Required if startPendingUploadVersion was not called + before creating the Dataset.""" + id: Optional[str] = rest_field(visibility=["read"]) + """Asset ID, a unique identifier for the asset.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["create", "update"]) + """The asset description text.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) + """Tag dictionary. Tags can be added, removed, and updated.""" + + @overload + def __init__( + self, + *, + data_uri: str, + type: str, + connection_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DeleteAgentResponse(_Model): + """A deleted agent Object. + + :ivar object: The object type. Always 'agent.deleted'. Required. Default value is + "agent.deleted". + :vartype object: str + :ivar name: The name of the agent. Required. + :vartype name: str + :ivar deleted: Whether the agent was successfully deleted. Required. 
+ :vartype deleted: bool
+ """
+
+ object: Literal["agent.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The object type. Always 'agent.deleted'. Required. Default value is \"agent.deleted\"."""
+ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The name of the agent. Required."""
+ deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Whether the agent was successfully deleted. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ name: str,
+ deleted: bool,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.object: Literal["agent.deleted"] = "agent.deleted"
+
+
+class DeleteAgentVersionResponse(_Model):
+ """A deleted agent version Object.
+
+ :ivar object: The object type. Always 'agent.version.deleted'. Required. Default value is
+ "agent.version.deleted".
+ :vartype object: str
+ :ivar name: The name of the agent. Required.
+ :vartype name: str
+ :ivar version: The version identifier of the agent. Required.
+ :vartype version: str
+ :ivar deleted: Whether the agent was successfully deleted. Required.
+ :vartype deleted: bool
+ """
+
+ object: Literal["agent.version.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The object type. Always 'agent.version.deleted'. Required. Default value is \"agent.version.deleted\"."""
+ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The name of the agent. Required."""
+ version: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The version identifier of the agent. 
Required.""" + deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the agent was successfully deleted. Required.""" + + @overload + def __init__( + self, + *, + name: str, + version: str, + deleted: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["agent.version.deleted"] = "agent.version.deleted" + + +class DeleteMemoryStoreResponse(_Model): + """DeleteMemoryStoreResponse. + + :ivar object: The object type. Always 'memory_store.deleted'. Required. Default value is + "memory_store.deleted". + :vartype object: str + :ivar name: The name of the memory store. Required. + :vartype name: str + :ivar deleted: Whether the memory store was successfully deleted. Required. + :vartype deleted: bool + """ + + object: Literal["memory_store.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type. Always 'memory_store.deleted'. Required. Default value is + \"memory_store.deleted\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store. Required.""" + deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the memory store was successfully deleted. Required.""" + + @overload + def __init__( + self, + *, + name: str, + deleted: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["memory_store.deleted"] = "memory_store.deleted" + + +class Deployment(_Model): + """Model Deployment Definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ModelDeployment + + :ivar type: The type of the deployment. Required. "ModelDeployment" + :vartype type: str or ~azure.ai.projects.models.DeploymentType + :ivar name: Name of the deployment. Required. + :vartype name: str + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of the deployment. Required. \"ModelDeployment\"""" + name: str = rest_field(visibility=["read"]) + """Name of the deployment. Required.""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EmbeddingConfiguration(_Model): + """Embedding configuration class. + + :ivar model_deployment_name: Deployment name of embedding model. It can point to a model + deployment either in the parent AIServices or a connection. Required. + :vartype model_deployment_name: str + :ivar embedding_field: Embedding field. Required. + :vartype embedding_field: str + """ + + model_deployment_name: str = rest_field(name="modelDeploymentName", visibility=["create"]) + """Deployment name of embedding model. It can point to a model deployment either in the parent + AIServices or a connection. Required.""" + embedding_field: str = rest_field(name="embeddingField", visibility=["create"]) + """Embedding field. 
Required.""" + + @overload + def __init__( + self, + *, + model_deployment_name: str, + embedding_field: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EntraIDCredentials(BaseCredentials, discriminator="AAD"): + """Entra ID credential definition. + + :ivar type: The credential type. Required. Entra ID credential (formerly known as AAD) + :vartype type: str or ~azure.ai.projects.models.ENTRA_ID + """ + + type: Literal[CredentialType.ENTRA_ID] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. Entra ID credential (formerly known as AAD)""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.ENTRA_ID # type: ignore + + +class EvalCompareReport(InsightResult, discriminator="EvaluationComparison"): + """Insights from the evaluation comparison. + + :ivar type: The type of insights result. Required. Evaluation Comparison. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON + :ivar comparisons: Comparison results for each treatment run against the baseline. Required. + :vartype comparisons: list[~azure.ai.projects.models.EvalRunResultComparison] + :ivar method: The statistical method used for comparison. Required. + :vartype method: str + """ + + type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of insights result. Required. 
Evaluation Comparison.""" + comparisons: list["_models.EvalRunResultComparison"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Comparison results for each treatment run against the baseline. Required.""" + method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The statistical method used for comparison. Required.""" + + @overload + def __init__( + self, + *, + comparisons: list["_models.EvalRunResultComparison"], + method: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.EVALUATION_COMPARISON # type: ignore + + +class EvalResult(_Model): + """Result of the evaluation. + + :ivar name: name of the check. Required. + :vartype name: str + :ivar type: type of the check. Required. + :vartype type: str + :ivar score: score. Required. + :vartype score: float + :ivar passed: indicates if the check passed or failed. Required. + :vartype passed: bool + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """name of the check. Required.""" + type: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """type of the check. Required.""" + score: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """score. Required.""" + passed: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """indicates if the check passed or failed. Required.""" + + @overload + def __init__( + self, + *, + name: str, + type: str, + score: float, + passed: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvalRunResultCompareItem(_Model): + """Metric comparison for a treatment against the baseline. + + :ivar treatment_run_id: The treatment run ID. Required. + :vartype treatment_run_id: str + :ivar treatment_run_summary: Summary statistics of the treatment run. Required. + :vartype treatment_run_summary: ~azure.ai.projects.models.EvalRunResultSummary + :ivar delta_estimate: Estimated difference between treatment and baseline. Required. + :vartype delta_estimate: float + :ivar p_value: P-value for the treatment effect. Required. + :vartype p_value: float + :ivar treatment_effect: Type of treatment effect. Required. Known values are: "TooFewSamples", + "Inconclusive", "Changed", "Improved", and "Degraded". + :vartype treatment_effect: str or ~azure.ai.projects.models.TreatmentEffectType + """ + + treatment_run_id: str = rest_field( + name="treatmentRunId", visibility=["read", "create", "update", "delete", "query"] + ) + """The treatment run ID. Required.""" + treatment_run_summary: "_models.EvalRunResultSummary" = rest_field( + name="treatmentRunSummary", visibility=["read", "create", "update", "delete", "query"] + ) + """Summary statistics of the treatment run. Required.""" + delta_estimate: float = rest_field(name="deltaEstimate", visibility=["read", "create", "update", "delete", "query"]) + """Estimated difference between treatment and baseline. Required.""" + p_value: float = rest_field(name="pValue", visibility=["read", "create", "update", "delete", "query"]) + """P-value for the treatment effect. Required.""" + treatment_effect: Union[str, "_models.TreatmentEffectType"] = rest_field( + name="treatmentEffect", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of treatment effect. Required. 
Known values are: \"TooFewSamples\", \"Inconclusive\", + \"Changed\", \"Improved\", and \"Degraded\".""" + + @overload + def __init__( + self, + *, + treatment_run_id: str, + treatment_run_summary: "_models.EvalRunResultSummary", + delta_estimate: float, + p_value: float, + treatment_effect: Union[str, "_models.TreatmentEffectType"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvalRunResultComparison(_Model): + """Comparison results for treatment runs against the baseline. + + :ivar testing_criteria: Name of the testing criteria. Required. + :vartype testing_criteria: str + :ivar metric: Metric being evaluated. Required. + :vartype metric: str + :ivar evaluator: Name of the evaluator for this testing criteria. Required. + :vartype evaluator: str + :ivar baseline_run_summary: Summary statistics of the baseline run. Required. + :vartype baseline_run_summary: ~azure.ai.projects.models.EvalRunResultSummary + :ivar compare_items: List of comparison results for each treatment run. Required. + :vartype compare_items: list[~azure.ai.projects.models.EvalRunResultCompareItem] + """ + + testing_criteria: str = rest_field( + name="testingCriteria", visibility=["read", "create", "update", "delete", "query"] + ) + """Name of the testing criteria. Required.""" + metric: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Metric being evaluated. Required.""" + evaluator: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the evaluator for this testing criteria. 
Required.""" + baseline_run_summary: "_models.EvalRunResultSummary" = rest_field( + name="baselineRunSummary", visibility=["read", "create", "update", "delete", "query"] + ) + """Summary statistics of the baseline run. Required.""" + compare_items: list["_models.EvalRunResultCompareItem"] = rest_field( + name="compareItems", visibility=["read", "create", "update", "delete", "query"] + ) + """List of comparison results for each treatment run. Required.""" + + @overload + def __init__( + self, + *, + testing_criteria: str, + metric: str, + evaluator: str, + baseline_run_summary: "_models.EvalRunResultSummary", + compare_items: list["_models.EvalRunResultCompareItem"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvalRunResultSummary(_Model): + """Summary statistics of a metric in an evaluation run. + + :ivar run_id: The evaluation run ID. Required. + :vartype run_id: str + :ivar sample_count: Number of samples in the evaluation run. Required. + :vartype sample_count: int + :ivar average: Average value of the metric in the evaluation run. Required. + :vartype average: float + :ivar standard_deviation: Standard deviation of the metric in the evaluation run. Required. + :vartype standard_deviation: float + """ + + run_id: str = rest_field(name="runId", visibility=["read", "create", "update", "delete", "query"]) + """The evaluation run ID. Required.""" + sample_count: int = rest_field(name="sampleCount", visibility=["read", "create", "update", "delete", "query"]) + """Number of samples in the evaluation run. Required.""" + average: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Average value of the metric in the evaluation run. 
Required.""" + standard_deviation: float = rest_field( + name="standardDeviation", visibility=["read", "create", "update", "delete", "query"] + ) + """Standard deviation of the metric in the evaluation run. Required.""" + + @overload + def __init__( + self, + *, + run_id: str, + sample_count: int, + average: float, + standard_deviation: float, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluationComparisonRequest(InsightRequest, discriminator="EvaluationComparison"): + """Evaluation Comparison Request. + + :ivar type: The type of request. Required. Evaluation Comparison. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON + :ivar eval_id: Identifier for the evaluation. Required. + :vartype eval_id: str + :ivar baseline_run_id: The baseline run ID for comparison. Required. + :vartype baseline_run_id: str + :ivar treatment_run_ids: List of treatment run IDs for comparison. Required. + :vartype treatment_run_ids: list[str] + """ + + type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of request. Required. Evaluation Comparison.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Identifier for the evaluation. Required.""" + baseline_run_id: str = rest_field(name="baselineRunId", visibility=["read", "create", "update", "delete", "query"]) + """The baseline run ID for comparison. Required.""" + treatment_run_ids: list[str] = rest_field( + name="treatmentRunIds", visibility=["read", "create", "update", "delete", "query"] + ) + """List of treatment run IDs for comparison. 
Required.""" + + @overload + def __init__( + self, + *, + eval_id: str, + baseline_run_id: str, + treatment_run_ids: list[str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.EVALUATION_COMPARISON # type: ignore + + +class InsightSample(_Model): + """A sample from the analysis. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + EvaluationResultSample + + :ivar id: The unique identifier for the analysis sample. Required. + :vartype id: str + :ivar type: Sample type. Required. "EvaluationResultSample" + :vartype type: str or ~azure.ai.projects.models.SampleType + :ivar features: Features to help with additional filtering of data in UX. Required. + :vartype features: dict[str, any] + :ivar correlation_info: Info about the correlation for the analysis sample. Required. + :vartype correlation_info: dict[str, any] + """ + + __mapping__: dict[str, _Model] = {} + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier for the analysis sample. Required.""" + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Sample type. Required. \"EvaluationResultSample\"""" + features: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Features to help with additional filtering of data in UX. Required.""" + correlation_info: dict[str, Any] = rest_field( + name="correlationInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """Info about the correlation for the analysis sample. 
Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + type: str, + features: dict[str, Any], + correlation_info: dict[str, Any], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluationResultSample(InsightSample, discriminator="EvaluationResultSample"): + """A sample from the evaluation result. + + :ivar id: The unique identifier for the analysis sample. Required. + :vartype id: str + :ivar features: Features to help with additional filtering of data in UX. Required. + :vartype features: dict[str, any] + :ivar correlation_info: Info about the correlation for the analysis sample. Required. + :vartype correlation_info: dict[str, any] + :ivar type: Evaluation Result Sample Type. Required. A sample from the evaluation result. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_RESULT_SAMPLE + :ivar evaluation_result: Evaluation result for the analysis sample. Required. + :vartype evaluation_result: ~azure.ai.projects.models.EvalResult + """ + + type: Literal[SampleType.EVALUATION_RESULT_SAMPLE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Evaluation Result Sample Type. Required. A sample from the evaluation result.""" + evaluation_result: "_models.EvalResult" = rest_field( + name="evaluationResult", visibility=["read", "create", "update", "delete", "query"] + ) + """Evaluation result for the analysis sample. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + features: dict[str, Any], + correlation_info: dict[str, Any], + evaluation_result: "_models.EvalResult", + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = SampleType.EVALUATION_RESULT_SAMPLE # type: ignore + + +class EvaluationRule(_Model): + """Evaluation rule model. + + :ivar id: Unique identifier for the evaluation rule. Required. + :vartype id: str + :ivar display_name: Display Name for the evaluation rule. + :vartype display_name: str + :ivar description: Description for the evaluation rule. + :vartype description: str + :ivar action: Definition of the evaluation rule action. Required. + :vartype action: ~azure.ai.projects.models.EvaluationRuleAction + :ivar filter: Filter condition of the evaluation rule. + :vartype filter: ~azure.ai.projects.models.EvaluationRuleFilter + :ivar event_type: Event type that the evaluation rule applies to. Required. Known values are: + "response.completed" and "manual". + :vartype event_type: str or ~azure.ai.projects.models.EvaluationRuleEventType + :ivar enabled: Indicates whether the evaluation rule is enabled. Default is true. Required. + :vartype enabled: bool + :ivar system_data: System metadata for the evaluation rule. Required. + :vartype system_data: dict[str, str] + """ + + id: str = rest_field(visibility=["read"]) + """Unique identifier for the evaluation rule. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Display Name for the evaluation rule.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description for the evaluation rule.""" + action: "_models.EvaluationRuleAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Definition of the evaluation rule action. 
Required.""" + filter: Optional["_models.EvaluationRuleFilter"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Filter condition of the evaluation rule.""" + event_type: Union[str, "_models.EvaluationRuleEventType"] = rest_field( + name="eventType", visibility=["read", "create", "update", "delete", "query"] + ) + """Event type that the evaluation rule applies to. Required. Known values are: + \"response.completed\" and \"manual\".""" + enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates whether the evaluation rule is enabled. Default is true. Required.""" + system_data: dict[str, str] = rest_field(name="systemData", visibility=["read"]) + """System metadata for the evaluation rule. Required.""" + + @overload + def __init__( + self, + *, + action: "_models.EvaluationRuleAction", + event_type: Union[str, "_models.EvaluationRuleEventType"], + enabled: bool, + display_name: Optional[str] = None, + description: Optional[str] = None, + filter: Optional["_models.EvaluationRuleFilter"] = None, # pylint: disable=redefined-builtin + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluationRuleFilter(_Model): + """Evaluation filter model. + + :ivar agent_name: Filter by agent name. Required. + :vartype agent_name: str + """ + + agent_name: str = rest_field(name="agentName", visibility=["read", "create", "update", "delete", "query"]) + """Filter by agent name. Required.""" + + @overload + def __init__( + self, + *, + agent_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class EvaluationRunClusterInsightResult(InsightResult, discriminator="EvaluationRunClusterInsight"):
+ """Insights from the evaluation run cluster analysis.
+
+ :ivar type: The type of insights result. Required. Insights on an Evaluation run result.
+ :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT
+ :ivar cluster_insight: The cluster insight analysis result. Required.
+ :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult
+ """
+
+ type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of insights result. Required. Insights on an Evaluation run result."""
+ cluster_insight: "_models.ClusterInsightResult" = rest_field(
+ name="clusterInsight", visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The cluster insight analysis result. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ cluster_insight: "_models.ClusterInsightResult",
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = InsightType.EVALUATION_RUN_CLUSTER_INSIGHT # type: ignore
+
+
+class EvaluationRunClusterInsightsRequest(InsightRequest, discriminator="EvaluationRunClusterInsight"):
+ """Insights on a set of Evaluation Results.
+
+ :ivar type: The type of insights request. Required. Insights on an Evaluation run result.
+ :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT
+ :ivar eval_id: Evaluation Id for the insights. Required.
+ :vartype eval_id: str
+ :ivar run_ids: List of evaluation run IDs for the insights. Required.
+ :vartype run_ids: list[str] + :ivar model_configuration: Configuration of the model used in the insight generation. + :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration + """ + + type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of insights request. Required. Insights on an Evaluation run result.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Evaluation Id for the insights. Required.""" + run_ids: list[str] = rest_field(name="runIds", visibility=["read", "create", "update", "delete", "query"]) + """List of evaluation run IDs for the insights. Required.""" + model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( + name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """Configuration of the model used in the insight generation.""" + + @overload + def __init__( + self, + *, + eval_id: str, + run_ids: list[str], + model_configuration: Optional["_models.InsightModelConfiguration"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.EVALUATION_RUN_CLUSTER_INSIGHT # type: ignore + + +class ScheduleTask(_Model): + """Schedule task model. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + EvaluationScheduleTask, InsightScheduleTask + + :ivar type: Type of the task. Required. Known values are: "Evaluation" and "Insight". + :vartype type: str or ~azure.ai.projects.models.ScheduleTaskType + :ivar configuration: Configuration for the task. 
+ :vartype configuration: dict[str, str]
+ """
+
+ __mapping__: dict[str, _Model] = {}
+ type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+ """Type of the task. Required. Known values are: \"Evaluation\" and \"Insight\"."""
+ configuration: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Configuration for the task."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ type: str,
+ configuration: Optional[dict[str, str]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+
+class EvaluationScheduleTask(ScheduleTask, discriminator="Evaluation"):
+ """Evaluation task for the schedule.
+
+ :ivar configuration: Configuration for the task.
+ :vartype configuration: dict[str, str]
+ :ivar type: The type of the task. Required. Evaluation task.
+ :vartype type: str or ~azure.ai.projects.models.EVALUATION
+ :ivar eval_id: Identifier of the evaluation group. Required.
+ :vartype eval_id: str
+ :ivar eval_run: The evaluation run payload. Required.
+ :vartype eval_run: any
+ """
+
+ type: Literal[ScheduleTaskType.EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of the task. Required. Evaluation task."""
+ eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"])
+ """Identifier of the evaluation group. Required."""
+ eval_run: Any = rest_field(name="evalRun", visibility=["read", "create", "update", "delete", "query"])
+ """The evaluation run payload. Required."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ eval_id: str,
+ eval_run: Any,
+ configuration: Optional[dict[str, str]] = None,
+ ) -> None: ...
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ScheduleTaskType.EVALUATION # type: ignore + + +class EvaluationTaxonomy(_Model): + """Evaluation Taxonomy Definition. + + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar taxonomy_input: Input configuration for the evaluation taxonomy. Required. + :vartype taxonomy_input: ~azure.ai.projects.models.EvaluationTaxonomyInput + :ivar taxonomy_categories: List of taxonomy categories. + :vartype taxonomy_categories: list[~azure.ai.projects.models.TaxonomyCategory] + :ivar properties: Additional properties for the evaluation taxonomy. + :vartype properties: dict[str, str] + """ + + id: Optional[str] = rest_field(visibility=["read"]) + """Asset ID, a unique identifier for the asset.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["create", "update"]) + """The asset description text.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) + """Tag dictionary. Tags can be added, removed, and updated.""" + taxonomy_input: "_models.EvaluationTaxonomyInput" = rest_field( + name="taxonomyInput", visibility=["read", "create", "update", "delete", "query"] + ) + """Input configuration for the evaluation taxonomy. 
Required.""" + taxonomy_categories: Optional[list["_models.TaxonomyCategory"]] = rest_field( + name="taxonomyCategories", visibility=["read", "create", "update", "delete", "query"] + ) + """List of taxonomy categories.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional properties for the evaluation taxonomy.""" + + @overload + def __init__( + self, + *, + taxonomy_input: "_models.EvaluationTaxonomyInput", + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + taxonomy_categories: Optional[list["_models.TaxonomyCategory"]] = None, + properties: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluatorMetric(_Model): + """Evaluator Metric. + + :ivar type: Type of the metric. Known values are: "ordinal", "continuous", and "boolean". + :vartype type: str or ~azure.ai.projects.models.EvaluatorMetricType + :ivar desirable_direction: It indicates whether a higher value is better or a lower value is + better for this metric. Known values are: "increase", "decrease", and "neutral". + :vartype desirable_direction: str or ~azure.ai.projects.models.EvaluatorMetricDirection + :ivar min_value: Minimum value for the metric. + :vartype min_value: float + :ivar max_value: Maximum value for the metric. If not specified, it is assumed to be unbounded. + :vartype max_value: float + :ivar is_primary: Indicates if this metric is primary when there are multiple metrics. + :vartype is_primary: bool + """ + + type: Optional[Union[str, "_models.EvaluatorMetricType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Type of the metric. 
Known values are: \"ordinal\", \"continuous\", and \"boolean\".""" + desirable_direction: Optional[Union[str, "_models.EvaluatorMetricDirection"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """It indicates whether a higher value is better or a lower value is better for this metric. Known + values are: \"increase\", \"decrease\", and \"neutral\".""" + min_value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Minimum value for the metric.""" + max_value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Maximum value for the metric. If not specified, it is assumed to be unbounded.""" + is_primary: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates if this metric is primary when there are multiple metrics.""" + + @overload + def __init__( + self, + *, + type: Optional[Union[str, "_models.EvaluatorMetricType"]] = None, + desirable_direction: Optional[Union[str, "_models.EvaluatorMetricDirection"]] = None, + min_value: Optional[float] = None, + max_value: Optional[float] = None, + is_primary: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluatorVersion(_Model): + """Evaluator Definition. + + :ivar display_name: Display Name for evaluator. It helps to find the evaluator easily in AI + Foundry. It does not need to be unique. + :vartype display_name: str + :ivar metadata: Metadata about the evaluator. + :vartype metadata: dict[str, str] + :ivar evaluator_type: The type of the evaluator. Required. Known values are: "builtin" and + "custom". 
+ :vartype evaluator_type: str or ~azure.ai.projects.models.EvaluatorType + :ivar categories: The categories of the evaluator. Required. + :vartype categories: list[str or ~azure.ai.projects.models.EvaluatorCategory] + :ivar definition: Definition of the evaluator. Required. + :vartype definition: ~azure.ai.projects.models.EvaluatorDefinition + :ivar created_by: Creator of the evaluator. Required. + :vartype created_by: str + :ivar created_at: Creation date/time of the evaluator. Required. + :vartype created_at: int + :ivar modified_at: Last modified date/time of the evaluator. Required. + :vartype modified_at: int + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + """ + + display_name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Display Name for evaluator. It helps to find the evaluator easily in AI Foundry. It does not + need to be unique.""" + metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Metadata about the evaluator.""" + evaluator_type: Union[str, "_models.EvaluatorType"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The type of the evaluator. Required. Known values are: \"builtin\" and \"custom\".""" + categories: list[Union[str, "_models.EvaluatorCategory"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The categories of the evaluator. Required.""" + definition: "_models.EvaluatorDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Definition of the evaluator. 
Required.""" + created_by: str = rest_field(visibility=["read"]) + """Creator of the evaluator. Required.""" + created_at: int = rest_field(visibility=["read"]) + """Creation date/time of the evaluator. Required.""" + modified_at: int = rest_field(visibility=["read"]) + """Last modified date/time of the evaluator. Required.""" + id: Optional[str] = rest_field(visibility=["read"]) + """Asset ID, a unique identifier for the asset.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["create", "update"]) + """The asset description text.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) + """Tag dictionary. Tags can be added, removed, and updated.""" + + @overload + def __init__( + self, + *, + evaluator_type: Union[str, "_models.EvaluatorType"], + categories: list[Union[str, "_models.EvaluatorCategory"]], + definition: "_models.EvaluatorDefinition", + display_name: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FabricDataAgentToolParameters(_Model): + """The fabric data agent tool parameters. + + :ivar project_connections: The project connections attached to this tool. There can be a + maximum of 1 connection + resource attached to the tool. 
+ :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] + """ + + project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool.""" + + @overload + def __init__( + self, + *, + project_connections: Optional[list["_models.ToolProjectConnection"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FieldMapping(_Model): + """Field mapping configuration class. + + :ivar content_fields: List of fields with text content. Required. + :vartype content_fields: list[str] + :ivar filepath_field: Path of file to be used as a source of text content. + :vartype filepath_field: str + :ivar title_field: Field containing the title of the document. + :vartype title_field: str + :ivar url_field: Field containing the url of the document. + :vartype url_field: str + :ivar vector_fields: List of fields with vector content. + :vartype vector_fields: list[str] + :ivar metadata_fields: List of fields with metadata content. + :vartype metadata_fields: list[str] + """ + + content_fields: list[str] = rest_field(name="contentFields", visibility=["create"]) + """List of fields with text content. 
Required.""" + filepath_field: Optional[str] = rest_field(name="filepathField", visibility=["create"]) + """Path of file to be used as a source of text content.""" + title_field: Optional[str] = rest_field(name="titleField", visibility=["create"]) + """Field containing the title of the document.""" + url_field: Optional[str] = rest_field(name="urlField", visibility=["create"]) + """Field containing the url of the document.""" + vector_fields: Optional[list[str]] = rest_field(name="vectorFields", visibility=["create"]) + """List of fields with vector content.""" + metadata_fields: Optional[list[str]] = rest_field(name="metadataFields", visibility=["create"]) + """List of fields with metadata content.""" + + @overload + def __init__( + self, + *, + content_fields: list[str], + filepath_field: Optional[str] = None, + title_field: Optional[str] = None, + url_field: Optional[str] = None, + vector_fields: Optional[list[str]] = None, + metadata_fields: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileDatasetVersion(DatasetVersion, discriminator="uri_file"): + """FileDatasetVersion Definition. + + :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required. + :vartype data_uri: str + :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset + manages storage itself. If true, the underlying data will not be deleted when the dataset + version is deleted. + :vartype is_reference: bool + :ivar connection_name: The Azure Storage Account connection name. Required if + startPendingUploadVersion was not called before creating the Dataset. + :vartype connection_name: str + :ivar id: Asset ID, a unique identifier for the asset. 
+ :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Dataset type. Required. URI file. + :vartype type: str or ~azure.ai.projects.models.URI_FILE + """ + + type: Literal[DatasetType.URI_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Dataset type. Required. URI file.""" + + @overload + def __init__( + self, + *, + data_uri: str, + connection_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = DatasetType.URI_FILE # type: ignore + + +class FileSearchTool(Tool, discriminator="file_search"): + """A tool that searches for relevant content from uploaded files. Learn more about the `file + search tool `_. + + :ivar type: The type of the file search tool. Always ``file_search``. Required. + :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH + :ivar vector_store_ids: The IDs of the vector stores to search. Required. + :vartype vector_store_ids: list[str] + :ivar max_num_results: The maximum number of results to return. This number should be between 1 + and 50 inclusive. + :vartype max_num_results: int + :ivar ranking_options: Ranking options for search. + :vartype ranking_options: ~azure.ai.projects.models.RankingOptions + :ivar filters: A filter to apply. Is either a ComparisonFilter type or a CompoundFilter type. 
+ :vartype filters: ~azure.ai.projects.models.ComparisonFilter or + ~azure.ai.projects.models.CompoundFilter + """ + + type: Literal[ToolType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the file search tool. Always ``file_search``. Required.""" + vector_store_ids: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The IDs of the vector stores to search. Required.""" + max_num_results: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The maximum number of results to return. This number should be between 1 and 50 inclusive.""" + ranking_options: Optional["_models.RankingOptions"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Ranking options for search.""" + filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A filter to apply. Is either a ComparisonFilter type or a CompoundFilter type.""" + + @overload + def __init__( + self, + *, + vector_store_ids: list[str], + max_num_results: Optional[int] = None, + ranking_options: Optional["_models.RankingOptions"] = None, + filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.FILE_SEARCH # type: ignore + + +class FileSearchToolCallItemParam(ItemParam, discriminator="file_search_call"): + """The results of a file search tool call. See the + `file search guide `_ for more information. + + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL + :ivar queries: The queries used to search for files. Required. + :vartype queries: list[str] + :ivar results: The results of the file search tool call. + :vartype results: list[~azure.ai.projects.models.FileSearchToolCallItemParamResult] + """ + + type: Literal[ItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The queries used to search for files. Required.""" + results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The results of the file search tool call.""" + + @overload + def __init__( + self, + *, + queries: list[str], + results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FILE_SEARCH_CALL # type: ignore + + +class FileSearchToolCallItemParamResult(_Model): + """FileSearchToolCallItemParamResult. + + :ivar file_id: The unique ID of the file. + :vartype file_id: str + :ivar text: The text that was retrieved from the file. + :vartype text: str + :ivar filename: The name of the file. + :vartype filename: str + :ivar attributes: + :vartype attributes: ~azure.ai.projects.models.VectorStoreFileAttributes + :ivar score: The relevance score of the file - a value between 0 and 1. 
+ :vartype score: float + """ + + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the file.""" + text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text that was retrieved from the file.""" + filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the file.""" + attributes: Optional["_models.VectorStoreFileAttributes"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + score: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The relevance score of the file - a value between 0 and 1.""" + + @overload + def __init__( + self, + *, + file_id: Optional[str] = None, + text: Optional[str] = None, + filename: Optional[str] = None, + attributes: Optional["_models.VectorStoreFileAttributes"] = None, + score: Optional[float] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileSearchToolCallItemResource(ItemResource, discriminator="file_search_call"): + """The results of a file search tool call. See the + `file search guide `_ for more information. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL + :ivar status: The status of the file search tool call. One of ``in_progress``, + ``searching``, ``incomplete`` or ``failed``,. Required. 
Is one of the following types: + Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"], + Literal["failed"] + :vartype status: str or str or str or str or str + :ivar queries: The queries used to search for files. Required. + :vartype queries: list[str] + :ivar results: The results of the file search tool call. + :vartype results: list[~azure.ai.projects.models.FileSearchToolCallItemParamResult] + """ + + type: Literal[ItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the file search tool call. One of ``in_progress``, + ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], + Literal[\"incomplete\"], Literal[\"failed\"]""" + queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The queries used to search for files. Required.""" + results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The results of the file search tool call.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], + queries: list[str], + created_by: Optional["_models.CreatedBy"] = None, + results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FILE_SEARCH_CALL # type: ignore + + +class FolderDatasetVersion(DatasetVersion, discriminator="uri_folder"): + """FileDatasetVersion Definition. + + :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required. + :vartype data_uri: str + :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset + manages storage itself. If true, the underlying data will not be deleted when the dataset + version is deleted. + :vartype is_reference: bool + :ivar connection_name: The Azure Storage Account connection name. Required if + startPendingUploadVersion was not called before creating the Dataset. + :vartype connection_name: str + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Dataset type. Required. URI folder. + :vartype type: str or ~azure.ai.projects.models.URI_FOLDER + """ + + type: Literal[DatasetType.URI_FOLDER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Dataset type. Required. URI folder.""" + + @overload + def __init__( + self, + *, + data_uri: str, + connection_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = DatasetType.URI_FOLDER # type: ignore + + +class FunctionTool(Tool, discriminator="function"): + """Defines a function in your own code the model can choose to call. Learn more about `function + calling `_. + + :ivar type: The type of the function tool. Always ``function``. Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION + :ivar name: The name of the function to call. Required. + :vartype name: str + :ivar description: A description of the function. Used by the model to determine whether or not + to call the function. + :vartype description: str + :ivar parameters: A JSON schema object describing the parameters of the function. Required. + :vartype parameters: any + :ivar strict: Whether to enforce strict parameter validation. Default ``true``. Required. + :vartype strict: bool + """ + + type: Literal[ToolType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the function tool. Always ``function``. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to call. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of the function. Used by the model to determine whether or not to call the + function.""" + parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON schema object describing the parameters of the function. Required.""" + strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to enforce strict parameter validation. Default ``true``. 
Required.""" + + @overload + def __init__( + self, + *, + name: str, + parameters: Any, + strict: bool, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.FUNCTION # type: ignore + + +class FunctionToolCallItemParam(ItemParam, discriminator="function_call"): + """A tool call to run a function. See the + `function calling guide `_ for more information. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar name: The name of the function to run. Required. + :vartype name: str + :ivar arguments: A JSON string of the arguments to pass to the function. Required. + :vartype arguments: str + """ + + type: Literal[ItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments to pass to the function. Required.""" + + @overload + def __init__( + self, + *, + call_id: str, + name: str, + arguments: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FUNCTION_CALL # type: ignore + + +class FunctionToolCallItemResource(ItemResource, discriminator="function_call"): + """A tool call to run a function. See the + `function calling guide `_ for more information. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar name: The name of the function to run. Required. + :vartype name: str + :ivar arguments: A JSON string of the arguments to pass to the function. Required. + :vartype arguments: str + """ + + type: Literal[ItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. 
Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments to pass to the function. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + call_id: str, + name: str, + arguments: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FUNCTION_CALL # type: ignore + + +class FunctionToolCallOutputItemParam(ItemParam, discriminator="function_call_output"): + """The output of a function tool call. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar output: A JSON string of the output of the function tool call. Required. + :vartype output: str + """ + + type: Literal[ItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. Required.""" + output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the output of the function tool call. Required.""" + + @overload + def __init__( + self, + *, + call_id: str, + output: str, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FUNCTION_CALL_OUTPUT # type: ignore + + +class FunctionToolCallOutputItemResource(ItemResource, discriminator="function_call_output"): + """The output of a function tool call. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar output: A JSON string of the output of the function tool call. Required. + :vartype output: str + """ + + type: Literal[ItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. 
Required.""" + output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the output of the function tool call. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + call_id: str, + output: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FUNCTION_CALL_OUTPUT # type: ignore + + +class HostedAgentDefinition(AgentDefinition, discriminator="hosted"): + """The hosted agent definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ImageBasedHostedAgentDefinition + + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.HOSTED + :ivar tools: An array of tools the hosted agent's model may call while generating a response. + You + can specify which tool to use by setting the ``tool_choice`` parameter. + :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar container_protocol_versions: The protocols that the agent supports for ingress + communication of the containers. Required. + :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] + :ivar cpu: The CPU configuration for the hosted agent. Required. + :vartype cpu: str + :ivar memory: The memory configuration for the hosted agent. Required. + :vartype memory: str + :ivar environment_variables: Environment variables to set in the hosted agent container. 
+ :vartype environment_variables: dict[str, str] + """ + + __mapping__: dict[str, _Model] = {} + kind: Literal[AgentKind.HOSTED] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of tools the hosted agent's model may call while generating a response. You + can specify which tool to use by setting the ``tool_choice`` parameter.""" + container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The protocols that the agent supports for ingress communication of the containers. Required.""" + cpu: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The CPU configuration for the hosted agent. Required.""" + memory: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The memory configuration for the hosted agent. Required.""" + environment_variables: Optional[dict[str, str]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Environment variables to set in the hosted agent container.""" + + @overload + def __init__( + self, + *, + container_protocol_versions: list["_models.ProtocolVersionRecord"], + cpu: str, + memory: str, + rai_config: Optional["_models.RaiConfig"] = None, + tools: Optional[list["_models.Tool"]] = None, + environment_variables: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = AgentKind.HOSTED # type: ignore + + +class HourlyRecurrenceSchedule(RecurrenceSchedule, discriminator="Hourly"): + """Hourly recurrence schedule. + + :ivar type: Required. Hourly recurrence pattern. + :vartype type: str or ~azure.ai.projects.models.HOURLY + """ + + type: Literal[RecurrenceType.HOURLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Hourly recurrence pattern.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = RecurrenceType.HOURLY # type: ignore + + +class HumanEvaluationRuleAction(EvaluationRuleAction, discriminator="humanEvaluation"): + """Evaluation rule action for human evaluation. + + :ivar type: Required. Human evaluation. + :vartype type: str or ~azure.ai.projects.models.HUMAN_EVALUATION + :ivar template_id: Human evaluation template Id. Required. + :vartype template_id: str + """ + + type: Literal[EvaluationRuleActionType.HUMAN_EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Human evaluation.""" + template_id: str = rest_field(name="templateId", visibility=["read", "create", "update", "delete", "query"]) + """Human evaluation template Id. Required.""" + + @overload + def __init__( + self, + *, + template_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = EvaluationRuleActionType.HUMAN_EVALUATION # type: ignore + + +class ImageBasedHostedAgentDefinition(HostedAgentDefinition, discriminator="hosted"): + """The image-based deployment definition for a hosted agent. + + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar tools: An array of tools the hosted agent's model may call while generating a response. + You + can specify which tool to use by setting the ``tool_choice`` parameter. + :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar container_protocol_versions: The protocols that the agent supports for ingress + communication of the containers. Required. + :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] + :ivar cpu: The CPU configuration for the hosted agent. Required. + :vartype cpu: str + :ivar memory: The memory configuration for the hosted agent. Required. + :vartype memory: str + :ivar environment_variables: Environment variables to set in the hosted agent container. + :vartype environment_variables: dict[str, str] + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.HOSTED + :ivar image: The image for the hosted agent. Required. + :vartype image: str + """ + + image: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The image for the hosted agent. Required.""" + + @overload + def __init__( + self, + *, + container_protocol_versions: list["_models.ProtocolVersionRecord"], + cpu: str, + memory: str, + image: str, + rai_config: Optional["_models.RaiConfig"] = None, + tools: Optional[list["_models.Tool"]] = None, + environment_variables: Optional[dict[str, str]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ImageGenTool(Tool, discriminator="image_generation"): + """A tool that generates images using a model like ``gpt-image-1``. + + :ivar type: The type of the image generation tool. Always ``image_generation``. Required. + :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION + :ivar model: The image generation model to use. Default: ``gpt-image-1``. Default value is + "gpt-image-1". + :vartype model: str + :ivar quality: The quality of the generated image. One of ``low``, ``medium``, ``high``, + or ``auto``. Default: ``auto``. Is one of the following types: Literal["low"], + Literal["medium"], Literal["high"], Literal["auto"] + :vartype quality: str or str or str or str + :ivar size: The size of the generated image. One of ``1024x1024``, ``1024x1536``, + ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: + Literal["1024x1024"], Literal["1024x1536"], Literal["1536x1024"], Literal["auto"] + :vartype size: str or str or str or str + :ivar output_format: The output format of the generated image. One of ``png``, ``webp``, or + ``jpeg``. Default: ``png``. Is one of the following types: Literal["png"], Literal["webp"], + Literal["jpeg"] + :vartype output_format: str or str or str + :ivar output_compression: Compression level for the output image. Default: 100. + :vartype output_compression: int + :ivar moderation: Moderation level for the generated image. Default: ``auto``. Is either a + Literal["auto"] type or a Literal["low"] type. + :vartype moderation: str or str + :ivar background: Background type for the generated image. One of ``transparent``, + ``opaque``, or ``auto``. Default: ``auto``. 
Is one of the following types: + Literal["transparent"], Literal["opaque"], Literal["auto"] + :vartype background: str or str or str + :ivar input_image_mask: Optional mask for inpainting. Contains ``image_url`` + (string, optional) and ``file_id`` (string, optional). + :vartype input_image_mask: ~azure.ai.projects.models.ImageGenToolInputImageMask + :ivar partial_images: Number of partial images to generate in streaming mode, from 0 (default + value) to 3. + :vartype partial_images: int + """ + + type: Literal[ToolType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the image generation tool. Always ``image_generation``. Required.""" + model: Optional[Literal["gpt-image-1"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The image generation model to use. Default: ``gpt-image-1``. Default value is \"gpt-image-1\".""" + quality: Optional[Literal["low", "medium", "high", "auto"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The quality of the generated image. One of ``low``, ``medium``, ``high``, + or ``auto``. Default: ``auto``. Is one of the following types: Literal[\"low\"], + Literal[\"medium\"], Literal[\"high\"], Literal[\"auto\"]""" + size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The size of the generated image. One of ``1024x1024``, ``1024x1536``, + ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: + Literal[\"1024x1024\"], Literal[\"1024x1536\"], Literal[\"1536x1024\"], Literal[\"auto\"]""" + output_format: Optional[Literal["png", "webp", "jpeg"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The output format of the generated image. One of ``png``, ``webp``, or + ``jpeg``. Default: ``png``. 
Is one of the following types: Literal[\"png\"], Literal[\"webp\"], + Literal[\"jpeg\"]""" + output_compression: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Compression level for the output image. Default: 100.""" + moderation: Optional[Literal["auto", "low"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Moderation level for the generated image. Default: ``auto``. Is either a Literal[\"auto\"] type + or a Literal[\"low\"] type.""" + background: Optional[Literal["transparent", "opaque", "auto"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Background type for the generated image. One of ``transparent``, + ``opaque``, or ``auto``. Default: ``auto``. Is one of the following types: + Literal[\"transparent\"], Literal[\"opaque\"], Literal[\"auto\"]""" + input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Optional mask for inpainting. Contains ``image_url`` + (string, optional) and ``file_id`` (string, optional).""" + partial_images: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Number of partial images to generate in streaming mode, from 0 (default value) to 3.""" + + @overload + def __init__( + self, + *, + model: Optional[Literal["gpt-image-1"]] = None, + quality: Optional[Literal["low", "medium", "high", "auto"]] = None, + size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = None, + output_format: Optional[Literal["png", "webp", "jpeg"]] = None, + output_compression: Optional[int] = None, + moderation: Optional[Literal["auto", "low"]] = None, + background: Optional[Literal["transparent", "opaque", "auto"]] = None, + input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = None, + partial_images: Optional[int] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.IMAGE_GENERATION # type: ignore + + +class ImageGenToolCallItemParam(ItemParam, discriminator="image_generation_call"): + """An image generation request made by the model. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL + :ivar result: The generated image encoded in base64. Required. + :vartype result: str + """ + + type: Literal[ItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + result: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The generated image encoded in base64. Required.""" + + @overload + def __init__( + self, + *, + result: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.IMAGE_GENERATION_CALL # type: ignore + + +class ImageGenToolCallItemResource(ItemResource, discriminator="image_generation_call"): + """An image generation request made by the model. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL + :ivar status: Required. 
Is one of the following types: Literal["in_progress"], + Literal["completed"], Literal["generating"], Literal["failed"] + :vartype status: str or str or str or str + :ivar result: The generated image encoded in base64. Required. + :vartype result: str + """ + + type: Literal[ItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "generating", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required. Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], + Literal[\"generating\"], Literal[\"failed\"]""" + result: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The generated image encoded in base64. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "generating", "failed"], + result: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.IMAGE_GENERATION_CALL # type: ignore + + +class ImageGenToolInputImageMask(_Model): + """ImageGenToolInputImageMask. + + :ivar image_url: Base64-encoded mask image. + :vartype image_url: str + :ivar file_id: File ID for the mask image. 
+ :vartype file_id: str + """ + + image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Base64-encoded mask image.""" + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """File ID for the mask image.""" + + @overload + def __init__( + self, + *, + image_url: Optional[str] = None, + file_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Insight(_Model): + """The response body for cluster insights. + + :ivar id: The unique identifier for the insights report. Required. + :vartype id: str + :ivar metadata: Metadata about the insights report. Required. + :vartype metadata: ~azure.ai.projects.models.InsightsMetadata + :ivar state: The current state of the insights. Required. Known values are: "NotStarted", + "Running", "Succeeded", "Failed", and "Canceled". + :vartype state: str or ~azure.ai.projects.models.OperationState + :ivar display_name: User friendly display name for the insight. Required. + :vartype display_name: str + :ivar request: Request for the insights analysis. Required. + :vartype request: ~azure.ai.projects.models.InsightRequest + :ivar result: The result of the insights report. + :vartype result: ~azure.ai.projects.models.InsightResult + """ + + id: str = rest_field(visibility=["read"]) + """The unique identifier for the insights report. Required.""" + metadata: "_models.InsightsMetadata" = rest_field(visibility=["read"]) + """Metadata about the insights report. Required.""" + state: Union[str, "_models.OperationState"] = rest_field(visibility=["read"]) + """The current state of the insights. Required. 
Known values are: \"NotStarted\", \"Running\",
+     \"Succeeded\", \"Failed\", and \"Canceled\"."""
+    display_name: str = rest_field(name="displayName", visibility=["read", "create", "update", "delete", "query"])
+    """User friendly display name for the insight. Required."""
+    request: "_models.InsightRequest" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Request for the insights analysis. Required."""
+    result: Optional["_models.InsightResult"] = rest_field(visibility=["read"])
+    """The result of the insights report."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        display_name: str,
+        request: "_models.InsightRequest",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class InsightCluster(_Model):
+    """A cluster of analysis samples.
+
+    :ivar id: The id of the analysis cluster. Required.
+    :vartype id: str
+    :ivar label: Label for the cluster. Required.
+    :vartype label: str
+    :ivar suggestion: Suggestion for the cluster. Required.
+    :vartype suggestion: str
+    :ivar description: Description of the analysis cluster. Required.
+    :vartype description: str
+    :ivar weight: The weight of the analysis cluster. This indicates the number of samples in the
+     cluster. Required.
+    :vartype weight: int
+    :ivar sub_clusters: List of subclusters within this cluster. Empty if no subclusters exist.
+    :vartype sub_clusters: list[~azure.ai.projects.models.InsightCluster]
+    :ivar samples: List of samples that belong to this cluster. Empty if samples are part of
+     subclusters.
+    :vartype samples: list[~azure.ai.projects.models.InsightSample]
+    """
+
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The id of the analysis cluster. Required."""
+    label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Label for the cluster. Required."""
+    suggestion: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Suggestion for the cluster. Required."""
+    description: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Description of the analysis cluster. Required."""
+    weight: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The weight of the analysis cluster. This indicates the number of samples in the cluster. Required."""
+    sub_clusters: Optional[list["_models.InsightCluster"]] = rest_field(
+        name="subClusters", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """List of subclusters within this cluster. Empty if no subclusters exist."""
+    samples: Optional[list["_models.InsightSample"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """List of samples that belong to this cluster. Empty if samples are part of subclusters."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        label: str,
+        suggestion: str,
+        description: str,
+        weight: int,
+        sub_clusters: Optional[list["_models.InsightCluster"]] = None,
+        samples: Optional[list["_models.InsightSample"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class InsightModelConfiguration(_Model):
+    """Configuration of the model used in the insight generation.
+
+    :ivar model_deployment_name: The model deployment to be evaluated. Accepts either the
+     deployment name alone or with the connection name as '{connectionName}/'.
+     Required.
+ :vartype model_deployment_name: str + """ + + model_deployment_name: str = rest_field( + name="modelDeploymentName", visibility=["read", "create", "update", "delete", "query"] + ) + """The model deployment to be evaluated. Accepts either the deployment name alone or with the + connection name as '{connectionName}/'. Required.""" + + @overload + def __init__( + self, + *, + model_deployment_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class InsightScheduleTask(ScheduleTask, discriminator="Insight"): + """Insight task for the schedule. + + :ivar configuration: Configuration for the task. + :vartype configuration: dict[str, str] + :ivar type: Required. Insight task. + :vartype type: str or ~azure.ai.projects.models.INSIGHT + :ivar insight: The insight payload. Required. + :vartype insight: ~azure.ai.projects.models.Insight + """ + + type: Literal[ScheduleTaskType.INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Insight task.""" + insight: "_models.Insight" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The insight payload. Required.""" + + @overload + def __init__( + self, + *, + insight: "_models.Insight", + configuration: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ScheduleTaskType.INSIGHT # type: ignore + + +class InsightsMetadata(_Model): + """Metadata about the insights. 
+ + :ivar created_at: The timestamp when the insights were created. Required. + :vartype created_at: ~datetime.datetime + :ivar completed_at: The timestamp when the insights were completed. + :vartype completed_at: ~datetime.datetime + """ + + created_at: datetime.datetime = rest_field( + name="createdAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp when the insights were created. Required.""" + completed_at: Optional[datetime.datetime] = rest_field( + name="completedAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp when the insights were completed.""" + + @overload + def __init__( + self, + *, + created_at: datetime.datetime, + completed_at: Optional[datetime.datetime] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class InsightSummary(_Model): + """Summary of the error cluster analysis. + + :ivar sample_count: Total number of samples analyzed. Required. + :vartype sample_count: int + :ivar unique_subcluster_count: Total number of unique subcluster labels. Required. + :vartype unique_subcluster_count: int + :ivar unique_cluster_count: Total number of unique clusters. Required. + :vartype unique_cluster_count: int + :ivar method: Method used for clustering. Required. + :vartype method: str + :ivar usage: Token usage while performing clustering analysis. Required. + :vartype usage: ~azure.ai.projects.models.ClusterTokenUsage + """ + + sample_count: int = rest_field(name="sampleCount", visibility=["read", "create", "update", "delete", "query"]) + """Total number of samples analyzed. 
Required.""" + unique_subcluster_count: int = rest_field( + name="uniqueSubclusterCount", visibility=["read", "create", "update", "delete", "query"] + ) + """Total number of unique subcluster labels. Required.""" + unique_cluster_count: int = rest_field( + name="uniqueClusterCount", visibility=["read", "create", "update", "delete", "query"] + ) + """Total number of unique clusters. Required.""" + method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Method used for clustering. Required.""" + usage: "_models.ClusterTokenUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Token usage while performing clustering analysis. Required.""" + + @overload + def __init__( + self, + *, + sample_count: int, + unique_subcluster_count: int, + unique_cluster_count: int, + method: str, + usage: "_models.ClusterTokenUsage", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class WorkflowActionOutputItemResource(ItemResource, discriminator="workflow_action"): + """WorkflowActionOutputItemResource. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + InvokeAzureAgentWorkflowActionOutputItemResource + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.WORKFLOW_ACTION + :ivar kind: The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required. + Default value is None. + :vartype kind: str + :ivar action_id: Unique identifier for the action. Required. 
+ :vartype action_id: str + :ivar parent_action_id: ID of the parent action if this is a nested action. + :vartype parent_action_id: str + :ivar previous_action_id: ID of the previous action if this action follows another. + :vartype previous_action_id: str + :ivar status: Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). + Required. Is one of the following types: Literal["completed"], Literal["failed"], + Literal["in_progress"], Literal["cancelled"] + :vartype status: str or str or str or str + """ + + __mapping__: dict[str, _Model] = {} + type: Literal[ItemType.WORKFLOW_ACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) + """The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required. Default value is + None.""" + action_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique identifier for the action. Required.""" + parent_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """ID of the parent action if this is a nested action.""" + previous_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """ID of the previous action if this action follows another.""" + status: Literal["completed", "failed", "in_progress", "cancelled"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). Required. 
Is + one of the following types: Literal[\"completed\"], Literal[\"failed\"], + Literal[\"in_progress\"], Literal[\"cancelled\"]""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + kind: str, + action_id: str, + status: Literal["completed", "failed", "in_progress", "cancelled"], + created_by: Optional["_models.CreatedBy"] = None, + parent_action_id: Optional[str] = None, + previous_action_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.WORKFLOW_ACTION # type: ignore + + +class InvokeAzureAgentWorkflowActionOutputItemResource( + WorkflowActionOutputItemResource, discriminator="InvokeAzureAgent" +): # pylint: disable=name-too-long + """Details about an agent invocation as part of a workflow action. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.WORKFLOW_ACTION + :ivar action_id: Unique identifier for the action. Required. + :vartype action_id: str + :ivar parent_action_id: ID of the parent action if this is a nested action. + :vartype parent_action_id: str + :ivar previous_action_id: ID of the previous action if this action follows another. + :vartype previous_action_id: str + :ivar status: Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). + Required. Is one of the following types: Literal["completed"], Literal["failed"], + Literal["in_progress"], Literal["cancelled"] + :vartype status: str or str or str or str + :ivar kind: Required. Default value is "InvokeAzureAgent". + :vartype kind: str + :ivar agent: Agent id. 
Required. + :vartype agent: ~azure.ai.projects.models.AgentId + :ivar conversation_id: ID of the conversation for the agent invocation. + :vartype conversation_id: str + :ivar response_id: The response id for the agent invocation. Required. + :vartype response_id: str + """ + + __mapping__: dict[str, _Model] = {} + kind: Literal["InvokeAzureAgent"] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Default value is \"InvokeAzureAgent\".""" + agent: "_models.AgentId" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Agent id. Required.""" + conversation_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """ID of the conversation for the agent invocation.""" + response_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response id for the agent invocation. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + action_id: str, + status: Literal["completed", "failed", "in_progress", "cancelled"], + agent: "_models.AgentId", + response_id: str, + created_by: Optional["_models.CreatedBy"] = None, + parent_action_id: Optional[str] = None, + previous_action_id: Optional[str] = None, + conversation_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = "InvokeAzureAgent" # type: ignore + + +class ItemContent(_Model): + """ItemContent. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + ItemContentInputAudio, ItemContentInputFile, ItemContentInputImage, ItemContentInputText, + ItemContentOutputAudio, ItemContentOutputText, ItemContentRefusal + + :ivar type: Required. Known values are: "input_text", "input_audio", "input_image", + "input_file", "output_text", "output_audio", and "refusal". + :vartype type: str or ~azure.ai.projects.models.ItemContentType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"input_text\", \"input_audio\", \"input_image\", \"input_file\", + \"output_text\", \"output_audio\", and \"refusal\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ItemContentInputAudio(ItemContent, discriminator="input_audio"): + """An audio input to the model. + + :ivar type: The type of the input item. Always ``input_audio``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_AUDIO + :ivar data: Base64-encoded audio data. Required. + :vartype data: str + :ivar format: The format of the audio data. Currently supported formats are ``mp3`` and + ``wav``. Required. Is either a Literal["mp3"] type or a Literal["wav"] type. + :vartype format: str or str + """ + + type: Literal[ItemContentType.INPUT_AUDIO] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_audio``. Required.""" + data: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Base64-encoded audio data. 
Required.""" + format: Literal["mp3", "wav"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The format of the audio data. Currently supported formats are ``mp3`` and + ``wav``. Required. Is either a Literal[\"mp3\"] type or a Literal[\"wav\"] type.""" + + @overload + def __init__( + self, + *, + data: str, + format: Literal["mp3", "wav"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.INPUT_AUDIO # type: ignore + + +class ItemContentInputFile(ItemContent, discriminator="input_file"): + """A file input to the model. + + :ivar type: The type of the input item. Always ``input_file``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_FILE + :ivar file_id: The ID of the file to be sent to the model. + :vartype file_id: str + :ivar filename: The name of the file to be sent to the model. + :vartype filename: str + :ivar file_data: The content of the file to be sent to the model. + :vartype file_data: str + """ + + type: Literal[ItemContentType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_file``. 
Required.""" + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the file to be sent to the model.""" + filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the file to be sent to the model.""" + file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content of the file to be sent to the model.""" + + @overload + def __init__( + self, + *, + file_id: Optional[str] = None, + filename: Optional[str] = None, + file_data: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.INPUT_FILE # type: ignore + + +class ItemContentInputImage(ItemContent, discriminator="input_image"): + """An image input to the model. Learn about `image inputs `_. + + :ivar type: The type of the input item. Always ``input_image``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_IMAGE + :ivar image_url: The URL of the image to be sent to the model. A fully qualified URL or base64 + encoded image in a data URL. + :vartype image_url: str + :ivar file_id: The ID of the file to be sent to the model. + :vartype file_id: str + :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``, + or ``auto``. Defaults to ``auto``. Is one of the following types: Literal["low"], + Literal["high"], Literal["auto"] + :vartype detail: str or str or str + """ + + type: Literal[ItemContentType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_image``. 
Required.""" + image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the image to be sent to the model. A fully qualified URL or base64 encoded image in + a data URL.""" + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the file to be sent to the model.""" + detail: Optional[Literal["low", "high", "auto"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``. + Defaults to ``auto``. Is one of the following types: Literal[\"low\"], Literal[\"high\"], + Literal[\"auto\"]""" + + @overload + def __init__( + self, + *, + image_url: Optional[str] = None, + file_id: Optional[str] = None, + detail: Optional[Literal["low", "high", "auto"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.INPUT_IMAGE # type: ignore + + +class ItemContentInputText(ItemContent, discriminator="input_text"): + """A text input to the model. + + :ivar type: The type of the input item. Always ``input_text``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_TEXT + :ivar text: The text input to the model. Required. + :vartype text: str + """ + + type: Literal[ItemContentType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_text``. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text input to the model. Required.""" + + @overload + def __init__( + self, + *, + text: str, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.INPUT_TEXT # type: ignore + + +class ItemContentOutputAudio(ItemContent, discriminator="output_audio"): + """An audio output from the model. + + :ivar type: The type of the output audio. Always ``output_audio``. Required. + :vartype type: str or ~azure.ai.projects.models.OUTPUT_AUDIO + :ivar data: Base64-encoded audio data from the model. Required. + :vartype data: str + :ivar transcript: The transcript of the audio data from the model. Required. + :vartype transcript: str + """ + + type: Literal[ItemContentType.OUTPUT_AUDIO] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the output audio. Always ``output_audio``. Required.""" + data: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Base64-encoded audio data from the model. Required.""" + transcript: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The transcript of the audio data from the model. Required.""" + + @overload + def __init__( + self, + *, + data: str, + transcript: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.OUTPUT_AUDIO # type: ignore + + +class ItemContentOutputText(ItemContent, discriminator="output_text"): + """A text output from the model. + + :ivar type: The type of the output text. Always ``output_text``. Required. 
+ :vartype type: str or ~azure.ai.projects.models.OUTPUT_TEXT + :ivar text: The text output from the model. Required. + :vartype text: str + :ivar annotations: The annotations of the text output. Required. + :vartype annotations: list[~azure.ai.projects.models.Annotation] + :ivar logprobs: + :vartype logprobs: list[~azure.ai.projects.models.LogProb] + """ + + type: Literal[ItemContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the output text. Always ``output_text``. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text output from the model. Required.""" + annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The annotations of the text output. Required.""" + logprobs: Optional[list["_models.LogProb"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + + @overload + def __init__( + self, + *, + text: str, + annotations: list["_models.Annotation"], + logprobs: Optional[list["_models.LogProb"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.OUTPUT_TEXT # type: ignore + + +class ItemContentRefusal(ItemContent, discriminator="refusal"): + """A refusal from the model. + + :ivar type: The type of the refusal. Always ``refusal``. Required. + :vartype type: str or ~azure.ai.projects.models.REFUSAL + :ivar refusal: The refusal explanation from the model. Required. 
+ :vartype refusal: str + """ + + type: Literal[ItemContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the refusal. Always ``refusal``. Required.""" + refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The refusal explanation from the model. Required.""" + + @overload + def __init__( + self, + *, + refusal: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.REFUSAL # type: ignore + + +class ItemReferenceItemParam(ItemParam, discriminator="item_reference"): + """An internal identifier for an item to reference. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.ITEM_REFERENCE + :ivar id: The service-originated ID of the previously generated response item being referenced. + Required. + :vartype id: str + """ + + type: Literal[ItemType.ITEM_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The service-originated ID of the previously generated response item being referenced. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.ITEM_REFERENCE # type: ignore + + +class LocalShellExecAction(_Model): + """Execute a shell command on the server. + + :ivar type: The type of the local shell action. Always ``exec``. Required. Default value is + "exec". + :vartype type: str + :ivar command: The command to run. Required. + :vartype command: list[str] + :ivar timeout_ms: Optional timeout in milliseconds for the command. + :vartype timeout_ms: int + :ivar working_directory: Optional working directory to run the command in. + :vartype working_directory: str + :ivar env: Environment variables to set for the command. Required. + :vartype env: dict[str, str] + :ivar user: Optional user to run the command as. + :vartype user: str + """ + + type: Literal["exec"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the local shell action. Always ``exec``. Required. Default value is \"exec\".""" + command: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The command to run. Required.""" + timeout_ms: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional timeout in milliseconds for the command.""" + working_directory: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional working directory to run the command in.""" + env: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Environment variables to set for the command. 
Required.""" + user: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional user to run the command as.""" + + @overload + def __init__( + self, + *, + command: list[str], + env: dict[str, str], + timeout_ms: Optional[int] = None, + working_directory: Optional[str] = None, + user: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type: Literal["exec"] = "exec" + + +class LocalShellTool(Tool, discriminator="local_shell"): + """A tool that allows the model to execute shell commands in a local environment. + + :ivar type: The type of the local shell tool. Always ``local_shell``. Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL + """ + + type: Literal[ToolType.LOCAL_SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the local shell tool. Always ``local_shell``. Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.LOCAL_SHELL # type: ignore + + +class LocalShellToolCallItemParam(ItemParam, discriminator="local_shell_call"): + """A tool call to run a command on the local shell. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL + :ivar call_id: The unique ID of the local shell tool call generated by the model. Required. + :vartype call_id: str + :ivar action: Required. 
+ :vartype action: ~azure.ai.projects.models.LocalShellExecAction + """ + + type: Literal[ItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the local shell tool call generated by the model. Required.""" + action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + call_id: str, + action: "_models.LocalShellExecAction", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.LOCAL_SHELL_CALL # type: ignore + + +class LocalShellToolCallItemResource(ItemResource, discriminator="local_shell_call"): + """A tool call to run a command on the local shell. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL + :ivar status: Required. Is one of the following types: Literal["in_progress"], + Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar call_id: The unique ID of the local shell tool call generated by the model. Required. + :vartype call_id: str + :ivar action: Required. 
+ :vartype action: ~azure.ai.projects.models.LocalShellExecAction + """ + + type: Literal[ItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required. Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], + Literal[\"incomplete\"]""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the local shell tool call generated by the model. Required.""" + action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + call_id: str, + action: "_models.LocalShellExecAction", + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.LOCAL_SHELL_CALL # type: ignore + + +class LocalShellToolCallOutputItemParam(ItemParam, discriminator="local_shell_call_output"): + """The output of a local shell tool call. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT + :ivar output: A JSON string of the output of the local shell tool call. Required. 
+ :vartype output: str + """ + + type: Literal[ItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the output of the local shell tool call. Required.""" + + @overload + def __init__( + self, + *, + output: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.LOCAL_SHELL_CALL_OUTPUT # type: ignore + + +class LocalShellToolCallOutputItemResource(ItemResource, discriminator="local_shell_call_output"): + """The output of a local shell tool call. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT + :ivar status: Required. Is one of the following types: Literal["in_progress"], + Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar output: A JSON string of the output of the local shell tool call. Required. + :vartype output: str + """ + + type: Literal[ItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required. 
Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], + Literal[\"incomplete\"]""" + output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the output of the local shell tool call. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + output: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.LOCAL_SHELL_CALL_OUTPUT # type: ignore + + +class LogProb(_Model): + """The log probability of a token. + + :ivar token: Required. + :vartype token: str + :ivar logprob: Required. + :vartype logprob: float + :ivar bytes: Required. + :vartype bytes: list[int] + :ivar top_logprobs: Required. + :vartype top_logprobs: list[~azure.ai.projects.models.TopLogProb] + """ + + token: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + logprob: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + top_logprobs: list["_models.TopLogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + token: str, + logprob: float, + bytes: list[int], + top_logprobs: list["_models.TopLogProb"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagedAzureAISearchIndex(Index, discriminator="ManagedAzureSearch"): + """Managed Azure AI Search Index Definition. + + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Type of index. Required. Managed Azure Search + :vartype type: str or ~azure.ai.projects.models.MANAGED_AZURE_SEARCH + :ivar vector_store_id: Vector store id of managed index. Required. + :vartype vector_store_id: str + """ + + type: Literal[IndexType.MANAGED_AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of index. Required. Managed Azure Search""" + vector_store_id: str = rest_field(name="vectorStoreId", visibility=["create"]) + """Vector store id of managed index. Required.""" + + @overload + def __init__( + self, + *, + vector_store_id: str, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = IndexType.MANAGED_AZURE_SEARCH # type: ignore + + +class MCPApprovalRequestItemParam(ItemParam, discriminator="mcp_approval_request"): + """A request for human approval of a tool invocation. + + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST + :ivar server_label: The label of the MCP server making the request. Required. + :vartype server_label: str + :ivar name: The name of the tool to run. Required. + :vartype name: str + :ivar arguments: A JSON string of arguments for the tool. Required. + :vartype arguments: str + """ + + type: Literal[ItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server making the request. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of arguments for the tool. Required.""" + + @overload + def __init__( + self, + *, + server_label: str, + name: str, + arguments: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_APPROVAL_REQUEST # type: ignore + + +class MCPApprovalRequestItemResource(ItemResource, discriminator="mcp_approval_request"): + """A request for human approval of a tool invocation. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST + :ivar server_label: The label of the MCP server making the request. Required. + :vartype server_label: str + :ivar name: The name of the tool to run. Required. 
+ :vartype name: str + :ivar arguments: A JSON string of arguments for the tool. Required. + :vartype arguments: str + """ + + type: Literal[ItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server making the request. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of arguments for the tool. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + server_label: str, + name: str, + arguments: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_APPROVAL_REQUEST # type: ignore + + +class MCPApprovalResponseItemParam(ItemParam, discriminator="mcp_approval_response"): + """A response to an MCP approval request. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE + :ivar approval_request_id: The ID of the approval request being answered. Required. + :vartype approval_request_id: str + :ivar approve: Whether the request was approved. Required. + :vartype approve: bool + :ivar reason: Optional reason for the decision. 
+ :vartype reason: str + """ + + type: Literal[ItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the approval request being answered. Required.""" + approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the request was approved. Required.""" + reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional reason for the decision.""" + + @overload + def __init__( + self, + *, + approval_request_id: str, + approve: bool, + reason: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_APPROVAL_RESPONSE # type: ignore + + +class MCPApprovalResponseItemResource(ItemResource, discriminator="mcp_approval_response"): + """A response to an MCP approval request. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE + :ivar approval_request_id: The ID of the approval request being answered. Required. + :vartype approval_request_id: str + :ivar approve: Whether the request was approved. Required. + :vartype approve: bool + :ivar reason: Optional reason for the decision. 
+ :vartype reason: str + """ + + type: Literal[ItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the approval request being answered. Required.""" + approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the request was approved. Required.""" + reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional reason for the decision.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + approval_request_id: str, + approve: bool, + created_by: Optional["_models.CreatedBy"] = None, + reason: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_APPROVAL_RESPONSE # type: ignore + + +class MCPCallItemParam(ItemParam, discriminator="mcp_call"): + """An invocation of a tool on an MCP server. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_CALL + :ivar server_label: The label of the MCP server running the tool. Required. + :vartype server_label: str + :ivar name: The name of the tool that was run. Required. + :vartype name: str + :ivar arguments: A JSON string of the arguments passed to the tool. Required. + :vartype arguments: str + :ivar output: The output from the tool call. + :vartype output: str + :ivar error: The error from the tool call, if any. 
+ :vartype error: str + """ + + type: Literal[ItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server running the tool. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool that was run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments passed to the tool. Required.""" + output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The output from the tool call.""" + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error from the tool call, if any.""" + + @overload + def __init__( + self, + *, + server_label: str, + name: str, + arguments: str, + output: Optional[str] = None, + error: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_CALL # type: ignore + + +class MCPCallItemResource(ItemResource, discriminator="mcp_call"): + """An invocation of a tool on an MCP server. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_CALL + :ivar server_label: The label of the MCP server running the tool. Required. + :vartype server_label: str + :ivar name: The name of the tool that was run. Required. 
+ :vartype name: str + :ivar arguments: A JSON string of the arguments passed to the tool. Required. + :vartype arguments: str + :ivar output: The output from the tool call. + :vartype output: str + :ivar error: The error from the tool call, if any. + :vartype error: str + """ + + type: Literal[ItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server running the tool. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool that was run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments passed to the tool. Required.""" + output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The output from the tool call.""" + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error from the tool call, if any.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + server_label: str, + name: str, + arguments: str, + created_by: Optional["_models.CreatedBy"] = None, + output: Optional[str] = None, + error: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_CALL # type: ignore + + +class MCPListToolsItemParam(ItemParam, discriminator="mcp_list_tools"): + """A list of tools available on an MCP server. + + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS + :ivar server_label: The label of the MCP server. Required. + :vartype server_label: str + :ivar tools: The tools available on the server. Required. + :vartype tools: list[~azure.ai.projects.models.MCPListToolsTool] + :ivar error: Error message if the server could not list tools. + :vartype error: str + """ + + type: Literal[ItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server. Required.""" + tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The tools available on the server. Required.""" + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Error message if the server could not list tools.""" + + @overload + def __init__( + self, + *, + server_label: str, + tools: list["_models.MCPListToolsTool"], + error: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_LIST_TOOLS # type: ignore + + +class MCPListToolsItemResource(ItemResource, discriminator="mcp_list_tools"): + """A list of tools available on an MCP server. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS + :ivar server_label: The label of the MCP server. Required. 
+ :vartype server_label: str + :ivar tools: The tools available on the server. Required. + :vartype tools: list[~azure.ai.projects.models.MCPListToolsTool] + :ivar error: Error message if the server could not list tools. + :vartype error: str + """ + + type: Literal[ItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server. Required.""" + tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The tools available on the server. Required.""" + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Error message if the server could not list tools.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + server_label: str, + tools: list["_models.MCPListToolsTool"], + created_by: Optional["_models.CreatedBy"] = None, + error: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_LIST_TOOLS # type: ignore + + +class MCPListToolsTool(_Model): + """A tool available on an MCP server. + + :ivar name: The name of the tool. Required. + :vartype name: str + :ivar description: The description of the tool. + :vartype description: str + :ivar input_schema: The JSON schema describing the tool's input. Required. + :vartype input_schema: any + :ivar annotations: Additional annotations about the tool. + :vartype annotations: any + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool. 
Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The description of the tool.""" + input_schema: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The JSON schema describing the tool's input. Required.""" + annotations: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional annotations about the tool.""" + + @overload + def __init__( + self, + *, + name: str, + input_schema: Any, + description: Optional[str] = None, + annotations: Optional[Any] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MCPTool(Tool, discriminator="mcp"): + """Give the model access to additional tools via remote Model Context Protocol + (MCP) servers. `Learn more about MCP `_. + + :ivar type: The type of the MCP tool. Always ``mcp``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP + :ivar server_label: A label for this MCP server, used to identify it in tool calls. Required. + :vartype server_label: str + :ivar server_url: The URL for the MCP server. Required. + :vartype server_url: str + :ivar headers: Optional HTTP headers to send to the MCP server. Use for authentication + or other purposes. + :vartype headers: dict[str, str] + :ivar allowed_tools: List of allowed tool names or a filter object. Is either a [str] type or a + MCPToolAllowedTools1 type. + :vartype allowed_tools: list[str] or ~azure.ai.projects.models.MCPToolAllowedTools1 + :ivar require_approval: Specify which of the MCP server's tools require approval. 
Is one of the + following types: MCPToolRequireApproval1, Literal["always"], Literal["never"] + :vartype require_approval: ~azure.ai.projects.models.MCPToolRequireApproval1 or str or str + :ivar project_connection_id: The connection ID in the project for the MCP server. The + connection stores authentication and other connection details needed to connect to the MCP + server. + :vartype project_connection_id: str + """ + + type: Literal[ToolType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the MCP tool. Always ``mcp``. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A label for this MCP server, used to identify it in tool calls. Required.""" + server_url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL for the MCP server. Required.""" + headers: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional HTTP headers to send to the MCP server. Use for authentication + or other purposes.""" + allowed_tools: Optional[Union[list[str], "_models.MCPToolAllowedTools1"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of allowed tool names or a filter object. Is either a [str] type or a MCPToolAllowedTools1 + type.""" + require_approval: Optional[Union["_models.MCPToolRequireApproval1", Literal["always"], Literal["never"]]] = ( + rest_field(visibility=["read", "create", "update", "delete", "query"]) + ) + """Specify which of the MCP server's tools require approval. Is one of the following types: + MCPToolRequireApproval1, Literal[\"always\"], Literal[\"never\"]""" + project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The connection ID in the project for the MCP server. 
The connection stores authentication and + other connection details needed to connect to the MCP server.""" + + @overload + def __init__( + self, + *, + server_label: str, + server_url: str, + headers: Optional[dict[str, str]] = None, + allowed_tools: Optional[Union[list[str], "_models.MCPToolAllowedTools1"]] = None, + require_approval: Optional[ + Union["_models.MCPToolRequireApproval1", Literal["always"], Literal["never"]] + ] = None, + project_connection_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.MCP # type: ignore + + +class MCPToolAllowedTools1(_Model): + """MCPToolAllowedTools1. + + :ivar tool_names: List of allowed tool names. + :vartype tool_names: list[str] + """ + + tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of allowed tool names.""" + + @overload + def __init__( + self, + *, + tool_names: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MCPToolRequireApproval1(_Model): + """MCPToolRequireApproval1. + + :ivar always: A list of tools that always require approval. + :vartype always: ~azure.ai.projects.models.MCPToolRequireApprovalAlways + :ivar never: A list of tools that never require approval. 
+ :vartype never: ~azure.ai.projects.models.MCPToolRequireApprovalNever + """ + + always: Optional["_models.MCPToolRequireApprovalAlways"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A list of tools that always require approval.""" + never: Optional["_models.MCPToolRequireApprovalNever"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A list of tools that never require approval.""" + + @overload + def __init__( + self, + *, + always: Optional["_models.MCPToolRequireApprovalAlways"] = None, + never: Optional["_models.MCPToolRequireApprovalNever"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MCPToolRequireApprovalAlways(_Model): + """MCPToolRequireApprovalAlways. + + :ivar tool_names: List of tools that require approval. + :vartype tool_names: list[str] + """ + + tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of tools that require approval.""" + + @overload + def __init__( + self, + *, + tool_names: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MCPToolRequireApprovalNever(_Model): + """MCPToolRequireApprovalNever. + + :ivar tool_names: List of tools that do not require approval. 
+ :vartype tool_names: list[str] + """ + + tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of tools that do not require approval.""" + + @overload + def __init__( + self, + *, + tool_names: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryOperation(_Model): + """Represents a single memory operation (create, update, or delete) performed on a memory item. + + :ivar kind: The type of memory operation being performed. Required. Known values are: "create", + "update", and "delete". + :vartype kind: str or ~azure.ai.projects.models.MemoryOperationKind + :ivar memory_item: The memory item to create, update, or delete. Required. + :vartype memory_item: ~azure.ai.projects.models.MemoryItem + """ + + kind: Union[str, "_models.MemoryOperationKind"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The type of memory operation being performed. Required. Known values are: \"create\", + \"update\", and \"delete\".""" + memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The memory item to create, update, or delete. Required.""" + + @overload + def __init__( + self, + *, + kind: Union[str, "_models.MemoryOperationKind"], + memory_item: "_models.MemoryItem", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemorySearchItem(_Model): + """A retrieved memory item from memory search. 
+ + :ivar memory_item: Retrieved memory item. Required. + :vartype memory_item: ~azure.ai.projects.models.MemoryItem + """ + + memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Retrieved memory item. Required.""" + + @overload + def __init__( + self, + *, + memory_item: "_models.MemoryItem", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemorySearchOptions(_Model): + """Memory search options. + + :ivar max_memories: Maximum number of memory items to return. + :vartype max_memories: int + """ + + max_memories: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Maximum number of memory items to return.""" + + @overload + def __init__( + self, + *, + max_memories: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemorySearchTool(Tool, discriminator="memory_search"): + """A tool for integrating memories into the agent. + + :ivar type: The type of the tool. Always ``memory_search``. Required. + :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH + :ivar memory_store_name: The name of the memory store to use. Required. + :vartype memory_store_name: str + :ivar scope: The namespace used to group and isolate memories, such as a user ID. + Limits which memories can be retrieved or updated. + Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. Required. 
+ :vartype scope: str + :ivar search_options: Options for searching the memory store. + :vartype search_options: ~azure.ai.projects.models.MemorySearchOptions + :ivar update_delay: The amount of time to wait after inactivity before updating memories with + messages from the call (e.g., '0s', '5m'). Defaults to '60s'. + :vartype update_delay: ~datetime.timedelta + """ + + type: Literal[ToolType.MEMORY_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the tool. Always ``memory_search``. Required.""" + memory_store_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store to use. Required.""" + scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The namespace used to group and isolate memories, such as a user ID. + Limits which memories can be retrieved or updated. + Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. Required.""" + search_options: Optional["_models.MemorySearchOptions"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Options for searching the memory store.""" + update_delay: Optional[datetime.timedelta] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The amount of time to wait after inactivity before updating memories with messages from the + call (e.g., '0s', '5m'). Defaults to '60s'.""" + + @overload + def __init__( + self, + *, + memory_store_name: str, + scope: str, + search_options: Optional["_models.MemorySearchOptions"] = None, + update_delay: Optional[datetime.timedelta] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.MEMORY_SEARCH # type: ignore + + +class MemorySearchToolCallItemParam(ItemParam, discriminator="memory_search_call"): + """MemorySearchToolCallItemParam. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH_CALL + :ivar results: The results returned from the memory search. + :vartype results: list[~azure.ai.projects.models.MemorySearchItem] + """ + + type: Literal[ItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + results: Optional[list["_models.MemorySearchItem"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The results returned from the memory search.""" + + @overload + def __init__( + self, + *, + results: Optional[list["_models.MemorySearchItem"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MEMORY_SEARCH_CALL # type: ignore + + +class MemorySearchToolCallItemResource(ItemResource, discriminator="memory_search_call"): + """MemorySearchToolCallItemResource. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH_CALL + :ivar status: The status of the memory search tool call. One of ``in_progress``, + ``searching``, ``completed``, ``incomplete`` or ``failed``,. Required. 
Is one of the following + types: Literal["in_progress"], Literal["searching"], Literal["completed"], + Literal["incomplete"], Literal["failed"] + :vartype status: str or str or str or str or str + :ivar results: The results returned from the memory search. + :vartype results: list[~azure.ai.projects.models.MemorySearchItem] + """ + + type: Literal[ItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the memory search tool call. One of ``in_progress``, + ``searching``, ``completed``, ``incomplete`` or ``failed``,. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], + Literal[\"incomplete\"], Literal[\"failed\"]""" + results: Optional[list["_models.MemorySearchItem"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The results returned from the memory search.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], + created_by: Optional["_models.CreatedBy"] = None, + results: Optional[list["_models.MemorySearchItem"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MEMORY_SEARCH_CALL # type: ignore + + +class MemoryStoreDefinition(_Model): + """Base definition for memory store configurations. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + MemoryStoreDefaultDefinition + + :ivar kind: The kind of the memory store. Required. "default" + :vartype kind: str or ~azure.ai.projects.models.MemoryStoreKind + """ + + __mapping__: dict[str, _Model] = {} + kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) + """The kind of the memory store. Required. \"default\"""" + + @overload + def __init__( + self, + *, + kind: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreDefaultDefinition(MemoryStoreDefinition, discriminator="default"): + """Default memory store implementation. + + :ivar kind: The kind of the memory store. Required. The default memory store implementation. + :vartype kind: str or ~azure.ai.projects.models.DEFAULT + :ivar chat_model: The name or identifier of the chat completion model deployment used for + memory processing. Required. + :vartype chat_model: str + :ivar embedding_model: The name or identifier of the embedding model deployment used for memory + processing. Required. + :vartype embedding_model: str + :ivar options: Default memory store options. + :vartype options: ~azure.ai.projects.models.MemoryStoreDefaultOptions + """ + + kind: Literal[MemoryStoreKind.DEFAULT] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The kind of the memory store. Required. The default memory store implementation.""" + chat_model: str = rest_field(visibility=["read", "create"]) + """The name or identifier of the chat completion model deployment used for memory processing. 
+ Required.""" + embedding_model: str = rest_field(visibility=["read", "create"]) + """The name or identifier of the embedding model deployment used for memory processing. Required.""" + options: Optional["_models.MemoryStoreDefaultOptions"] = rest_field(visibility=["read", "create"]) + """Default memory store options.""" + + @overload + def __init__( + self, + *, + chat_model: str, + embedding_model: str, + options: Optional["_models.MemoryStoreDefaultOptions"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = MemoryStoreKind.DEFAULT # type: ignore + + +class MemoryStoreDefaultOptions(_Model): + """Default memory store configurations. + + :ivar user_profile_enabled: Whether to enable user profile extraction and storage. Default is + true. Required. + :vartype user_profile_enabled: bool + :ivar user_profile_details: Specific categories or types of user profile information to extract + and store. + :vartype user_profile_details: str + :ivar chat_summary_enabled: Whether to enable chat summary extraction and storage. Default is + true. Required. + :vartype chat_summary_enabled: bool + """ + + user_profile_enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to enable user profile extraction and storage. Default is true. Required.""" + user_profile_details: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Specific categories or types of user profile information to extract and store.""" + chat_summary_enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to enable chat summary extraction and storage. Default is true. 
Required.""" + + @overload + def __init__( + self, + *, + user_profile_enabled: bool, + chat_summary_enabled: bool, + user_profile_details: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreDeleteScopeResponse(_Model): + """Response for deleting memories from a scope. + + :ivar object: The object type. Always 'memory_store.scope.deleted'. Required. Default value is + "memory_store.scope.deleted". + :vartype object: str + :ivar name: The name of the memory store. Required. + :vartype name: str + :ivar scope: The scope from which memories were deleted. Required. + :vartype scope: str + :ivar deleted: Whether the deletion operation was successful. Required. + :vartype deleted: bool + """ + + object: Literal["memory_store.scope.deleted"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The object type. Always 'memory_store.scope.deleted'. Required. Default value is + \"memory_store.scope.deleted\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store. Required.""" + scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The scope from which memories were deleted. Required.""" + deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the deletion operation was successful. Required.""" + + @overload + def __init__( + self, + *, + name: str, + scope: str, + deleted: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["memory_store.scope.deleted"] = "memory_store.scope.deleted" + + +class MemoryStoreObject(_Model): + """A memory store that can store and retrieve user memories. + + :ivar object: The object type, which is always 'memory_store'. Required. Default value is + "memory_store". + :vartype object: str + :ivar id: The unique identifier of the memory store. Required. + :vartype id: str + :ivar created_at: The Unix timestamp (seconds) when the memory store was created. Required. + :vartype created_at: ~datetime.datetime + :ivar updated_at: The Unix timestamp (seconds) when the memory store was last updated. + Required. + :vartype updated_at: ~datetime.datetime + :ivar name: The name of the memory store. Required. + :vartype name: str + :ivar description: A human-readable description of the memory store. + :vartype description: str + :ivar metadata: Arbitrary key-value metadata to associate with the memory store. + :vartype metadata: dict[str, str] + :ivar definition: The definition of the memory store. Required. + :vartype definition: ~azure.ai.projects.models.MemoryStoreDefinition + """ + + object: Literal["memory_store"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type, which is always 'memory_store'. Required. Default value is \"memory_store\".""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the memory store. Required.""" + created_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """The Unix timestamp (seconds) when the memory store was created. 
Required.""" + updated_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """The Unix timestamp (seconds) when the memory store was last updated. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable description of the memory store.""" + metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Arbitrary key-value metadata to associate with the memory store.""" + definition: "_models.MemoryStoreDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The definition of the memory store. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + created_at: datetime.datetime, + updated_at: datetime.datetime, + name: str, + definition: "_models.MemoryStoreDefinition", + description: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["memory_store"] = "memory_store" + + +class MemoryStoreOperationUsage(_Model): + """Usage statistics of a memory store operation. + + :ivar embedding_tokens: The number of embedding tokens. Required. + :vartype embedding_tokens: int + :ivar input_tokens: The number of input tokens. Required. + :vartype input_tokens: int + :ivar input_tokens_details: A detailed breakdown of the input tokens. Required. 
+ :vartype input_tokens_details: + ~azure.ai.projects.models.MemoryStoreOperationUsageInputTokensDetails + :ivar output_tokens: The number of output tokens. Required. + :vartype output_tokens: int + :ivar output_tokens_details: A detailed breakdown of the output tokens. Required. + :vartype output_tokens_details: + ~azure.ai.projects.models.MemoryStoreOperationUsageOutputTokensDetails + :ivar total_tokens: The total number of tokens used. Required. + :vartype total_tokens: int + """ + + embedding_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of embedding tokens. Required.""" + input_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of input tokens. Required.""" + input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A detailed breakdown of the input tokens. Required.""" + output_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of output tokens. Required.""" + output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A detailed breakdown of the output tokens. Required.""" + total_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The total number of tokens used. Required.""" + + @overload + def __init__( + self, + *, + embedding_tokens: int, + input_tokens: int, + input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails", + output_tokens: int, + output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails", + total_tokens: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreOperationUsageInputTokensDetails(_Model): # pylint: disable=name-too-long + """MemoryStoreOperationUsageInputTokensDetails. + + :ivar cached_tokens: The number of tokens that were retrieved from the cache. + `More on prompt caching `_. Required. + :vartype cached_tokens: int + """ + + cached_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of tokens that were retrieved from the cache. + `More on prompt caching `_. Required.""" + + @overload + def __init__( + self, + *, + cached_tokens: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreOperationUsageOutputTokensDetails(_Model): # pylint: disable=name-too-long + """MemoryStoreOperationUsageOutputTokensDetails. + + :ivar reasoning_tokens: The number of reasoning tokens. Required. + :vartype reasoning_tokens: int + """ + + reasoning_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of reasoning tokens. Required.""" + + @overload + def __init__( + self, + *, + reasoning_tokens: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreSearchResponse(_Model): + """Memory search response. + + :ivar search_id: The unique ID of this search request. Use this value as previous_search_id in + subsequent requests to perform incremental searches. Required. 
+ :vartype search_id: str + :ivar memories: Related memory items found during the search operation. Required. + :vartype memories: list[~azure.ai.projects.models.MemorySearchItem] + :ivar usage: Usage statistics associated with the memory search operation. Required. + :vartype usage: ~azure.ai.projects.models.MemoryStoreOperationUsage + """ + + search_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of this search request. Use this value as previous_search_id in subsequent + requests to perform incremental searches. Required.""" + memories: list["_models.MemorySearchItem"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Related memory items found during the search operation. Required.""" + usage: "_models.MemoryStoreOperationUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Usage statistics associated with the memory search operation. Required.""" + + @overload + def __init__( + self, + *, + search_id: str, + memories: list["_models.MemorySearchItem"], + usage: "_models.MemoryStoreOperationUsage", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreUpdateResponse(_Model): + """Provides the status of a memory store update operation. + + :ivar update_id: The unique ID of this update request. Use this value as previous_update_id in + subsequent requests to perform incremental updates. Required. + :vartype update_id: str + :ivar status: The status of the memory update operation. One of "queued", "in_progress", + "completed", "failed", or "superseded". Required. Known values are: "queued", "in_progress", + "completed", "failed", and "superseded". 
+ :vartype status: str or ~azure.ai.projects.models.MemoryStoreUpdateStatus + :ivar superseded_by: The update_id the operation was superseded by when status is "superseded". + :vartype superseded_by: str + :ivar result: The result of memory store update operation when status is "completed". + :vartype result: ~azure.ai.projects.models.MemoryStoreUpdateResult + :ivar error: Error object that describes the error when status is "failed". + :vartype error: ~azure.ai.projects.models.ApiError + """ + + update_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of this update request. Use this value as previous_update_id in subsequent + requests to perform incremental updates. Required.""" + status: Union[str, "_models.MemoryStoreUpdateStatus"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the memory update operation. One of \"queued\", \"in_progress\", \"completed\", + \"failed\", or \"superseded\". Required. Known values are: \"queued\", \"in_progress\", + \"completed\", \"failed\", and \"superseded\".""" + superseded_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The update_id the operation was superseded by when status is \"superseded\".""" + result: Optional["_models.MemoryStoreUpdateResult"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The result of memory store update operation when status is \"completed\".""" + error: Optional["_models.ApiError"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Error object that describes the error when status is \"failed\".""" + + @overload + def __init__( + self, + *, + update_id: str, + status: Union[str, "_models.MemoryStoreUpdateStatus"], + superseded_by: Optional[str] = None, + result: Optional["_models.MemoryStoreUpdateResult"] = None, + error: Optional["_models.ApiError"] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreUpdateResult(_Model): + """Memory update result. + + :ivar memory_operations: A list of individual memory operations that were performed during the + update. Required. + :vartype memory_operations: list[~azure.ai.projects.models.MemoryOperation] + :ivar usage: Usage statistics associated with the memory update operation. Required. + :vartype usage: ~azure.ai.projects.models.MemoryStoreOperationUsage + """ + + memory_operations: list["_models.MemoryOperation"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A list of individual memory operations that were performed during the update. Required.""" + usage: "_models.MemoryStoreOperationUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Usage statistics associated with the memory update operation. Required.""" + + @overload + def __init__( + self, + *, + memory_operations: list["_models.MemoryOperation"], + usage: "_models.MemoryStoreOperationUsage", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MicrosoftFabricAgentTool(Tool, discriminator="fabric_dataagent_preview"): + """The input definition information for a Microsoft Fabric tool as used to configure an agent. + + :ivar type: The object type, which is always 'fabric_dataagent'. Required. + :vartype type: str or ~azure.ai.projects.models.FABRIC_DATAAGENT_PREVIEW + :ivar fabric_dataagent_preview: The fabric data agent tool parameters. Required. 
+ :vartype fabric_dataagent_preview: ~azure.ai.projects.models.FabricDataAgentToolParameters + """ + + type: Literal[ToolType.FABRIC_DATAAGENT_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'fabric_dataagent'. Required.""" + fabric_dataagent_preview: "_models.FabricDataAgentToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The fabric data agent tool parameters. Required.""" + + @overload + def __init__( + self, + *, + fabric_dataagent_preview: "_models.FabricDataAgentToolParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.FABRIC_DATAAGENT_PREVIEW # type: ignore + + +class ModelDeployment(Deployment, discriminator="ModelDeployment"): + """Model Deployment Definition. + + :ivar name: Name of the deployment. Required. + :vartype name: str + :ivar type: The type of the deployment. Required. Model deployment + :vartype type: str or ~azure.ai.projects.models.MODEL_DEPLOYMENT + :ivar model_name: Publisher-specific name of the deployed model. Required. + :vartype model_name: str + :ivar model_version: Publisher-specific version of the deployed model. Required. + :vartype model_version: str + :ivar model_publisher: Name of the deployed model's publisher. Required. + :vartype model_publisher: str + :ivar capabilities: Capabilities of deployed model. Required. + :vartype capabilities: dict[str, str] + :ivar sku: Sku of the model deployment. Required. + :vartype sku: ~azure.ai.projects.models.ModelDeploymentSku + :ivar connection_name: Name of the connection the deployment comes from. 
+ :vartype connection_name: str + """ + + type: Literal[DeploymentType.MODEL_DEPLOYMENT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the deployment. Required. Model deployment""" + model_name: str = rest_field(name="modelName", visibility=["read"]) + """Publisher-specific name of the deployed model. Required.""" + model_version: str = rest_field(name="modelVersion", visibility=["read"]) + """Publisher-specific version of the deployed model. Required.""" + model_publisher: str = rest_field(name="modelPublisher", visibility=["read"]) + """Name of the deployed model's publisher. Required.""" + capabilities: dict[str, str] = rest_field(visibility=["read"]) + """Capabilities of deployed model. Required.""" + sku: "_models.ModelDeploymentSku" = rest_field(visibility=["read"]) + """Sku of the model deployment. Required.""" + connection_name: Optional[str] = rest_field(name="connectionName", visibility=["read"]) + """Name of the connection the deployment comes from.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = DeploymentType.MODEL_DEPLOYMENT # type: ignore + + +class ModelDeploymentSku(_Model): + """Sku information. + + :ivar capacity: Sku capacity. Required. + :vartype capacity: int + :ivar family: Sku family. Required. + :vartype family: str + :ivar name: Sku name. Required. + :vartype name: str + :ivar size: Sku size. Required. + :vartype size: str + :ivar tier: Sku tier. Required. + :vartype tier: str + """ + + capacity: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku capacity. 
Required.""" + family: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku family. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku name. Required.""" + size: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku size. Required.""" + tier: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku tier. Required.""" + + @overload + def __init__( + self, + *, + capacity: int, + family: str, + name: str, + size: str, + tier: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MonthlyRecurrenceSchedule(RecurrenceSchedule, discriminator="Monthly"): + """Monthly recurrence schedule. + + :ivar type: Monthly recurrence type. Required. Monthly recurrence pattern. + :vartype type: str or ~azure.ai.projects.models.MONTHLY + :ivar days_of_month: Days of the month for the recurrence schedule. Required. + :vartype days_of_month: list[int] + """ + + type: Literal[RecurrenceType.MONTHLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Monthly recurrence type. Required. Monthly recurrence pattern.""" + days_of_month: list[int] = rest_field( + name="daysOfMonth", visibility=["read", "create", "update", "delete", "query"] + ) + """Days of the month for the recurrence schedule. Required.""" + + @overload + def __init__( + self, + *, + days_of_month: list[int], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = RecurrenceType.MONTHLY # type: ignore + + +class NoAuthenticationCredentials(BaseCredentials, discriminator="None"): + """Credentials that do not require authentication. + + :ivar type: The credential type. Required. No credential + :vartype type: str or ~azure.ai.projects.models.NONE + """ + + type: Literal[CredentialType.NONE] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. No credential""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.NONE # type: ignore + + +class OAuthConsentRequestItemResource(ItemResource, discriminator="oauth_consent_request"): + """Request from the service for the user to perform OAuth consent. + + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar id: Required. + :vartype id: str + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.OAUTH_CONSENT_REQUEST + :ivar consent_link: The link the user can use to perform OAuth consent. Required. + :vartype consent_link: str + :ivar server_label: The server label for the OAuth consent request. Required. + :vartype server_label: str + """ + + type: Literal[ItemType.OAUTH_CONSENT_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + consent_link: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The link the user can use to perform OAuth consent. 
Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The server label for the OAuth consent request. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + consent_link: str, + server_label: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.OAUTH_CONSENT_REQUEST # type: ignore + + +class OneTimeTrigger(Trigger, discriminator="OneTime"): + """One-time trigger. + + :ivar type: Required. One-time trigger. + :vartype type: str or ~azure.ai.projects.models.ONE_TIME + :ivar trigger_at: Date and time for the one-time trigger in ISO 8601 format. Required. + :vartype trigger_at: str + :ivar time_zone: Time zone for the one-time trigger. + :vartype time_zone: str + """ + + type: Literal[TriggerType.ONE_TIME] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. One-time trigger.""" + trigger_at: str = rest_field(name="triggerAt", visibility=["read", "create", "update", "delete", "query"]) + """Date and time for the one-time trigger in ISO 8601 format. Required.""" + time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) + """Time zone for the one-time trigger.""" + + @overload + def __init__( + self, + *, + trigger_at: str, + time_zone: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = TriggerType.ONE_TIME # type: ignore + + +class OpenApiAgentTool(Tool, discriminator="openapi"): + """The input definition information for an OpenAPI tool as used to configure an agent. + + :ivar type: The object type, which is always 'openapi'. Required. + :vartype type: str or ~azure.ai.projects.models.OPENAPI + :ivar openapi: The openapi function definition. Required. + :vartype openapi: ~azure.ai.projects.models.OpenApiFunctionDefinition + """ + + type: Literal[ToolType.OPENAPI] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'openapi'. Required.""" + openapi: "_models.OpenApiFunctionDefinition" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The openapi function definition. Required.""" + + @overload + def __init__( + self, + *, + openapi: "_models.OpenApiFunctionDefinition", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.OPENAPI # type: ignore + + +class OpenApiAuthDetails(_Model): + """authentication details for OpenApiFunctionDefinition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + OpenApiAnonymousAuthDetails, OpenApiManagedAuthDetails, OpenApiProjectConnectionAuthDetails + + :ivar type: The type of authentication, must be anonymous/project_connection/managed_identity. + Required. Known values are: "anonymous", "project_connection", and "managed_identity". 
+ :vartype type: str or ~azure.ai.projects.models.OpenApiAuthType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of authentication, must be anonymous/project_connection/managed_identity. Required. + Known values are: \"anonymous\", \"project_connection\", and \"managed_identity\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OpenApiAnonymousAuthDetails(OpenApiAuthDetails, discriminator="anonymous"): + """Security details for OpenApi anonymous authentication. + + :ivar type: The object type, which is always 'anonymous'. Required. + :vartype type: str or ~azure.ai.projects.models.ANONYMOUS + """ + + type: Literal[OpenApiAuthType.ANONYMOUS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'anonymous'. Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = OpenApiAuthType.ANONYMOUS # type: ignore + + +class OpenApiFunctionDefinition(_Model): + """The input definition information for an openapi function. + + :ivar name: The name of the function to be called. Required. + :vartype name: str + :ivar description: A description of what the function does, used by the model to choose when + and how to call the function. 
+ :vartype description: str + :ivar spec: The openapi function shape, described as a JSON Schema object. Required. + :vartype spec: any + :ivar auth: Open API authentication details. Required. + :vartype auth: ~azure.ai.projects.models.OpenApiAuthDetails + :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults. + :vartype default_params: list[str] + :ivar functions: List of function definitions used by OpenApi tool. + :vartype functions: list[~azure.ai.projects.models.OpenApiFunctionDefinitionFunction] + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to be called. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the function does, used by the model to choose when and how to call the + function.""" + spec: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The openapi function shape, described as a JSON Schema object. Required.""" + auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Open API authentication details. Required.""" + default_params: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of OpenAPI spec parameters that will use user-provided defaults.""" + functions: Optional[list["_models.OpenApiFunctionDefinitionFunction"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of function definitions used by OpenApi tool.""" + + @overload + def __init__( + self, + *, + name: str, + spec: Any, + auth: "_models.OpenApiAuthDetails", + description: Optional[str] = None, + default_params: Optional[list[str]] = None, + functions: Optional[list["_models.OpenApiFunctionDefinitionFunction"]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OpenApiFunctionDefinitionFunction(_Model): + """OpenApiFunctionDefinitionFunction. + + :ivar name: The name of the function to be called. Required. + :vartype name: str + :ivar description: A description of what the function does, used by the model to choose when + and how to call the function. + :vartype description: str + :ivar parameters: The parameters the functions accepts, described as a JSON Schema object. + Required. + :vartype parameters: any + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to be called. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the function does, used by the model to choose when and how to call the + function.""" + parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The parameters the functions accepts, described as a JSON Schema object. Required.""" + + @overload + def __init__( + self, + *, + name: str, + parameters: Any, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OpenApiManagedAuthDetails(OpenApiAuthDetails, discriminator="managed_identity"): + """Security details for OpenApi managed_identity authentication. + + :ivar type: The object type, which is always 'managed_identity'. Required. 
+ :vartype type: str or ~azure.ai.projects.models.MANAGED_IDENTITY + :ivar security_scheme: Connection auth security details. Required. + :vartype security_scheme: ~azure.ai.projects.models.OpenApiManagedSecurityScheme + """ + + type: Literal[OpenApiAuthType.MANAGED_IDENTITY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'managed_identity'. Required.""" + security_scheme: "_models.OpenApiManagedSecurityScheme" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Connection auth security details. Required.""" + + @overload + def __init__( + self, + *, + security_scheme: "_models.OpenApiManagedSecurityScheme", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = OpenApiAuthType.MANAGED_IDENTITY # type: ignore + + +class OpenApiManagedSecurityScheme(_Model): + """Security scheme for OpenApi managed_identity authentication. + + :ivar audience: Authentication scope for managed_identity auth type. Required. + :vartype audience: str + """ + + audience: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Authentication scope for managed_identity auth type. Required.""" + + @overload + def __init__( + self, + *, + audience: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OpenApiProjectConnectionAuthDetails(OpenApiAuthDetails, discriminator="project_connection"): + """Security details for OpenApi project connection authentication. 
+
+    :ivar type: The object type, which is always 'project_connection'. Required.
+    :vartype type: str or ~azure.ai.projects.models.PROJECT_CONNECTION
+    :ivar security_scheme: Project connection auth security details. Required.
+    :vartype security_scheme: ~azure.ai.projects.models.OpenApiProjectConnectionSecurityScheme
+    """
+
+    type: Literal[OpenApiAuthType.PROJECT_CONNECTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The object type, which is always 'project_connection'. Required."""
+    security_scheme: "_models.OpenApiProjectConnectionSecurityScheme" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Project connection auth security details. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        security_scheme: "_models.OpenApiProjectConnectionSecurityScheme",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = OpenApiAuthType.PROJECT_CONNECTION  # type: ignore
+
+
+class OpenApiProjectConnectionSecurityScheme(_Model):
+    """Security scheme for OpenApi project_connection authentication.
+
+    :ivar project_connection_id: Project connection id for Project Connection auth type. Required.
+    :vartype project_connection_id: str
+    """
+
+    project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Project connection id for Project Connection auth type. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        project_connection_id: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PagedScheduleRun(_Model): + """Paged collection of ScheduleRun items. + + :ivar value: The ScheduleRun items on this page. Required. + :vartype value: list[~azure.ai.projects.models.ScheduleRun] + :ivar next_link: The link to the next page of items. + :vartype next_link: str + """ + + value: list["_models.ScheduleRun"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ScheduleRun items on this page. Required.""" + next_link: Optional[str] = rest_field(name="nextLink", visibility=["read", "create", "update", "delete", "query"]) + """The link to the next page of items.""" + + @overload + def __init__( + self, + *, + value: list["_models.ScheduleRun"], + next_link: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PendingUploadRequest(_Model): + """Represents a request for a pending upload. + + :ivar pending_upload_id: If PendingUploadId is not provided, a random GUID will be used. + :vartype pending_upload_id: str + :ivar connection_name: Azure Storage Account connection name to use for generating temporary + SAS token. + :vartype connection_name: str + :ivar pending_upload_type: BlobReference is the only supported type. Required. Blob Reference + is the only supported type. 
+ :vartype pending_upload_type: str or ~azure.ai.projects.models.BLOB_REFERENCE + """ + + pending_upload_id: Optional[str] = rest_field( + name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] + ) + """If PendingUploadId is not provided, a random GUID will be used.""" + connection_name: Optional[str] = rest_field( + name="connectionName", visibility=["read", "create", "update", "delete", "query"] + ) + """Azure Storage Account connection name to use for generating temporary SAS token.""" + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE] = rest_field( + name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] + ) + """BlobReference is the only supported type. Required. Blob Reference is the only supported type.""" + + @overload + def __init__( + self, + *, + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE], + pending_upload_id: Optional[str] = None, + connection_name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PendingUploadResponse(_Model): + """Represents the response for a pending upload request. + + :ivar blob_reference: Container-level read, write, list SAS. Required. + :vartype blob_reference: ~azure.ai.projects.models.BlobReference + :ivar pending_upload_id: ID for this upload request. Required. + :vartype pending_upload_id: str + :ivar version: Version of asset to be created if user did not specify version when initially + creating upload. + :vartype version: str + :ivar pending_upload_type: BlobReference is the only supported type. Required. Blob Reference + is the only supported type. 
+ :vartype pending_upload_type: str or ~azure.ai.projects.models.BLOB_REFERENCE + """ + + blob_reference: "_models.BlobReference" = rest_field( + name="blobReference", visibility=["read", "create", "update", "delete", "query"] + ) + """Container-level read, write, list SAS. Required.""" + pending_upload_id: str = rest_field( + name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] + ) + """ID for this upload request. Required.""" + version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Version of asset to be created if user did not specify version when initially creating upload.""" + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE] = rest_field( + name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] + ) + """BlobReference is the only supported type. Required. Blob Reference is the only supported type.""" + + @overload + def __init__( + self, + *, + blob_reference: "_models.BlobReference", + pending_upload_id: str, + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE], + version: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Prompt(_Model): + """Reference to a prompt template and its variables. + `Learn more `_. + + :ivar id: The unique identifier of the prompt template to use. Required. + :vartype id: str + :ivar version: Optional version of the prompt template. + :vartype version: str + :ivar variables: + :vartype variables: ~azure.ai.projects.models.ResponsePromptVariables + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the prompt template to use. 
Required.""" + version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional version of the prompt template.""" + variables: Optional["_models.ResponsePromptVariables"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + version: Optional[str] = None, + variables: Optional["_models.ResponsePromptVariables"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PromptAgentDefinition(AgentDefinition, discriminator="prompt"): + """The prompt agent definition. + + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.PROMPT + :ivar model: The model deployment to use for this agent. Required. + :vartype model: str + :ivar instructions: A system (or developer) message inserted into the model's context. + :vartype instructions: str + :ivar temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 + will make the output more random, while lower values like 0.2 will make it more focused and + deterministic. + We generally recommend altering this or ``top_p`` but not both. + :vartype temperature: float + :ivar top_p: An alternative to sampling with temperature, called nucleus sampling, + where the model considers the results of the tokens with top_p probability + mass. So 0.1 means only the tokens comprising the top 10% probability mass + are considered. + We generally recommend altering this or ``temperature`` but not both. 
+ :vartype top_p: float + :ivar reasoning: + :vartype reasoning: ~azure.ai.projects.models.Reasoning + :ivar tools: An array of tools the model may call while generating a response. You + can specify which tool to use by setting the ``tool_choice`` parameter. + :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar text: Configuration options for a text response from the model. Can be plain text or + structured JSON data. + :vartype text: ~azure.ai.projects.models.PromptAgentDefinitionText + :ivar structured_inputs: Set of structured inputs that can participate in prompt template + substitution or tool argument bindings. + :vartype structured_inputs: dict[str, ~azure.ai.projects.models.StructuredInputDefinition] + """ + + kind: Literal[AgentKind.PROMPT] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + model: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The model deployment to use for this agent. Required.""" + instructions: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A system (or developer) message inserted into the model's context.""" + temperature: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output + more random, while lower values like 0.2 will make it more focused and deterministic. + We generally recommend altering this or ``top_p`` but not both.""" + top_p: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An alternative to sampling with temperature, called nucleus sampling, + where the model considers the results of the tokens with top_p probability + mass. So 0.1 means only the tokens comprising the top 10% probability mass + are considered. 
+ We generally recommend altering this or ``temperature`` but not both.""" + reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of tools the model may call while generating a response. You + can specify which tool to use by setting the ``tool_choice`` parameter.""" + text: Optional["_models.PromptAgentDefinitionText"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Configuration options for a text response from the model. Can be plain text or structured JSON + data.""" + structured_inputs: Optional[dict[str, "_models.StructuredInputDefinition"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Set of structured inputs that can participate in prompt template substitution or tool argument + bindings.""" + + @overload + def __init__( + self, + *, + model: str, + rai_config: Optional["_models.RaiConfig"] = None, + instructions: Optional[str] = None, + temperature: Optional[float] = None, + top_p: Optional[float] = None, + reasoning: Optional["_models.Reasoning"] = None, + tools: Optional[list["_models.Tool"]] = None, + text: Optional["_models.PromptAgentDefinitionText"] = None, + structured_inputs: Optional[dict[str, "_models.StructuredInputDefinition"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = AgentKind.PROMPT # type: ignore + + +class PromptAgentDefinitionText(_Model): + """PromptAgentDefinitionText. 
+ + :ivar format: + :vartype format: ~azure.ai.projects.models.ResponseTextFormatConfiguration + """ + + format: Optional["_models.ResponseTextFormatConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + + @overload + def __init__( + self, + *, + format: Optional["_models.ResponseTextFormatConfiguration"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PromptBasedEvaluatorDefinition(EvaluatorDefinition, discriminator="prompt"): + """Prompt-based evaluator. + + :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. + This includes parameters like type, properties, required. + :vartype init_parameters: any + :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This + includes parameters like type, properties, required. + :vartype data_schema: any + :ivar metrics: List of output metrics produced by this evaluator. + :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] + :ivar type: Required. Prompt-based definition + :vartype type: str or ~azure.ai.projects.models.PROMPT + :ivar prompt_text: The prompt text used for evaluation. Required. + :vartype prompt_text: str + """ + + type: Literal[EvaluatorDefinitionType.PROMPT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Prompt-based definition""" + prompt_text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The prompt text used for evaluation. 
Required.""" + + @overload + def __init__( + self, + *, + prompt_text: str, + init_parameters: Optional[Any] = None, + data_schema: Optional[Any] = None, + metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = EvaluatorDefinitionType.PROMPT # type: ignore + + +class ProtocolVersionRecord(_Model): + """A record mapping for a single protocol and its version. + + :ivar protocol: The protocol type. Required. Known values are: "activity_protocol" and + "responses". + :vartype protocol: str or ~azure.ai.projects.models.AgentProtocol + :ivar version: The version string for the protocol, e.g. 'v0.1.1'. Required. + :vartype version: str + """ + + protocol: Union[str, "_models.AgentProtocol"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The protocol type. Required. Known values are: \"activity_protocol\" and \"responses\".""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version string for the protocol, e.g. 'v0.1.1'. Required.""" + + @overload + def __init__( + self, + *, + protocol: Union[str, "_models.AgentProtocol"], + version: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class RaiConfig(_Model): + """Configuration for Responsible AI (RAI) content filtering and safety features. + + :ivar rai_policy_name: The name of the RAI policy to apply. Required. 
+ :vartype rai_policy_name: str + """ + + rai_policy_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the RAI policy to apply. Required.""" + + @overload + def __init__( + self, + *, + rai_policy_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class RankingOptions(_Model): + """RankingOptions. + + :ivar ranker: The ranker to use for the file search. Is either a Literal["auto"] type or a + Literal["default-2024-11-15"] type. + :vartype ranker: str or str + :ivar score_threshold: The score threshold for the file search, a number between 0 and 1. + Numbers closer to 1 will attempt to return only the most relevant results, but may return fewer + results. + :vartype score_threshold: float + """ + + ranker: Optional[Literal["auto", "default-2024-11-15"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The ranker to use for the file search. Is either a Literal[\"auto\"] type or a + Literal[\"default-2024-11-15\"] type.""" + score_threshold: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The score threshold for the file search, a number between 0 and 1. Numbers closer to 1 will + attempt to return only the most relevant results, but may return fewer results.""" + + @overload + def __init__( + self, + *, + ranker: Optional[Literal["auto", "default-2024-11-15"]] = None, + score_threshold: Optional[float] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Reasoning(_Model): + """**o-series models only** + Configuration options for + `reasoning models `_. + + :ivar effort: Known values are: "low", "medium", and "high". + :vartype effort: str or ~azure.ai.projects.models.ReasoningEffort + :ivar summary: A summary of the reasoning performed by the model. This can be + useful for debugging and understanding the model's reasoning process. + One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: Literal["auto"], + Literal["concise"], Literal["detailed"] + :vartype summary: str or str or str + :ivar generate_summary: **Deprecated:** use ``summary`` instead. + A summary of the reasoning performed by the model. This can be + useful for debugging and understanding the model's reasoning process. + One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: Literal["auto"], + Literal["concise"], Literal["detailed"] + :vartype generate_summary: str or str or str + """ + + effort: Optional[Union[str, "_models.ReasoningEffort"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Known values are: \"low\", \"medium\", and \"high\".""" + summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A summary of the reasoning performed by the model. This can be + useful for debugging and understanding the model's reasoning process. + One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: + Literal[\"auto\"], Literal[\"concise\"], Literal[\"detailed\"]""" + generate_summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """**Deprecated:** use ``summary`` instead. + A summary of the reasoning performed by the model. 
This can be + useful for debugging and understanding the model's reasoning process. + One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: + Literal[\"auto\"], Literal[\"concise\"], Literal[\"detailed\"]""" + + @overload + def __init__( + self, + *, + effort: Optional[Union[str, "_models.ReasoningEffort"]] = None, + summary: Optional[Literal["auto", "concise", "detailed"]] = None, + generate_summary: Optional[Literal["auto", "concise", "detailed"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ReasoningItemParam(ItemParam, discriminator="reasoning"): + """A description of the chain of thought used by a reasoning model while generating + a response. Be sure to include these items in your ``input`` to the Responses API + for subsequent turns of a conversation if you are manually + `managing context `_. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.REASONING + :ivar encrypted_content: The encrypted content of the reasoning item - populated when a + response is + generated with ``reasoning.encrypted_content`` in the ``include`` parameter. + :vartype encrypted_content: str + :ivar summary: Reasoning text contents. Required. 
+ :vartype summary: list[~azure.ai.projects.models.ReasoningItemSummaryPart] + """ + + type: Literal[ItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The encrypted content of the reasoning item - populated when a response is + generated with ``reasoning.encrypted_content`` in the ``include`` parameter.""" + summary: list["_models.ReasoningItemSummaryPart"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Reasoning text contents. Required.""" + + @overload + def __init__( + self, + *, + summary: list["_models.ReasoningItemSummaryPart"], + encrypted_content: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.REASONING # type: ignore + + +class ReasoningItemResource(ItemResource, discriminator="reasoning"): + """A description of the chain of thought used by a reasoning model while generating + a response. Be sure to include these items in your ``input`` to the Responses API + for subsequent turns of a conversation if you are manually + `managing context `_. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.REASONING + :ivar encrypted_content: The encrypted content of the reasoning item - populated when a + response is + generated with ``reasoning.encrypted_content`` in the ``include`` parameter. 
+ :vartype encrypted_content: str + :ivar summary: Reasoning text contents. Required. + :vartype summary: list[~azure.ai.projects.models.ReasoningItemSummaryPart] + """ + + type: Literal[ItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The encrypted content of the reasoning item - populated when a response is + generated with ``reasoning.encrypted_content`` in the ``include`` parameter.""" + summary: list["_models.ReasoningItemSummaryPart"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Reasoning text contents. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + summary: list["_models.ReasoningItemSummaryPart"], + created_by: Optional["_models.CreatedBy"] = None, + encrypted_content: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.REASONING # type: ignore + + +class ReasoningItemSummaryPart(_Model): + """ReasoningItemSummaryPart. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ReasoningItemSummaryTextPart + + :ivar type: Required. "summary_text" + :vartype type: str or ~azure.ai.projects.models.ReasoningItemSummaryPartType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. \"summary_text\"""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ReasoningItemSummaryTextPart(ReasoningItemSummaryPart, discriminator="summary_text"): + """ReasoningItemSummaryTextPart. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.SUMMARY_TEXT + :ivar text: Required. + :vartype text: str + """ + + type: Literal[ReasoningItemSummaryPartType.SUMMARY_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ReasoningItemSummaryPartType.SUMMARY_TEXT # type: ignore + + +class RecurrenceTrigger(Trigger, discriminator="Recurrence"): + """Recurrence based trigger. + + :ivar type: Type of the trigger. Required. Recurrence based trigger. + :vartype type: str or ~azure.ai.projects.models.RECURRENCE + :ivar start_time: Start time for the recurrence schedule in ISO 8601 format. + :vartype start_time: str + :ivar end_time: End time for the recurrence schedule in ISO 8601 format. + :vartype end_time: str + :ivar time_zone: Time zone for the recurrence schedule. + :vartype time_zone: str + :ivar interval: Interval for the recurrence schedule. Required. + :vartype interval: int + :ivar schedule: Recurrence schedule for the recurrence trigger. Required. 
+ :vartype schedule: ~azure.ai.projects.models.RecurrenceSchedule + """ + + type: Literal[TriggerType.RECURRENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of the trigger. Required. Recurrence based trigger.""" + start_time: Optional[str] = rest_field(name="startTime", visibility=["read", "create", "update", "delete", "query"]) + """Start time for the recurrence schedule in ISO 8601 format.""" + end_time: Optional[str] = rest_field(name="endTime", visibility=["read", "create", "update", "delete", "query"]) + """End time for the recurrence schedule in ISO 8601 format.""" + time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) + """Time zone for the recurrence schedule.""" + interval: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Interval for the recurrence schedule. Required.""" + schedule: "_models.RecurrenceSchedule" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Recurrence schedule for the recurrence trigger. Required.""" + + @overload + def __init__( + self, + *, + interval: int, + schedule: "_models.RecurrenceSchedule", + start_time: Optional[str] = None, + end_time: Optional[str] = None, + time_zone: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = TriggerType.RECURRENCE # type: ignore + + +class RedTeam(_Model): + """Red team details. + + :ivar name: Identifier of the red team run. Required. + :vartype name: str + :ivar display_name: Name of the red-team run. + :vartype display_name: str + :ivar num_turns: Number of simulation rounds. 
+ :vartype num_turns: int + :ivar attack_strategies: List of attack strategies or nested lists of attack strategies. + :vartype attack_strategies: list[str or ~azure.ai.projects.models.AttackStrategy] + :ivar simulation_only: Simulation-only or Simulation + Evaluation. Default false, if true the + scan outputs conversation not evaluation result. + :vartype simulation_only: bool + :ivar risk_categories: List of risk categories to generate attack objectives for. + :vartype risk_categories: list[str or ~azure.ai.projects.models.RiskCategory] + :ivar application_scenario: Application scenario for the red team operation, to generate + scenario specific attacks. + :vartype application_scenario: str + :ivar tags: Red team's tags. Unlike properties, tags are fully mutable. + :vartype tags: dict[str, str] + :ivar properties: Red team's properties. Unlike tags, properties are add-only. Once added, a + property cannot be removed. + :vartype properties: dict[str, str] + :ivar status: Status of the red-team. It is set by service and is read-only. + :vartype status: str + :ivar target: Target configuration for the red-team run. Required. + :vartype target: ~azure.ai.projects.models.TargetConfig + """ + + name: str = rest_field(name="id", visibility=["read"]) + """Identifier of the red team run. 
Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Name of the red-team run.""" + num_turns: Optional[int] = rest_field(name="numTurns", visibility=["read", "create", "update", "delete", "query"]) + """Number of simulation rounds.""" + attack_strategies: Optional[list[Union[str, "_models.AttackStrategy"]]] = rest_field( + name="attackStrategies", visibility=["read", "create", "update", "delete", "query"] + ) + """List of attack strategies or nested lists of attack strategies.""" + simulation_only: Optional[bool] = rest_field( + name="simulationOnly", visibility=["read", "create", "update", "delete", "query"] + ) + """Simulation-only or Simulation + Evaluation. Default false, if true the scan outputs + conversation not evaluation result.""" + risk_categories: Optional[list[Union[str, "_models.RiskCategory"]]] = rest_field( + name="riskCategories", visibility=["read", "create", "update", "delete", "query"] + ) + """List of risk categories to generate attack objectives for.""" + application_scenario: Optional[str] = rest_field( + name="applicationScenario", visibility=["read", "create", "update", "delete", "query"] + ) + """Application scenario for the red team operation, to generate scenario specific attacks.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Red team's tags. Unlike properties, tags are fully mutable.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Red team's properties. Unlike tags, properties are add-only. Once added, a property cannot be + removed.""" + status: Optional[str] = rest_field(visibility=["read"]) + """Status of the red-team. 
It is set by service and is read-only.""" + target: "_models.TargetConfig" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Target configuration for the red-team run. Required.""" + + @overload + def __init__( + self, + *, + target: "_models.TargetConfig", + display_name: Optional[str] = None, + num_turns: Optional[int] = None, + attack_strategies: Optional[list[Union[str, "_models.AttackStrategy"]]] = None, + simulation_only: Optional[bool] = None, + risk_categories: Optional[list[Union[str, "_models.RiskCategory"]]] = None, + application_scenario: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + properties: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Response(_Model): + """Response. + + :ivar metadata: Set of 16 key-value pairs that can be attached to an object. This can be +useful for storing additional information about the object in a structured +format, and querying for objects via API or the dashboard. +Keys are strings with a maximum length of 64 characters. Values are strings +with a maximum length of 512 characters. Required. + :vartype metadata: dict[str, str] + :ivar temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 + will make the output more random, while lower values like 0.2 will make it more focused and + deterministic. +We generally recommend altering this or ``top_p`` but not both. Required. + :vartype temperature: float + :ivar top_p: An alternative to sampling with temperature, called nucleus sampling, +where the model considers the results of the tokens with top_p probability +mass. So 0.1 means only the tokens comprising the top 10% probability mass +are considered. 
+We generally recommend altering this or ``temperature`` but not both. Required. + :vartype top_p: float + :ivar user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. `Learn more `_. Required. + :vartype user: str + :ivar service_tier: Note: service_tier is not applicable to Azure OpenAI. Known values are: + "auto", "default", "flex", "scale", and "priority". + :vartype service_tier: str or ~azure.ai.projects.models.ServiceTier + :ivar top_logprobs: An integer between 0 and 20 specifying the number of most likely tokens to + return at each token position, each with an associated log probability. + :vartype top_logprobs: int + :ivar previous_response_id: The unique ID of the previous response to the model. Use this to +create multi-turn conversations. Learn more about +`conversation state `_. + :vartype previous_response_id: str + :ivar model: The model deployment to use for the creation of this response. + :vartype model: str + :ivar reasoning: + :vartype reasoning: ~azure.ai.projects.models.Reasoning + :ivar background: Whether to run the model response in the background. +`Learn more `_. + :vartype background: bool + :ivar max_output_tokens: An upper bound for the number of tokens that can be generated for a + response, including visible output tokens and `reasoning tokens `_. + :vartype max_output_tokens: int + :ivar max_tool_calls: The maximum number of total calls to built-in tools that can be processed + in a response. This maximum number applies across all built-in tool calls, not per individual + tool. Any further attempts to call a tool by the model will be ignored. + :vartype max_tool_calls: int + :ivar text: Configuration options for a text response from the model. Can be plain +text or structured JSON data. Learn more: + * [Text inputs and outputs](/docs/guides/text) + * [Structured Outputs](/docs/guides/structured-outputs). 
+ :vartype text: ~azure.ai.projects.models.ResponseText + :ivar tools: An array of tools the model may call while generating a response. You +can specify which tool to use by setting the ``tool_choice`` parameter. +The two categories of tools you can provide the model are: + * **Built-in tools**: Tools that are provided by OpenAI that extend the +model's capabilities, like [web search](/docs/guides/tools-web-search) +or [file search](/docs/guides/tools-file-search). Learn more about +[built-in tools](/docs/guides/tools). + * **Function calls (custom tools)**: Functions that are defined by you, +enabling the model to call your own code. Learn more about +[function calling](/docs/guides/function-calling). + :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar tool_choice: How the model should select which tool (or tools) to use when generating +a response. See the ``tools`` parameter to see how to specify which tools +the model can call. Is either a Union[str, "_models.ToolChoiceOptions"] type or a + ToolChoiceObject type. + :vartype tool_choice: str or ~azure.ai.projects.models.ToolChoiceOptions or + ~azure.ai.projects.models.ToolChoiceObject + :ivar prompt: + :vartype prompt: ~azure.ai.projects.models.Prompt + :ivar truncation: The truncation strategy to use for the model response. + * `auto`: If the context of this response and previous ones exceeds +the model's context window size, the model will truncate the +response to fit the context window by dropping input items in the +middle of the conversation. + * `disabled` (default): If a model response will exceed the context window +size for a model, the request will fail with a 400 error. Is either a Literal["auto"] type or a + Literal["disabled"] type. + :vartype truncation: str or str + :ivar id: Unique identifier for this Response. Required. + :vartype id: str + :ivar object: The object type of this resource - always set to ``response``. Required. Default + value is "response". 
+ :vartype object: str + :ivar status: The status of the response generation. One of ``completed``, ``failed``, +``in_progress``, ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types: + Literal["completed"], Literal["failed"], Literal["in_progress"], Literal["cancelled"], + Literal["queued"], Literal["incomplete"] + :vartype status: str or str or str or str or str or str + :ivar created_at: Unix timestamp (in seconds) of when this Response was created. Required. + :vartype created_at: ~datetime.datetime + :ivar error: Required. + :vartype error: ~azure.ai.projects.models.ResponseError + :ivar incomplete_details: Details about why the response is incomplete. Required. + :vartype incomplete_details: ~azure.ai.projects.models.ResponseIncompleteDetails1 + :ivar output: An array of content items generated by the model. + * The length and order of items in the `output` array is dependent +on the model's response. + * Rather than accessing the first item in the `output` array and +assuming it's an `assistant` message with the content generated by +the model, you might consider using the `output_text` property where +supported in SDKs. Required. + :vartype output: list[~azure.ai.projects.models.ItemResource] + :ivar instructions: A system (or developer) message inserted into the model's context. +When using along with ``previous_response_id``, the instructions from a previous +response will not be carried over to the next response. This makes it simple +to swap out system (or developer) messages in new responses. Required. Is either a str type or + a [ItemParam] type. + :vartype instructions: str or list[~azure.ai.projects.models.ItemParam] + :ivar output_text: SDK-only convenience property that contains the aggregated text output +from all ``output_text`` items in the ``output`` array, if any are present. +Supported in the Python and JavaScript SDKs. 
+ :vartype output_text: str + :ivar usage: + :vartype usage: ~azure.ai.projects.models.ResponseUsage + :ivar parallel_tool_calls: Whether to allow the model to run tool calls in parallel. Required. + :vartype parallel_tool_calls: bool + :ivar conversation: Required. + :vartype conversation: ~azure.ai.projects.models.ResponseConversation1 + :ivar agent: The agent used for this response. + :vartype agent: ~azure.ai.projects.models.AgentId + :ivar structured_inputs: The structured inputs to the response that can participate in prompt + template substitution or tool argument bindings. + :vartype structured_inputs: dict[str, any] + """ + + metadata: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Set of 16 key-value pairs that can be attached to an object. This can be + useful for storing additional information about the object in a structured + format, and querying for objects via API or the dashboard. + Keys are strings with a maximum length of 64 characters. Values are strings + with a maximum length of 512 characters. Required.""" + temperature: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output + more random, while lower values like 0.2 will make it more focused and deterministic. + We generally recommend altering this or ``top_p`` but not both. Required.""" + top_p: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An alternative to sampling with temperature, called nucleus sampling, + where the model considers the results of the tokens with top_p probability + mass. So 0.1 means only the tokens comprising the top 10% probability mass + are considered. + We generally recommend altering this or ``temperature`` but not both. 
Required.""" + user: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A unique identifier representing your end-user, which can help OpenAI to monitor and detect + abuse. `Learn more `_. Required.""" + service_tier: Optional[Union[str, "_models.ServiceTier"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Note: service_tier is not applicable to Azure OpenAI. Known values are: \"auto\", \"default\", + \"flex\", \"scale\", and \"priority\".""" + top_logprobs: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An integer between 0 and 20 specifying the number of most likely tokens to return at each token + position, each with an associated log probability.""" + previous_response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the previous response to the model. Use this to + create multi-turn conversations. Learn more about + `conversation state `_.""" + model: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The model deployment to use for the creation of this response.""" + reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + background: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to run the model response in the background. + `Learn more `_.""" + max_output_tokens: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An upper bound for the number of tokens that can be generated for a response, including visible + output tokens and `reasoning tokens `_.""" + max_tool_calls: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The maximum number of total calls to built-in tools that can be processed in a response. 
This + maximum number applies across all built-in tool calls, not per individual tool. Any further + attempts to call a tool by the model will be ignored.""" + text: Optional["_models.ResponseText"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Configuration options for a text response from the model. Can be plain + text or structured JSON data. Learn more: + * [Text inputs and outputs](/docs/guides/text) + * [Structured Outputs](/docs/guides/structured-outputs).""" + tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of tools the model may call while generating a response. You + can specify which tool to use by setting the ``tool_choice`` parameter. + The two categories of tools you can provide the model are: + * **Built-in tools**: Tools that are provided by OpenAI that extend the + model's capabilities, like [web search](/docs/guides/tools-web-search) + or [file search](/docs/guides/tools-file-search). Learn more about + [built-in tools](/docs/guides/tools). + * **Function calls (custom tools)**: Functions that are defined by you, + enabling the model to call your own code. Learn more about + [function calling](/docs/guides/function-calling).""" + tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceObject"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """How the model should select which tool (or tools) to use when generating + a response. See the ``tools`` parameter to see how to specify which tools + the model can call. 
Is either a Union[str, \"_models.ToolChoiceOptions\"] type or a + ToolChoiceObject type.""" + prompt: Optional["_models.Prompt"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + truncation: Optional[Literal["auto", "disabled"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The truncation strategy to use for the model response. + * `auto`: If the context of this response and previous ones exceeds + the model's context window size, the model will truncate the + response to fit the context window by dropping input items in the + middle of the conversation. + * `disabled` (default): If a model response will exceed the context window + size for a model, the request will fail with a 400 error. Is either a Literal[\"auto\"] type or + a Literal[\"disabled\"] type.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique identifier for this Response. Required.""" + object: Literal["response"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type of this resource - always set to ``response``. Required. Default value is + \"response\".""" + status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the response generation. One of ``completed``, ``failed``, + ``in_progress``, ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types: + Literal[\"completed\"], Literal[\"failed\"], Literal[\"in_progress\"], Literal[\"cancelled\"], + Literal[\"queued\"], Literal[\"incomplete\"]""" + created_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """Unix timestamp (in seconds) of when this Response was created. 
Required.""" + error: "_models.ResponseError" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + incomplete_details: "_models.ResponseIncompleteDetails1" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Details about why the response is incomplete. Required.""" + output: list["_models.ItemResource"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of content items generated by the model. + * The length and order of items in the `output` array is dependent + on the model's response. + * Rather than accessing the first item in the `output` array and + assuming it's an `assistant` message with the content generated by + the model, you might consider using the `output_text` property where + supported in SDKs. Required.""" + instructions: Union[str, list["_models.ItemParam"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A system (or developer) message inserted into the model's context. + When using along with ``previous_response_id``, the instructions from a previous + response will not be carried over to the next response. This makes it simple + to swap out system (or developer) messages in new responses. Required. Is either a str type or + a [ItemParam] type.""" + output_text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """SDK-only convenience property that contains the aggregated text output + from all ``output_text`` items in the ``output`` array, if any are present. + Supported in the Python and JavaScript SDKs.""" + usage: Optional["_models.ResponseUsage"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + parallel_tool_calls: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to allow the model to run tool calls in parallel. 
Required.""" + conversation: "_models.ResponseConversation1" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" + agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The agent used for this response.""" + structured_inputs: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The structured inputs to the response that can participate in prompt template substitution or + tool argument bindings.""" + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + metadata: dict[str, str], + temperature: float, + top_p: float, + user: str, + id: str, # pylint: disable=redefined-builtin + created_at: datetime.datetime, + error: "_models.ResponseError", + incomplete_details: "_models.ResponseIncompleteDetails1", + output: list["_models.ItemResource"], + instructions: Union[str, list["_models.ItemParam"]], + parallel_tool_calls: bool, + conversation: "_models.ResponseConversation1", + service_tier: Optional[Union[str, "_models.ServiceTier"]] = None, + top_logprobs: Optional[int] = None, + previous_response_id: Optional[str] = None, + model: Optional[str] = None, + reasoning: Optional["_models.Reasoning"] = None, + background: Optional[bool] = None, + max_output_tokens: Optional[int] = None, + max_tool_calls: Optional[int] = None, + text: Optional["_models.ResponseText"] = None, + tools: Optional[list["_models.Tool"]] = None, + tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceObject"]] = None, + prompt: Optional["_models.Prompt"] = None, + truncation: Optional[Literal["auto", "disabled"]] = None, + status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = None, + output_text: Optional[str] = None, + usage: Optional["_models.ResponseUsage"] = None, + agent: Optional["_models.AgentId"] = None, + structured_inputs: 
class ResponseStreamEvent(_Model):
    """Base type for every server-sent event emitted while a response is being generated.

    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
    ResponseErrorEvent, ResponseCodeInterpreterCallCompletedEvent,
    ResponseCodeInterpreterCallInProgressEvent, ResponseCodeInterpreterCallInterpretingEvent,
    ResponseCodeInterpreterCallCodeDeltaEvent, ResponseCodeInterpreterCallCodeDoneEvent,
    ResponseCompletedEvent, ResponseContentPartAddedEvent, ResponseContentPartDoneEvent,
    ResponseCreatedEvent, ResponseFailedEvent, ResponseFileSearchCallCompletedEvent,
    ResponseFileSearchCallInProgressEvent, ResponseFileSearchCallSearchingEvent,
    ResponseFunctionCallArgumentsDeltaEvent, ResponseFunctionCallArgumentsDoneEvent,
    ResponseImageGenCallCompletedEvent, ResponseImageGenCallGeneratingEvent,
    ResponseImageGenCallInProgressEvent, ResponseImageGenCallPartialImageEvent,
    ResponseInProgressEvent, ResponseIncompleteEvent, ResponseMCPCallArgumentsDeltaEvent,
    ResponseMCPCallArgumentsDoneEvent, ResponseMCPCallCompletedEvent, ResponseMCPCallFailedEvent,
    ResponseMCPCallInProgressEvent, ResponseMCPListToolsCompletedEvent,
    ResponseMCPListToolsFailedEvent, ResponseMCPListToolsInProgressEvent,
    ResponseOutputItemAddedEvent, ResponseOutputItemDoneEvent, ResponseTextDeltaEvent,
    ResponseTextDoneEvent, ResponseQueuedEvent, ResponseReasoningDeltaEvent,
    ResponseReasoningDoneEvent, ResponseReasoningSummaryDeltaEvent,
    ResponseReasoningSummaryDoneEvent, ResponseReasoningSummaryPartAddedEvent,
    ResponseReasoningSummaryPartDoneEvent, ResponseReasoningSummaryTextDeltaEvent,
    ResponseReasoningSummaryTextDoneEvent, ResponseRefusalDeltaEvent, ResponseRefusalDoneEvent,
    ResponseWebSearchCallCompletedEvent, ResponseWebSearchCallInProgressEvent,
    ResponseWebSearchCallSearchingEvent

    :ivar type: Required. The polymorphic discriminator: one of the ``response.*`` event-type
     strings enumerated by ~azure.ai.projects.models.ResponseStreamEventType (e.g.
     "response.created", "response.completed", "response.output_text.delta",
     "response.mcp_call.in_progress"), or "error".
    :vartype type: str or ~azure.ai.projects.models.ResponseStreamEventType
    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    """

    # Discriminator registry. Each sub-class declares itself with the
    # ``discriminator="..."`` class keyword; the mapping of discriminator value to
    # concrete model class accumulates here so deserialization can dispatch on "type".
    __mapping__: dict[str, _Model] = {}
    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
    """Required. The event discriminator; see the class docstring for the known values."""
    sequence_number: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The sequence number for this event. Required."""

    @overload
    def __init__(
        self,
        *,
        type: str,
        sequence_number: int,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
class ResponseCodeInterpreterCallCodeDeltaEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call_code.delta"
):  # pylint: disable=name-too-long
    """Emitted when a partial code snippet is streamed by the code interpreter.

    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    :ivar type: The type of the event. Always ``response.code_interpreter_call_code.delta``.
     Required.
    :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA
    :ivar output_index: The index of the output item in the response for which the code is being
     streamed. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar delta: The partial code snippet being streamed by the code interpreter. Required.
    :vartype delta: str
    """

    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call_code.delta``. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code is being streamed. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""
    delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The partial code snippet being streamed by the code interpreter. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        output_index: int,
        item_id: str,
        delta: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so instances always carry the
        # fixed event type for this sub-class.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA  # type: ignore
class ResponseCodeInterpreterCallCodeDoneEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call_code.done"
):
    """Emitted when the code snippet is finalized by the code interpreter.

    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    :ivar type: The type of the event. Always ``response.code_interpreter_call_code.done``.
     Required.
    :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE
    :ivar output_index: The index of the output item in the response for which the code is
     finalized. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    :ivar code: The final code snippet output by the code interpreter. Required.
    :vartype code: str
    """

    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call_code.done``. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code is finalized. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""
    code: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The final code snippet output by the code interpreter. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        output_index: int,
        item_id: str,
        code: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so instances always carry the
        # fixed event type for this sub-class.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE  # type: ignore
class ResponseCodeInterpreterCallCompletedEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call.completed"
):  # pylint: disable=name-too-long
    """Emitted when the code interpreter call is completed.

    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    :ivar type: The type of the event. Always ``response.code_interpreter_call.completed``.
     Required.
    :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED
    :ivar output_index: The index of the output item in the response for which the code interpreter
     call is completed. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    """

    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call.completed``. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code interpreter call is completed.
    Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        output_index: int,
        item_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so instances always carry the
        # fixed event type for this sub-class.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED  # type: ignore
class ResponseCodeInterpreterCallInProgressEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call.in_progress"
):  # pylint: disable=name-too-long
    """Emitted when a code interpreter call is in progress.

    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    :ivar type: The type of the event. Always ``response.code_interpreter_call.in_progress``.
     Required.
    :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS
    :ivar output_index: The index of the output item in the response for which the code interpreter
     call is in progress. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    """

    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call.in_progress``. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code interpreter call is in
    progress. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        output_index: int,
        item_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so instances always carry the
        # fixed event type for this sub-class.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS  # type: ignore
class ResponseCodeInterpreterCallInterpretingEvent(
    ResponseStreamEvent, discriminator="response.code_interpreter_call.interpreting"
):  # pylint: disable=name-too-long
    """Emitted when the code interpreter is actively interpreting the code snippet.

    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    :ivar type: The type of the event. Always ``response.code_interpreter_call.interpreting``.
     Required.
    :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING
    :ivar output_index: The index of the output item in the response for which the code interpreter
     is interpreting code. Required.
    :vartype output_index: int
    :ivar item_id: The unique identifier of the code interpreter tool call item. Required.
    :vartype item_id: str
    """

    type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.code_interpreter_call.interpreting``. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item in the response for which the code interpreter is interpreting
    code. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The unique identifier of the code interpreter tool call item. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        output_index: int,
        item_id: str,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so instances always carry the
        # fixed event type for this sub-class.
        self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING  # type: ignore
class ResponseCompletedEvent(ResponseStreamEvent, discriminator="response.completed"):
    """Emitted when the model response is complete.

    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    :ivar type: The type of the event. Always ``response.completed``. Required.
    :vartype type: str or ~azure.ai.projects.models.RESPONSE_COMPLETED
    :ivar response: Properties of the completed response. Required.
    :vartype response: ~azure.ai.projects.models.Response
    """

    type: Literal[ResponseStreamEventType.RESPONSE_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.completed``. Required."""
    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Properties of the completed response. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        response: "_models.Response",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so instances always carry the
        # fixed event type for this sub-class.
        self.type = ResponseStreamEventType.RESPONSE_COMPLETED  # type: ignore
class ResponseContentPartAddedEvent(ResponseStreamEvent, discriminator="response.content_part.added"):
    """Emitted when a new content part is added.

    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    :ivar type: The type of the event. Always ``response.content_part.added``. Required.
    :vartype type: str or ~azure.ai.projects.models.RESPONSE_CONTENT_PART_ADDED
    :ivar item_id: The ID of the output item that the content part was added to. Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item that the content part was added to. Required.
    :vartype output_index: int
    :ivar content_index: The index of the content part that was added. Required.
    :vartype content_index: int
    :ivar part: The content part that was added. Required.
    :vartype part: ~azure.ai.projects.models.ItemContent
    """

    type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.content_part.added``. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the output item that the content part was added to. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the content part was added to. Required."""
    content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the content part that was added. Required."""
    part: "_models.ItemContent" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content part that was added. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        item_id: str,
        output_index: int,
        content_index: int,
        part: "_models.ItemContent",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so instances always carry the
        # fixed event type for this sub-class.
        self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED  # type: ignore
class ResponseContentPartDoneEvent(ResponseStreamEvent, discriminator="response.content_part.done"):
    """Emitted when a content part is done.

    :ivar sequence_number: The sequence number for this event. Required.
    :vartype sequence_number: int
    :ivar type: The type of the event. Always ``response.content_part.done``. Required.
    :vartype type: str or ~azure.ai.projects.models.RESPONSE_CONTENT_PART_DONE
    :ivar item_id: The ID of the output item that the content part was added to. Required.
    :vartype item_id: str
    :ivar output_index: The index of the output item that the content part was added to. Required.
    :vartype output_index: int
    :ivar content_index: The index of the content part that is done. Required.
    :vartype content_index: int
    :ivar part: The content part that is done. Required.
    :vartype part: ~azure.ai.projects.models.ItemContent
    """

    type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
    """The type of the event. Always ``response.content_part.done``. Required."""
    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The ID of the output item that the content part was added to. Required."""
    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the output item that the content part was added to. Required."""
    content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The index of the content part that is done. Required."""
    part: "_models.ItemContent" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The content part that is done. Required."""

    @overload
    def __init__(
        self,
        *,
        sequence_number: int,
        item_id: str,
        output_index: int,
        content_index: int,
        part: "_models.ItemContent",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Pin the discriminator after base init so instances always carry the
        # fixed event type for this sub-class.
        self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE  # type: ignore
class ResponseConversation1(_Model):
    """ResponseConversation1.

    Generated wrapper model holding a single required ``id``; used as the type of
    ``Response.conversation`` (presumably the id of the conversation the response
    belongs to — confirm against the service specification).

    :ivar id: Required.
    :vartype id: str
    """

    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Required."""

    @overload
    def __init__(
        self,
        *,
        id: str,  # pylint: disable=redefined-builtin
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+ :vartype id: str + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseCreatedEvent(ResponseStreamEvent, discriminator="response.created"): + """An event that is emitted when a response is created. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.created``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_CREATED + :ivar response: The response that was created. Required. + :vartype response: ~azure.ai.projects.models.Response + """ + + type: Literal[ResponseStreamEventType.RESPONSE_CREATED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.created``. Required.""" + response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response that was created. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + response: "_models.Response", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_CREATED # type: ignore + + +class ResponseError(_Model): + """An error object returned when the model fails to generate a Response. + + :ivar code: Required. 
Known values are: "server_error", "rate_limit_exceeded", + "invalid_prompt", "vector_store_timeout", "invalid_image", "invalid_image_format", + "invalid_base64_image", "invalid_image_url", "image_too_large", "image_too_small", + "image_parse_error", "image_content_policy_violation", "invalid_image_mode", + "image_file_too_large", "unsupported_image_media_type", "empty_image_file", + "failed_to_download_image", and "image_file_not_found". + :vartype code: str or ~azure.ai.projects.models.ResponseErrorCode + :ivar message: A human-readable description of the error. Required. + :vartype message: str + """ + + code: Union[str, "_models.ResponseErrorCode"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required. Known values are: \"server_error\", \"rate_limit_exceeded\", \"invalid_prompt\", + \"vector_store_timeout\", \"invalid_image\", \"invalid_image_format\", + \"invalid_base64_image\", \"invalid_image_url\", \"image_too_large\", \"image_too_small\", + \"image_parse_error\", \"image_content_policy_violation\", \"invalid_image_mode\", + \"image_file_too_large\", \"unsupported_image_media_type\", \"empty_image_file\", + \"failed_to_download_image\", and \"image_file_not_found\".""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable description of the error. Required.""" + + @overload + def __init__( + self, + *, + code: Union[str, "_models.ResponseErrorCode"], + message: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseErrorEvent(ResponseStreamEvent, discriminator="error"): + """Emitted when an error occurs. + + :ivar sequence_number: The sequence number for this event. Required. 
+ :vartype sequence_number: int + :ivar type: The type of the event. Always ``error``. Required. + :vartype type: str or ~azure.ai.projects.models.ERROR + :ivar code: The error code. Required. + :vartype code: str + :ivar message: The error message. Required. + :vartype message: str + :ivar param: The error parameter. Required. + :vartype param: str + """ + + type: Literal[ResponseStreamEventType.ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``error``. Required.""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error code. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error message. Required.""" + param: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error parameter. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + code: str, + message: str, + param: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.ERROR # type: ignore + + +class ResponseFailedEvent(ResponseStreamEvent, discriminator="response.failed"): + """An event that is emitted when a response fails. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.failed``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_FAILED + :ivar response: The response that failed. Required. 
+ :vartype response: ~azure.ai.projects.models.Response + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.failed``. Required.""" + response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response that failed. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + response: "_models.Response", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_FAILED # type: ignore + + +class ResponseFileSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.file_search_call.completed"): + """Emitted when a file search call is completed (results found). + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.file_search_call.completed``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_COMPLETED + :ivar output_index: The index of the output item that the file search call is initiated. + Required. + :vartype output_index: int + :ivar item_id: The ID of the output item that the file search call is initiated. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.file_search_call.completed``. 
Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the file search call is initiated. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the file search call is initiated. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED # type: ignore + + +class ResponseFileSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.file_search_call.in_progress"): + """Emitted when a file search call is initiated. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.file_search_call.in_progress``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS + :ivar output_index: The index of the output item that the file search call is initiated. + Required. + :vartype output_index: int + :ivar item_id: The ID of the output item that the file search call is initiated. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.file_search_call.in_progress``. 
Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the file search call is initiated. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the file search call is initiated. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS # type: ignore + + +class ResponseFileSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.file_search_call.searching"): + """Emitted when a file search is currently searching. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.file_search_call.searching``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_SEARCHING + :ivar output_index: The index of the output item that the file search call is searching. + Required. + :vartype output_index: int + :ivar item_id: The ID of the output item that the file search call is initiated. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.file_search_call.searching``. 
Required."""
+    output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The index of the output item that the file search call is searching. Required."""
+    item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The ID of the output item that the file search call is initiated. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        sequence_number: int,
+        output_index: int,
+        item_id: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING  # type: ignore
+
+
+class ResponseFormatJsonSchemaSchema(_Model):
+    """The schema for the response format, described as a JSON Schema object.
+    Learn how to build JSON schemas `here <https://json-schema.org/>`_.
+
+    """
+
+
+class ResponseFunctionCallArgumentsDeltaEvent(
+    ResponseStreamEvent, discriminator="response.function_call_arguments.delta"
+):
+    """Emitted when there is a partial function-call arguments delta.
+
+    :ivar sequence_number: The sequence number for this event. Required.
+    :vartype sequence_number: int
+    :ivar type: The type of the event. Always ``response.function_call_arguments.delta``. Required.
+    :vartype type: str or ~azure.ai.projects.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA
+    :ivar item_id: The ID of the output item that the function-call arguments delta is added to.
+     Required.
+    :vartype item_id: str
+    :ivar output_index: The index of the output item that the function-call arguments delta is
+     added to. Required.
+    :vartype output_index: int
+    :ivar delta: The function-call arguments delta that is added. Required.
+ :vartype delta: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.function_call_arguments.delta``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the function-call arguments delta is added to. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the function-call arguments delta is added to. Required.""" + delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The function-call arguments delta that is added. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + delta: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA # type: ignore + + +class ResponseFunctionCallArgumentsDoneEvent( + ResponseStreamEvent, discriminator="response.function_call_arguments.done" +): + """Emitted when function-call arguments are finalized. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE + :ivar item_id: The ID of the item. Required. + :vartype item_id: str + :ivar output_index: The index of the output item. Required. + :vartype output_index: int + :ivar arguments: The function-call arguments. Required. 
+ :vartype arguments: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the item. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The function-call arguments. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + arguments: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE # type: ignore + + +class ResponseImageGenCallCompletedEvent(ResponseStreamEvent, discriminator="response.image_generation_call.completed"): + """Emitted when an image generation tool call has completed and the final image is available. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.image_generation_call.completed'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the image generation item being processed. Required. 
+ :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.image_generation_call.completed'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the image generation item being processed. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED # type: ignore + + +class ResponseImageGenCallGeneratingEvent( + ResponseStreamEvent, discriminator="response.image_generation_call.generating" +): + """Emitted when an image generation tool call is actively generating an image (intermediate + state). + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.image_generation_call.generating'. + Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_GENERATING + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the image generation item being processed. Required. 
+ :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.image_generation_call.generating'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the image generation item being processed. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING # type: ignore + + +class ResponseImageGenCallInProgressEvent( + ResponseStreamEvent, discriminator="response.image_generation_call.in_progress" +): + """Emitted when an image generation tool call is in progress. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.image_generation_call.in_progress'. + Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the image generation item being processed. Required. 
+ :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.image_generation_call.in_progress'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the image generation item being processed. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS # type: ignore + + +class ResponseImageGenCallPartialImageEvent( + ResponseStreamEvent, discriminator="response.image_generation_call.partial_image" +): + """Emitted when a partial image is available during image generation streaming. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.image_generation_call.partial_image'. + Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the image generation item being processed. Required. 
+ :vartype item_id: str + :ivar partial_image_index: 0-based index for the partial image (backend is 1-based, but this is + 0-based for the user). Required. + :vartype partial_image_index: int + :ivar partial_image_b64: Base64-encoded partial image data, suitable for rendering as an image. + Required. + :vartype partial_image_b64: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.image_generation_call.partial_image'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the image generation item being processed. Required.""" + partial_image_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """0-based index for the partial image (backend is 1-based, but this is 0-based for the user). + Required.""" + partial_image_b64: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Base64-encoded partial image data, suitable for rendering as an image. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + partial_image_index: int, + partial_image_b64: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE # type: ignore + + +class ResponseIncompleteDetails1(_Model): + """ResponseIncompleteDetails1. 
+
+    :ivar reason: The reason why the response is incomplete. Is either a
+     Literal["max_output_tokens"] type or a Literal["content_filter"] type.
+    :vartype reason: str
+    """
+
+    reason: Optional[Literal["max_output_tokens", "content_filter"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The reason why the response is incomplete. Is either a Literal[\"max_output_tokens\"] type or a
+     Literal[\"content_filter\"] type."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        reason: Optional[Literal["max_output_tokens", "content_filter"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class ResponseIncompleteEvent(ResponseStreamEvent, discriminator="response.incomplete"):
+    """An event that is emitted when a response finishes as incomplete.
+
+    :ivar sequence_number: The sequence number for this event. Required.
+    :vartype sequence_number: int
+    :ivar type: The type of the event. Always ``response.incomplete``. Required.
+    :vartype type: str or ~azure.ai.projects.models.RESPONSE_INCOMPLETE
+    :ivar response: The response that was incomplete. Required.
+    :vartype response: ~azure.ai.projects.models.Response
+    """
+
+    type: Literal[ResponseStreamEventType.RESPONSE_INCOMPLETE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the event. Always ``response.incomplete``. Required."""
+    response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The response that was incomplete. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        sequence_number: int,
+        response: "_models.Response",
+    ) -> None: ...
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_INCOMPLETE # type: ignore + + +class ResponseInProgressEvent(ResponseStreamEvent, discriminator="response.in_progress"): + """Emitted when the response is in progress. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.in_progress``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_IN_PROGRESS + :ivar response: The response that is in progress. Required. + :vartype response: ~azure.ai.projects.models.Response + """ + + type: Literal[ResponseStreamEventType.RESPONSE_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.in_progress``. Required.""" + response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response that is in progress. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + response: "_models.Response", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_IN_PROGRESS # type: ignore + + +class ResponseMCPCallArgumentsDeltaEvent(ResponseStreamEvent, discriminator="response.mcp_call.arguments_delta"): + """Emitted when there is a delta (partial update) to the arguments of an MCP tool call. + + :ivar sequence_number: The sequence number for this event. 
Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_call.arguments_delta'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_ARGUMENTS_DELTA + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. + :vartype item_id: str + :ivar delta: The partial update to the arguments for the MCP tool call. Required. + :vartype delta: any + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_call.arguments_delta'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the MCP tool call item being processed. Required.""" + delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The partial update to the arguments for the MCP tool call. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + delta: Any, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA # type: ignore + + +class ResponseMCPCallArgumentsDoneEvent(ResponseStreamEvent, discriminator="response.mcp_call.arguments_done"): + """Emitted when the arguments for an MCP tool call are finalized. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_call.arguments_done'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_ARGUMENTS_DONE + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. + :vartype item_id: str + :ivar arguments: The finalized arguments for the MCP tool call. Required. + :vartype arguments: any + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_call.arguments_done'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the MCP tool call item being processed. Required.""" + arguments: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The finalized arguments for the MCP tool call. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + arguments: Any, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE # type: ignore + + +class ResponseMCPCallCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_call.completed"): + """Emitted when an MCP tool call has completed successfully. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_call.completed'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_COMPLETED + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_call.completed'. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED # type: ignore + + +class ResponseMCPCallFailedEvent(ResponseStreamEvent, discriminator="response.mcp_call.failed"): + """Emitted when an MCP tool call has failed. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_call.failed'. Required. 
+ :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_FAILED + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_call.failed'. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED # type: ignore + + +class ResponseMCPCallInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_call.in_progress"): + """Emitted when an MCP tool call is in progress. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_call.in_progress'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_IN_PROGRESS + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_call.in_progress'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. 
Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the MCP tool call item being processed. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS # type: ignore + + +class ResponseMCPListToolsCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.completed"): + """Emitted when the list of available MCP tools has been successfully retrieved. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_list_tools.completed'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_COMPLETED + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_list_tools.completed'. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED # type: ignore + + +class ResponseMCPListToolsFailedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.failed"): + """Emitted when the attempt to list available MCP tools has failed. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_list_tools.failed'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_FAILED + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_list_tools.failed'. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED # type: ignore + + +class ResponseMCPListToolsInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.in_progress"): + """Emitted when the system is in the process of retrieving the list of available MCP tools. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_list_tools.in_progress'. Required. 
+ :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_list_tools.in_progress'. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS # type: ignore + + +class ResponseOutputItemAddedEvent(ResponseStreamEvent, discriminator="response.output_item.added"): + """Emitted when a new output item is added. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.output_item.added``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_ITEM_ADDED + :ivar output_index: The index of the output item that was added. Required. + :vartype output_index: int + :ivar item: The output item that was added. Required. + :vartype item: ~azure.ai.projects.models.ItemResource + """ + + type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.output_item.added``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that was added. 
Required.""" + item: "_models.ItemResource" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The output item that was added. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item: "_models.ItemResource", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED # type: ignore + + +class ResponseOutputItemDoneEvent(ResponseStreamEvent, discriminator="response.output_item.done"): + """Emitted when an output item is marked done. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.output_item.done``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_ITEM_DONE + :ivar output_index: The index of the output item that was marked done. Required. + :vartype output_index: int + :ivar item: The output item that was marked done. Required. + :vartype item: ~azure.ai.projects.models.ItemResource + """ + + type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.output_item.done``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that was marked done. Required.""" + item: "_models.ItemResource" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The output item that was marked done. 
Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item: "_models.ItemResource", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE # type: ignore + + +class ResponsePromptVariables(_Model): + """Optional map of values to substitute in for variables in your + prompt. The substitution values can either be strings, or other + Response input types like images or files. + + """ + + +class ResponseQueuedEvent(ResponseStreamEvent, discriminator="response.queued"): + """Emitted when a response is queued and waiting to be processed. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.queued'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_QUEUED + :ivar response: The full response object that is queued. Required. + :vartype response: ~azure.ai.projects.models.Response + """ + + type: Literal[ResponseStreamEventType.RESPONSE_QUEUED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.queued'. Required.""" + response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The full response object that is queued. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + response: "_models.Response", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_QUEUED # type: ignore + + +class ResponseReasoningDeltaEvent(ResponseStreamEvent, discriminator="response.reasoning.delta"): + """Emitted when there is a delta (partial update) to the reasoning content. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.reasoning.delta'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_DELTA + :ivar item_id: The unique identifier of the item for which reasoning is being updated. + Required. + :vartype item_id: str + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar content_index: The index of the reasoning content part within the output item. Required. + :vartype content_index: int + :ivar delta: The partial update to the reasoning content. Required. + :vartype delta: any + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.reasoning.delta'. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the item for which reasoning is being updated. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the reasoning content part within the output item. 
Required.""" + delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The partial update to the reasoning content. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + delta: Any, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_DELTA # type: ignore + + +class ResponseReasoningDoneEvent(ResponseStreamEvent, discriminator="response.reasoning.done"): + """Emitted when the reasoning content is finalized for an item. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.reasoning.done'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_DONE + :ivar item_id: The unique identifier of the item for which reasoning is finalized. Required. + :vartype item_id: str + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar content_index: The index of the reasoning content part within the output item. Required. + :vartype content_index: int + :ivar text: The finalized reasoning text. Required. + :vartype text: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.reasoning.done'. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the item for which reasoning is finalized. 
Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the reasoning content part within the output item. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The finalized reasoning text. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_DONE # type: ignore + + +class ResponseReasoningSummaryDeltaEvent(ResponseStreamEvent, discriminator="response.reasoning_summary.delta"): + """Emitted when there is a delta (partial update) to the reasoning summary content. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.reasoning_summary.delta'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_DELTA + :ivar item_id: The unique identifier of the item for which the reasoning summary is being + updated. Required. + :vartype item_id: str + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar summary_index: The index of the summary part within the output item. Required. + :vartype summary_index: int + :ivar delta: The partial update to the reasoning summary content. Required. 
+ :vartype delta: any + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.reasoning_summary.delta'. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the item for which the reasoning summary is being updated. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the output item. Required.""" + delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The partial update to the reasoning summary content. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + delta: Any, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DELTA # type: ignore + + +class ResponseReasoningSummaryDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary.done"): + """Emitted when the reasoning summary content is finalized for an item. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.reasoning_summary.done'. Required. 
+ :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_DONE + :ivar item_id: The unique identifier of the item for which the reasoning summary is finalized. + Required. + :vartype item_id: str + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar summary_index: The index of the summary part within the output item. Required. + :vartype summary_index: int + :ivar text: The finalized reasoning summary text. Required. + :vartype text: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.reasoning_summary.done'. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the item for which the reasoning summary is finalized. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the output item. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The finalized reasoning summary text. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DONE # type: ignore + + +class ResponseReasoningSummaryPartAddedEvent( + ResponseStreamEvent, discriminator="response.reasoning_summary_part.added" +): + """Emitted when a new reasoning summary part is added. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.reasoning_summary_part.added``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_PART_ADDED + :ivar item_id: The ID of the item this summary part is associated with. Required. + :vartype item_id: str + :ivar output_index: The index of the output item this summary part is associated with. + Required. + :vartype output_index: int + :ivar summary_index: The index of the summary part within the reasoning summary. Required. + :vartype summary_index: int + :ivar part: The summary part that was added. Required. + :vartype part: ~azure.ai.projects.models.ReasoningItemSummaryPart + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.reasoning_summary_part.added``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the item this summary part is associated with. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item this summary part is associated with. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the reasoning summary. 
Required.""" + part: "_models.ReasoningItemSummaryPart" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The summary part that was added. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + part: "_models.ReasoningItemSummaryPart", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED # type: ignore + + +class ResponseReasoningSummaryPartDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_part.done"): + """Emitted when a reasoning summary part is completed. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.reasoning_summary_part.done``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_PART_DONE + :ivar item_id: The ID of the item this summary part is associated with. Required. + :vartype item_id: str + :ivar output_index: The index of the output item this summary part is associated with. + Required. + :vartype output_index: int + :ivar summary_index: The index of the summary part within the reasoning summary. Required. + :vartype summary_index: int + :ivar part: The completed summary part. Required. + :vartype part: ~azure.ai.projects.models.ReasoningItemSummaryPart + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.reasoning_summary_part.done``. 
Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the item this summary part is associated with. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item this summary part is associated with. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the reasoning summary. Required.""" + part: "_models.ReasoningItemSummaryPart" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The completed summary part. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + part: "_models.ReasoningItemSummaryPart", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE # type: ignore + + +class ResponseReasoningSummaryTextDeltaEvent( + ResponseStreamEvent, discriminator="response.reasoning_summary_text.delta" +): + """Emitted when a delta is added to a reasoning summary text. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.reasoning_summary_text.delta``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_TEXT_DELTA + :ivar item_id: The ID of the item this summary text delta is associated with. Required. + :vartype item_id: str + :ivar output_index: The index of the output item this summary text delta is associated with. + Required. 
+ :vartype output_index: int + :ivar summary_index: The index of the summary part within the reasoning summary. Required. + :vartype summary_index: int + :ivar delta: The text delta that was added to the summary. Required. + :vartype delta: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.reasoning_summary_text.delta``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the item this summary text delta is associated with. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item this summary text delta is associated with. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the reasoning summary. Required.""" + delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text delta that was added to the summary. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + delta: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA # type: ignore + + +class ResponseReasoningSummaryTextDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_text.done"): + """Emitted when a reasoning summary text is completed. + + :ivar sequence_number: The sequence number for this event. Required. 
+ :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.reasoning_summary_text.done``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_TEXT_DONE + :ivar item_id: The ID of the item this summary text is associated with. Required. + :vartype item_id: str + :ivar output_index: The index of the output item this summary text is associated with. + Required. + :vartype output_index: int + :ivar summary_index: The index of the summary part within the reasoning summary. Required. + :vartype summary_index: int + :ivar text: The full text of the completed reasoning summary. Required. + :vartype text: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.reasoning_summary_text.done``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the item this summary text is associated with. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item this summary text is associated with. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the reasoning summary. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The full text of the completed reasoning summary. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE # type: ignore + + +class ResponseRefusalDeltaEvent(ResponseStreamEvent, discriminator="response.refusal.delta"): + """Emitted when there is a partial refusal text. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.refusal.delta``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REFUSAL_DELTA + :ivar item_id: The ID of the output item that the refusal text is added to. Required. + :vartype item_id: str + :ivar output_index: The index of the output item that the refusal text is added to. Required. + :vartype output_index: int + :ivar content_index: The index of the content part that the refusal text is added to. Required. + :vartype content_index: int + :ivar delta: The refusal text that is added. Required. + :vartype delta: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.refusal.delta``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the refusal text is added to. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the refusal text is added to. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the content part that the refusal text is added to. Required.""" + delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The refusal text that is added. 
Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + delta: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DELTA # type: ignore + + +class ResponseRefusalDoneEvent(ResponseStreamEvent, discriminator="response.refusal.done"): + """Emitted when refusal text is finalized. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.refusal.done``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REFUSAL_DONE + :ivar item_id: The ID of the output item that the refusal text is finalized. Required. + :vartype item_id: str + :ivar output_index: The index of the output item that the refusal text is finalized. Required. + :vartype output_index: int + :ivar content_index: The index of the content part that the refusal text is finalized. + Required. + :vartype content_index: int + :ivar refusal: The refusal text that is finalized. Required. + :vartype refusal: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.refusal.done``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the refusal text is finalized. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the refusal text is finalized. 
Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the content part that the refusal text is finalized. Required.""" + refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The refusal text that is finalized. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + refusal: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DONE # type: ignore + + +class ResponsesMessageItemParam(ItemParam, discriminator="message"): + """A response message item, representing a role and content, as provided as client request + parameters. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ResponsesAssistantMessageItemParam, ResponsesDeveloperMessageItemParam, + ResponsesSystemMessageItemParam, ResponsesUserMessageItemParam + + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar role: The role associated with the message. Required. Known values are: "system", + "developer", "user", and "assistant". + :vartype role: str or ~azure.ai.projects.models.ResponsesMessageRole + """ + + __mapping__: dict[str, _Model] = {} + type: Literal[ItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the responses item, which is always 'message'. 
Required.""" + role: str = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) + """The role associated with the message. Required. Known values are: \"system\", \"developer\", + \"user\", and \"assistant\".""" + + @overload + def __init__( + self, + *, + role: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MESSAGE # type: ignore + + +class ResponsesAssistantMessageItemParam(ResponsesMessageItemParam, discriminator="assistant"): + """A message parameter item with the ``assistant`` role. + + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar role: The role of the message, which is always ``assistant``. Required. + :vartype role: str or ~azure.ai.projects.models.ASSISTANT + :ivar content: The content associated with the message. Required. Is either a str type or a + [ItemContent] type. + :vartype content: str or list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.ASSISTANT] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``assistant``. Required.""" + content: Union["str", list["_models.ItemContent"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The content associated with the message. Required. Is either a str type or a [ItemContent] + type.""" + + @overload + def __init__( + self, + *, + content: Union[str, list["_models.ItemContent"]], + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.ASSISTANT # type: ignore + + +class ResponsesMessageItemResource(ItemResource, discriminator="message"): + """A response message resource item, representing a role and content, as provided on service + responses. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ResponsesAssistantMessageItemResource, ResponsesDeveloperMessageItemResource, + ResponsesSystemMessageItemResource, ResponsesUserMessageItemResource + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar role: The role associated with the message. Required. Known values are: "system", + "developer", "user", and "assistant". + :vartype role: str or ~azure.ai.projects.models.ResponsesMessageRole + """ + + __mapping__: dict[str, _Model] = {} + type: Literal[ItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the responses item, which is always 'message'. Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. 
One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + role: str = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) + """The role associated with the message. Required. Known values are: \"system\", \"developer\", + \"user\", and \"assistant\".""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + role: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MESSAGE # type: ignore + + +class ResponsesAssistantMessageItemResource(ResponsesMessageItemResource, discriminator="assistant"): + """A message resource item with the ``assistant`` role. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar role: The role of the message, which is always ``assistant``. Required. + :vartype role: str or ~azure.ai.projects.models.ASSISTANT + :ivar content: The content associated with the message. Required. 
+ :vartype content: list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.ASSISTANT] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``assistant``. Required.""" + content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content associated with the message. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + content: list["_models.ItemContent"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.ASSISTANT # type: ignore + + +class ResponsesDeveloperMessageItemParam(ResponsesMessageItemParam, discriminator="developer"): + """A message parameter item with the ``developer`` role. + + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar role: The role of the message, which is always ``developer``. Required. + :vartype role: str or ~azure.ai.projects.models.DEVELOPER + :ivar content: The content associated with the message. Required. Is either a str type or a + [ItemContent] type. + :vartype content: str or list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.DEVELOPER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``developer``. 
Required.""" + content: Union["str", list["_models.ItemContent"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The content associated with the message. Required. Is either a str type or a [ItemContent] + type.""" + + @overload + def __init__( + self, + *, + content: Union[str, list["_models.ItemContent"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.DEVELOPER # type: ignore + + +class ResponsesDeveloperMessageItemResource(ResponsesMessageItemResource, discriminator="developer"): + """A message resource item with the ``developer`` role. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar role: The role of the message, which is always ``developer``. Required. + :vartype role: str or ~azure.ai.projects.models.DEVELOPER + :ivar content: The content associated with the message. Required. + :vartype content: list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.DEVELOPER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``developer``. 
Required.""" + content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content associated with the message. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + content: list["_models.ItemContent"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.DEVELOPER # type: ignore + + +class ResponsesSystemMessageItemParam(ResponsesMessageItemParam, discriminator="system"): + """A message parameter item with the ``system`` role. + + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar role: The role of the message, which is always ``system``. Required. + :vartype role: str or ~azure.ai.projects.models.SYSTEM + :ivar content: The content associated with the message. Required. Is either a str type or a + [ItemContent] type. + :vartype content: str or list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.SYSTEM] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``system``. Required.""" + content: Union["str", list["_models.ItemContent"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The content associated with the message. Required. 
Is either a str type or a [ItemContent] + type.""" + + @overload + def __init__( + self, + *, + content: Union[str, list["_models.ItemContent"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.SYSTEM # type: ignore + + +class ResponsesSystemMessageItemResource(ResponsesMessageItemResource, discriminator="system"): + """A message resource item with the ``system`` role. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar role: The role of the message, which is always ``system``. Required. + :vartype role: str or ~azure.ai.projects.models.SYSTEM + :ivar content: The content associated with the message. Required. + :vartype content: list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.SYSTEM] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``system``. Required.""" + content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content associated with the message. 
Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + content: list["_models.ItemContent"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.SYSTEM # type: ignore + + +class ResponsesUserMessageItemParam(ResponsesMessageItemParam, discriminator="user"): + """A message parameter item with the ``user`` role. + + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar role: The role of the message, which is always ``user``. Required. + :vartype role: str or ~azure.ai.projects.models.USER + :ivar content: The content associated with the message. Required. Is either a str type or a + [ItemContent] type. + :vartype content: str or list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.USER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``user``. Required.""" + content: Union["str", list["_models.ItemContent"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The content associated with the message. Required. Is either a str type or a [ItemContent] + type.""" + + @overload + def __init__( + self, + *, + content: Union[str, list["_models.ItemContent"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.USER # type: ignore + + +class ResponsesUserMessageItemResource(ResponsesMessageItemResource, discriminator="user"): + """A message resource item with the ``user`` role. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar role: The role of the message, which is always ``user``. Required. + :vartype role: str or ~azure.ai.projects.models.USER + :ivar content: The content associated with the message. Required. + :vartype content: list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.USER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``user``. Required.""" + content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content associated with the message. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + content: list["_models.ItemContent"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.USER # type: ignore + + +class ResponseText(_Model): + """ResponseText. + + :ivar format: + :vartype format: ~azure.ai.projects.models.ResponseTextFormatConfiguration + """ + + format: Optional["_models.ResponseTextFormatConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + + @overload + def __init__( + self, + *, + format: Optional["_models.ResponseTextFormatConfiguration"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseTextDeltaEvent(ResponseStreamEvent, discriminator="response.output_text.delta"): + """Emitted when there is an additional text delta. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.output_text.delta``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_TEXT_DELTA + :ivar item_id: The ID of the output item that the text delta was added to. Required. + :vartype item_id: str + :ivar output_index: The index of the output item that the text delta was added to. Required. + :vartype output_index: int + :ivar content_index: The index of the content part that the text delta was added to. Required. + :vartype content_index: int + :ivar delta: The text delta that was added. Required. 
+ :vartype delta: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.output_text.delta``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the text delta was added to. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the text delta was added to. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the content part that the text delta was added to. Required.""" + delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text delta that was added. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + delta: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA # type: ignore + + +class ResponseTextDoneEvent(ResponseStreamEvent, discriminator="response.output_text.done"): + """Emitted when text content is finalized. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.output_text.done``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_TEXT_DONE + :ivar item_id: The ID of the output item that the text content is finalized. Required. 
+ :vartype item_id: str + :ivar output_index: The index of the output item that the text content is finalized. Required. + :vartype output_index: int + :ivar content_index: The index of the content part that the text content is finalized. + Required. + :vartype content_index: int + :ivar text: The text content that is finalized. Required. + :vartype text: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.output_text.done``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the text content is finalized. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the text content is finalized. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the content part that the text content is finalized. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text content that is finalized. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE # type: ignore + + +class ResponseTextFormatConfiguration(_Model): + """ResponseTextFormatConfiguration. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + ResponseTextFormatConfigurationJsonObject, ResponseTextFormatConfigurationJsonSchema, + ResponseTextFormatConfigurationText + + :ivar type: Required. Known values are: "text", "json_schema", and "json_object". + :vartype type: str or ~azure.ai.projects.models.ResponseTextFormatConfigurationType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"text\", \"json_schema\", and \"json_object\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseTextFormatConfigurationJsonObject( + ResponseTextFormatConfiguration, discriminator="json_object" +): # pylint: disable=name-too-long + """ResponseTextFormatConfigurationJsonObject. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.JSON_OBJECT + """ + + type: Literal[ResponseTextFormatConfigurationType.JSON_OBJECT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseTextFormatConfigurationType.JSON_OBJECT # type: ignore + + +class ResponseTextFormatConfigurationJsonSchema( + ResponseTextFormatConfiguration, discriminator="json_schema" +): # pylint: disable=name-too-long + """JSON Schema response format. 
 Used to generate structured JSON responses.
+ Learn more about `Structured Outputs <https://platform.openai.com/docs/guides/structured-outputs>`_.
+
+ :ivar type: The type of response format being defined. Always ``json_schema``. Required.
+ :vartype type: str or ~azure.ai.projects.models.JSON_SCHEMA
+ :ivar description: A description of what the response format is for, used by the model to
+ determine how to respond in the format.
+ :vartype description: str
+ :ivar name: The name of the response format. Must be a-z, A-Z, 0-9, or contain
+ underscores and dashes, with a maximum length of 64. Required.
+ :vartype name: str
+ :ivar schema: Required.
+ :vartype schema: ~azure.ai.projects.models.ResponseFormatJsonSchemaSchema
+ :ivar strict: Whether to enable strict schema adherence when generating the output.
+ If set to true, the model will always follow the exact schema defined
+ in the ``schema`` field. Only a subset of JSON Schema is supported when
+ ``strict`` is ``true``. To learn more, read the `Structured Outputs
+ guide <https://platform.openai.com/docs/guides/structured-outputs>`_.
+ :vartype strict: bool
+ """
+
+ type: Literal[ResponseTextFormatConfigurationType.JSON_SCHEMA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """The type of response format being defined. Always ``json_schema``. Required."""
+ description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """A description of what the response format is for, used by the model to
+ determine how to respond in the format."""
+ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The name of the response format. Must be a-z, A-Z, 0-9, or contain
+ underscores and dashes, with a maximum length of 64. 
 Required."""
+ schema: "_models.ResponseFormatJsonSchemaSchema" = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Required."""
+ strict: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """Whether to enable strict schema adherence when generating the output.
+ If set to true, the model will always follow the exact schema defined
+ in the ``schema`` field. Only a subset of JSON Schema is supported when
+ ``strict`` is ``true``. To learn more, read the `Structured Outputs
+ guide <https://platform.openai.com/docs/guides/structured-outputs>`_."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ name: str,
+ schema: "_models.ResponseFormatJsonSchemaSchema",
+ description: Optional[str] = None,
+ strict: Optional[bool] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ResponseTextFormatConfigurationType.JSON_SCHEMA # type: ignore
+
+
+class ResponseTextFormatConfigurationText(ResponseTextFormatConfiguration, discriminator="text"):
+ """ResponseTextFormatConfigurationText.
+
+ :ivar type: Required.
+ :vartype type: str or ~azure.ai.projects.models.TEXT
+ """
+
+ type: Literal[ResponseTextFormatConfigurationType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Required."""
+
+ @overload
+ def __init__(
+ self,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseTextFormatConfigurationType.TEXT # type: ignore + + +class ResponseUsage(_Model): + """Represents token usage details including input tokens, output tokens, + a breakdown of output tokens, and the total tokens used. + + :ivar input_tokens: The number of input tokens. Required. + :vartype input_tokens: int + :ivar input_tokens_details: A detailed breakdown of the input tokens. Required. + :vartype input_tokens_details: + ~azure.ai.projects.models.MemoryStoreOperationUsageInputTokensDetails + :ivar output_tokens: The number of output tokens. Required. + :vartype output_tokens: int + :ivar output_tokens_details: A detailed breakdown of the output tokens. Required. + :vartype output_tokens_details: + ~azure.ai.projects.models.MemoryStoreOperationUsageOutputTokensDetails + :ivar total_tokens: The total number of tokens used. Required. + :vartype total_tokens: int + """ + + input_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of input tokens. Required.""" + input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A detailed breakdown of the input tokens. Required.""" + output_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of output tokens. Required.""" + output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A detailed breakdown of the output tokens. Required.""" + total_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The total number of tokens used. 
Required.""" + + @overload + def __init__( + self, + *, + input_tokens: int, + input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails", + output_tokens: int, + output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails", + total_tokens: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseWebSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.web_search_call.completed"): + """Note: web_search is not yet available via Azure OpenAI. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.web_search_call.completed``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_COMPLETED + :ivar output_index: The index of the output item that the web search call is associated with. + Required. + :vartype output_index: int + :ivar item_id: Unique ID for the output item associated with the web search call. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.web_search_call.completed``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the web search call is associated with. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique ID for the output item associated with the web search call. 
Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED # type: ignore + + +class ResponseWebSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.web_search_call.in_progress"): + """Note: web_search is not yet available via Azure OpenAI. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.web_search_call.in_progress``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS + :ivar output_index: The index of the output item that the web search call is associated with. + Required. + :vartype output_index: int + :ivar item_id: Unique ID for the output item associated with the web search call. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.web_search_call.in_progress``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the web search call is associated with. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique ID for the output item associated with the web search call. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS # type: ignore + + +class ResponseWebSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.web_search_call.searching"): + """Note: web_search is not yet available via Azure OpenAI. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.web_search_call.searching``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_SEARCHING + :ivar output_index: The index of the output item that the web search call is associated with. + Required. + :vartype output_index: int + :ivar item_id: Unique ID for the output item associated with the web search call. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.web_search_call.searching``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the web search call is associated with. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique ID for the output item associated with the web search call. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING # type: ignore + + +class SASCredentials(BaseCredentials, discriminator="SAS"): + """Shared Access Signature (SAS) credential definition. + + :ivar type: The credential type. Required. Shared Access Signature (SAS) credential + :vartype type: str or ~azure.ai.projects.models.SAS + :ivar sas_token: SAS token. + :vartype sas_token: str + """ + + type: Literal[CredentialType.SAS] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. Shared Access Signature (SAS) credential""" + sas_token: Optional[str] = rest_field(name="SAS", visibility=["read"]) + """SAS token.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.SAS # type: ignore + + +class Schedule(_Model): + """Schedule model. + + :ivar id: Identifier of the schedule. Required. + :vartype id: str + :ivar display_name: Name of the schedule. + :vartype display_name: str + :ivar description: Description of the schedule. + :vartype description: str + :ivar enabled: Enabled status of the schedule. Required. + :vartype enabled: bool + :ivar provisioning_status: Provisioning status of the schedule. Known values are: "Creating", + "Updating", "Deleting", "Succeeded", and "Failed". + :vartype provisioning_status: str or ~azure.ai.projects.models.ScheduleProvisioningStatus + :ivar trigger: Trigger for the schedule. Required. + :vartype trigger: ~azure.ai.projects.models.Trigger + :ivar task: Task for the schedule. Required. 
+ :vartype task: ~azure.ai.projects.models.ScheduleTask + :ivar tags: Schedule's tags. Unlike properties, tags are fully mutable. + :vartype tags: dict[str, str] + :ivar properties: Schedule's properties. Unlike tags, properties are add-only. Once added, a + property cannot be removed. + :vartype properties: dict[str, str] + :ivar system_data: System metadata for the resource. Required. + :vartype system_data: dict[str, str] + """ + + id: str = rest_field(visibility=["read"]) + """Identifier of the schedule. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Name of the schedule.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description of the schedule.""" + enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Enabled status of the schedule. Required.""" + provisioning_status: Optional[Union[str, "_models.ScheduleProvisioningStatus"]] = rest_field( + name="provisioningStatus", visibility=["read"] + ) + """Provisioning status of the schedule. Known values are: \"Creating\", \"Updating\", + \"Deleting\", \"Succeeded\", and \"Failed\".""" + trigger: "_models.Trigger" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Trigger for the schedule. Required.""" + task: "_models.ScheduleTask" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Task for the schedule. Required.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Schedule's tags. Unlike properties, tags are fully mutable.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Schedule's properties. Unlike tags, properties are add-only. 
Once added, a property cannot be + removed.""" + system_data: dict[str, str] = rest_field(name="systemData", visibility=["read"]) + """System metadata for the resource. Required.""" + + @overload + def __init__( + self, + *, + enabled: bool, + trigger: "_models.Trigger", + task: "_models.ScheduleTask", + display_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + properties: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ScheduleRun(_Model): + """Schedule run model. + + :ivar id: Identifier of the schedule run. Required. + :vartype id: str + :ivar schedule_id: Identifier of the schedule. Required. + :vartype schedule_id: str + :ivar success: Trigger success status of the schedule run. Required. + :vartype success: bool + :ivar trigger_time: Trigger time of the schedule run. + :vartype trigger_time: str + :ivar error: Error information for the schedule run. + :vartype error: str + :ivar properties: Properties of the schedule run. Required. + :vartype properties: dict[str, str] + """ + + id: str = rest_field(visibility=["read"]) + """Identifier of the schedule run. Required.""" + schedule_id: str = rest_field(name="scheduleId", visibility=["read", "create", "update", "delete", "query"]) + """Identifier of the schedule. Required.""" + success: bool = rest_field(visibility=["read"]) + """Trigger success status of the schedule run. 
Required.""" + trigger_time: Optional[str] = rest_field( + name="triggerTime", visibility=["read", "create", "update", "delete", "query"] + ) + """Trigger time of the schedule run.""" + error: Optional[str] = rest_field(visibility=["read"]) + """Error information for the schedule run.""" + properties: dict[str, str] = rest_field(visibility=["read"]) + """Properties of the schedule run. Required.""" + + @overload + def __init__( + self, + *, + schedule_id: str, + trigger_time: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SharepointAgentTool(Tool, discriminator="sharepoint_grounding_preview"): + """The input definition information for a sharepoint tool as used to configure an agent. + + :ivar type: The object type, which is always 'sharepoint_grounding'. Required. + :vartype type: str or ~azure.ai.projects.models.SHAREPOINT_GROUNDING_PREVIEW + :ivar sharepoint_grounding_preview: The sharepoint grounding tool parameters. Required. + :vartype sharepoint_grounding_preview: + ~azure.ai.projects.models.SharepointGroundingToolParameters + """ + + type: Literal[ToolType.SHAREPOINT_GROUNDING_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'sharepoint_grounding'. Required.""" + sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The sharepoint grounding tool parameters. Required.""" + + @overload + def __init__( + self, + *, + sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters", + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.SHAREPOINT_GROUNDING_PREVIEW # type: ignore + + + class SharepointGroundingToolParameters(_Model): + """The sharepoint grounding tool parameters. + + :ivar project_connections: The project connections attached to this tool. There can be a + maximum of 1 connection + resource attached to the tool. + :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] + """ + + project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool.""" + + @overload + def __init__( + self, + *, + project_connections: Optional[list["_models.ToolProjectConnection"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + + class StructuredInputDefinition(_Model): + """A structured input that can participate in prompt template substitutions and tool argument + binding. + + :ivar description: A human-readable description of the input. + :vartype description: str + :ivar default_value: The default value for the input if no run-time value is provided. + :vartype default_value: any + :ivar tool_argument_bindings: When provided, the input value is bound to the specified tool + arguments. + :vartype tool_argument_bindings: list[~azure.ai.projects.models.ToolArgumentBinding] + :ivar schema: The JSON schema for the structured input (optional). 
+ :vartype schema: any + :ivar required: Whether the input property is required when the agent is invoked. + :vartype required: bool + """ + + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable description of the input.""" + default_value: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The default value for the input if no run-time value is provided.""" + tool_argument_bindings: Optional[list["_models.ToolArgumentBinding"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """When provided, the input value is bound to the specified tool arguments.""" + schema: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The JSON schema for the structured input (optional).""" + required: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the input property is required when the agent is invoked.""" + + @overload + def __init__( + self, + *, + description: Optional[str] = None, + default_value: Optional[Any] = None, + tool_argument_bindings: Optional[list["_models.ToolArgumentBinding"]] = None, + schema: Optional[Any] = None, + required: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StructuredOutputDefinition(_Model): + """A structured output that can be produced by the agent. + + :ivar name: The name of the structured output. Required. + :vartype name: str + :ivar description: A description of the output to emit. Used by the model to determine when to + emit the output. Required. + :vartype description: str + :ivar schema: The JSON schema for the structured output. Required. 
+ :vartype schema: dict[str, any] + :ivar strict: Whether to enforce strict validation. Default ``true``. Required. + :vartype strict: bool + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the structured output. Required.""" + description: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of the output to emit. Used by the model to determine when to emit the output. + Required.""" + schema: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The JSON schema for the structured output. Required.""" + strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to enforce strict validation. Default ``true``. Required.""" + + @overload + def __init__( + self, + *, + name: str, + description: str, + schema: dict[str, Any], + strict: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StructuredOutputsItemResource(ItemResource, discriminator="structured_outputs"): + """StructuredOutputsItemResource. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.STRUCTURED_OUTPUTS + :ivar output: The structured output captured during the response. Required. 
+ :vartype output: any + """ + + type: Literal[ItemType.STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + output: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The structured output captured during the response. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + output: Any, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.STRUCTURED_OUTPUTS # type: ignore + + +class TaxonomyCategory(_Model): + """Taxonomy category definition. + + :ivar id: Unique identifier of the taxonomy category. Required. + :vartype id: str + :ivar name: Name of the taxonomy category. Required. + :vartype name: str + :ivar description: Description of the taxonomy category. + :vartype description: str + :ivar risk_category: Risk category associated with this taxonomy category. Required. Known + values are: "HateUnfairness", "Violence", "Sexual", and "SelfHarm". + :vartype risk_category: str or ~azure.ai.projects.models.RiskCategory + :ivar sub_categories: List of taxonomy sub categories. Required. + :vartype sub_categories: list[~azure.ai.projects.models.TaxonomySubCategory] + :ivar properties: Additional properties for the taxonomy category. + :vartype properties: dict[str, str] + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique identifier of the taxonomy category. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the taxonomy category. 
Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description of the taxonomy category.""" + risk_category: Union[str, "_models.RiskCategory"] = rest_field( + name="riskCategory", visibility=["read", "create", "update", "delete", "query"] + ) + """Risk category associated with this taxonomy category. Required. Known values are: + \"HateUnfairness\", \"Violence\", \"Sexual\", and \"SelfHarm\".""" + sub_categories: list["_models.TaxonomySubCategory"] = rest_field( + name="subCategories", visibility=["read", "create", "update", "delete", "query"] + ) + """List of taxonomy sub categories. Required.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional properties for the taxonomy category.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + name: str, + risk_category: Union[str, "_models.RiskCategory"], + sub_categories: list["_models.TaxonomySubCategory"], + description: Optional[str] = None, + properties: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + + class TaxonomySubCategory(_Model): + """Taxonomy sub-category definition. + + :ivar id: Unique identifier of the taxonomy sub-category. Required. + :vartype id: str + :ivar name: Name of the taxonomy sub-category. Required. + :vartype name: str + :ivar description: Description of the taxonomy sub-category. + :vartype description: str + :ivar enabled: Whether this taxonomy sub-category is enabled. Required. + :vartype enabled: bool + :ivar properties: Additional properties for the taxonomy sub-category. 
+ :vartype properties: dict[str, str] + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique identifier of the taxonomy sub-category. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the taxonomy sub-category. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description of the taxonomy sub-category.""" + enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether this taxonomy sub-category is enabled. Required.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional properties for the taxonomy sub-category.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + name: str, + enabled: bool, + description: Optional[str] = None, + properties: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + + class ToolArgumentBinding(_Model): + """ToolArgumentBinding. + + :ivar tool_name: The name of the tool to participate in the argument binding. If not provided, + then all tools with matching arguments will participate in binding. + :vartype tool_name: str + :ivar argument_name: The name of the argument within the tool. Required. + :vartype argument_name: str + """ + + tool_name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool to participate in the argument binding. 
If not provided, then all tools + with matching arguments will participate in binding.""" + argument_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the argument within the tool. Required.""" + + @overload + def __init__( + self, + *, + argument_name: str, + tool_name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ToolChoiceObject(_Model): + """ToolChoiceObject. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ToolChoiceObjectCodeInterpreter, ToolChoiceObjectComputer, ToolChoiceObjectFileSearch, + ToolChoiceObjectFunction, ToolChoiceObjectImageGen, ToolChoiceObjectMCP, + ToolChoiceObjectWebSearch + + :ivar type: Required. Known values are: "file_search", "function", "computer_use_preview", + "web_search_preview", "image_generation", "code_interpreter", and "mcp". + :vartype type: str or ~azure.ai.projects.models.ToolChoiceObjectType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"file_search\", \"function\", \"computer_use_preview\", + \"web_search_preview\", \"image_generation\", \"code_interpreter\", and \"mcp\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ToolChoiceObjectCodeInterpreter(ToolChoiceObject, discriminator="code_interpreter"): + """ToolChoiceObjectCodeInterpreter. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER + """ + + type: Literal[ToolChoiceObjectType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.CODE_INTERPRETER # type: ignore + + +class ToolChoiceObjectComputer(ToolChoiceObject, discriminator="computer_use_preview"): + """ToolChoiceObjectComputer. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER + """ + + type: Literal[ToolChoiceObjectType.COMPUTER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.COMPUTER # type: ignore + + +class ToolChoiceObjectFileSearch(ToolChoiceObject, discriminator="file_search"): + """ToolChoiceObjectFileSearch. + + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH + """ + + type: Literal[ToolChoiceObjectType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.FILE_SEARCH # type: ignore + + +class ToolChoiceObjectFunction(ToolChoiceObject, discriminator="function"): + """Use this option to force the model to call a specific function. + + :ivar type: For function calling, the type is always ``function``. Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION + :ivar name: The name of the function to call. Required. + :vartype name: str + """ + + type: Literal[ToolChoiceObjectType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """For function calling, the type is always ``function``. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to call. Required.""" + + @overload + def __init__( + self, + *, + name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.FUNCTION # type: ignore + + +class ToolChoiceObjectImageGen(ToolChoiceObject, discriminator="image_generation"): + """ToolChoiceObjectImageGen. + + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION + """ + + type: Literal[ToolChoiceObjectType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.IMAGE_GENERATION # type: ignore + + +class ToolChoiceObjectMCP(ToolChoiceObject, discriminator="mcp"): + """Use this option to force the model to call a specific tool on a remote MCP server. + + :ivar type: For MCP tools, the type is always ``mcp``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP + :ivar server_label: The label of the MCP server to use. Required. + :vartype server_label: str + :ivar name: The name of the tool to call on the server. + :vartype name: str + """ + + type: Literal[ToolChoiceObjectType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """For MCP tools, the type is always ``mcp``. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server to use. Required.""" + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool to call on the server.""" + + @overload + def __init__( + self, + *, + server_label: str, + name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.MCP # type: ignore + + +class ToolChoiceObjectWebSearch(ToolChoiceObject, discriminator="web_search_preview"): + """Note: web_search is not yet available via Azure OpenAI. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH + """ + + type: Literal[ToolChoiceObjectType.WEB_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.WEB_SEARCH # type: ignore + + +class ToolDescription(_Model): + """Description of a tool that can be used by an agent. + + :ivar name: The name of the tool. + :vartype name: str + :ivar description: A brief description of the tool's purpose. + :vartype description: str + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A brief description of the tool's purpose.""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ToolProjectConnection(_Model): + """A project connection resource. 
+ + :ivar project_connection_id: A project connection in a ToolProjectConnectionList attached to + this tool. Required. + :vartype project_connection_id: str + """ + + project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A project connection in a ToolProjectConnectionList attached to this tool. Required.""" + + @overload + def __init__( + self, + *, + project_connection_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ToolProjectConnectionList(_Model): + """A set of project connection resources currently used by either the ``bing_grounding``, + ``fabric_dataagent``, or ``sharepoint_grounding`` tools. + + :ivar project_connections: The project connections attached to this tool. There can be a + maximum of 1 connection + resource attached to the tool. + :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] + """ + + project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool.""" + + @overload + def __init__( + self, + *, + project_connections: Optional[list["_models.ToolProjectConnection"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TopLogProb(_Model): + """The top log probability of a token. + + :ivar token: Required. + :vartype token: str + :ivar logprob: Required. 
+ :vartype logprob: float + :ivar bytes: Required. + :vartype bytes: list[int] + """ + + token: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + logprob: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + token: str, + logprob: float, + bytes: list[int], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class UserProfileMemoryItem(MemoryItem, discriminator="user_profile"): + """A memory item specifically containing user profile information extracted from conversations, + such as preferences, interests, and personal details. + + :ivar memory_id: The unique ID of the memory item. Required. + :vartype memory_id: str + :ivar updated_at: The last update time of the memory item. Required. + :vartype updated_at: ~datetime.datetime + :ivar scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :vartype scope: str + :ivar content: The content of the memory. Required. + :vartype content: str + :ivar kind: The kind of the memory item. Required. User profile information extracted from + conversations. + :vartype kind: str or ~azure.ai.projects.models.USER_PROFILE + """ + + kind: Literal[MemoryItemKind.USER_PROFILE] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The kind of the memory item. Required. 
User profile information extracted from conversations.""" + + @overload + def __init__( + self, + *, + memory_id: str, + updated_at: datetime.datetime, + scope: str, + content: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = MemoryItemKind.USER_PROFILE # type: ignore + + +class VectorStoreFileAttributes(_Model): + """Set of 16 key-value pairs that can be attached to an object. This can be + useful for storing additional information about the object in a structured + format, and querying for objects via API or the dashboard. Keys are strings + with a maximum length of 64 characters. Values are strings with a maximum + length of 512 characters, booleans, or numbers. + + """ + + +class WebSearchAction(_Model): + """WebSearchAction. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + WebSearchActionFind, WebSearchActionOpenPage, WebSearchActionSearch + + :ivar type: Required. Known values are: "search", "open_page", and "find". + :vartype type: str or ~azure.ai.projects.models.WebSearchActionType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"search\", \"open_page\", and \"find\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class WebSearchActionFind(WebSearchAction, discriminator="find"): + """Action type "find": Searches for a pattern within a loaded page. + + :ivar type: The action type. Required. + :vartype type: str or ~azure.ai.projects.models.FIND + :ivar url: The URL of the page searched for the pattern. Required. + :vartype url: str + :ivar pattern: The pattern or text to search for within the page. Required. + :vartype pattern: str + """ + + type: Literal[WebSearchActionType.FIND] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The action type. Required.""" + url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the page searched for the pattern. Required.""" + pattern: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The pattern or text to search for within the page. Required.""" + + @overload + def __init__( + self, + *, + url: str, + pattern: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = WebSearchActionType.FIND # type: ignore + + +class WebSearchActionOpenPage(WebSearchAction, discriminator="open_page"): + """Action type "open_page" - Opens a specific URL from search results. + + :ivar type: The action type. Required. + :vartype type: str or ~azure.ai.projects.models.OPEN_PAGE + :ivar url: The URL opened by the model. Required. + :vartype url: str + """ + + type: Literal[WebSearchActionType.OPEN_PAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The action type. 
Required.""" + url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL opened by the model. Required.""" + + @overload + def __init__( + self, + *, + url: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = WebSearchActionType.OPEN_PAGE # type: ignore + + +class WebSearchActionSearch(WebSearchAction, discriminator="search"): + """Action type "search" - Performs a web search query. + + :ivar type: The action type. Required. + :vartype type: str or ~azure.ai.projects.models.SEARCH + :ivar query: The search query. Required. + :vartype query: str + """ + + type: Literal[WebSearchActionType.SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The action type. Required.""" + query: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The search query. Required.""" + + @overload + def __init__( + self, + *, + query: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = WebSearchActionType.SEARCH # type: ignore + + +class WebSearchPreviewTool(Tool, discriminator="web_search_preview"): + """Note: web_search is not yet available via Azure OpenAI. + + :ivar type: The type of the web search tool. One of ``web_search_preview`` or + ``web_search_preview_2025_03_11``. Required. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_PREVIEW + :ivar user_location: The user's location. 
+ :vartype user_location: ~azure.ai.projects.models.Location + :ivar search_context_size: High level guidance for the amount of context window space to use + for the search. One of ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of + the following types: Literal["low"], Literal["medium"], Literal["high"] + :vartype search_context_size: str or str or str + """ + + type: Literal[ToolType.WEB_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the web search tool. One of ``web_search_preview`` or + ``web_search_preview_2025_03_11``. Required.""" + user_location: Optional["_models.Location"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The user's location.""" + search_context_size: Optional[Literal["low", "medium", "high"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """High level guidance for the amount of context window space to use for the search. One of + ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of the following types: + Literal[\"low\"], Literal[\"medium\"], Literal[\"high\"]""" + + @overload + def __init__( + self, + *, + user_location: Optional["_models.Location"] = None, + search_context_size: Optional[Literal["low", "medium", "high"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.WEB_SEARCH_PREVIEW # type: ignore + + +class WebSearchToolCallItemParam(ItemParam, discriminator="web_search_call"): + """The results of a web search tool call. See the + `web search guide `_ for more information. + + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL + :ivar action: An object describing the specific action taken in this web search call. + Includes details on how the model used the web (search, open_page, find). Required. + :vartype action: ~azure.ai.projects.models.WebSearchAction + """ + + type: Literal[ItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + action: "_models.WebSearchAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An object describing the specific action taken in this web search call. + Includes details on how the model used the web (search, open_page, find). Required.""" + + @overload + def __init__( + self, + *, + action: "_models.WebSearchAction", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.WEB_SEARCH_CALL # type: ignore + + +class WebSearchToolCallItemResource(ItemResource, discriminator="web_search_call"): + """The results of a web search tool call. See the + `web search guide `_ for more information. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL + :ivar status: The status of the web search tool call. Required. Is one of the following types: + Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"] + :vartype status: str or str or str or str + :ivar action: An object describing the specific action taken in this web search call. 
+ Includes details on how the model used the web (search, open_page, find). Required. + :vartype action: ~azure.ai.projects.models.WebSearchAction + """ + + type: Literal[ItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "searching", "completed", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the web search tool call. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], Literal[\"failed\"]""" + action: "_models.WebSearchAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An object describing the specific action taken in this web search call. + Includes details on how the model used the web (search, open_page, find). Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "searching", "completed", "failed"], + action: "_models.WebSearchAction", + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.WEB_SEARCH_CALL # type: ignore + + +class WeeklyRecurrenceSchedule(RecurrenceSchedule, discriminator="Weekly"): + """Weekly recurrence schedule. + + :ivar type: Weekly recurrence type. Required. Weekly recurrence pattern. + :vartype type: str or ~azure.ai.projects.models.WEEKLY + :ivar days_of_week: Days of the week for the recurrence schedule. Required. 
+ :vartype days_of_week: list[str or ~azure.ai.projects.models.DayOfWeek] + """ + + type: Literal[RecurrenceType.WEEKLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Weekly recurrence type. Required. Weekly recurrence pattern.""" + days_of_week: list[Union[str, "_models.DayOfWeek"]] = rest_field( + name="daysOfWeek", visibility=["read", "create", "update", "delete", "query"] + ) + """Days of the week for the recurrence schedule. Required.""" + + @overload + def __init__( + self, + *, + days_of_week: list[Union[str, "_models.DayOfWeek"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = RecurrenceType.WEEKLY # type: ignore + + +class WorkflowDefinition(AgentDefinition, discriminator="workflow"): + """The workflow specification in CSDL format. + + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.WORKFLOW + :ivar trigger: (Deprecated) The CSDL trigger definition. Use ``workflow`` property instead to + send CSDL yaml definition inline. + :vartype trigger: dict[str, any] + :ivar workflow: The CSDL YAML definition of the workflow. + :vartype workflow: str + """ + + kind: Literal[AgentKind.WORKFLOW] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + trigger: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """(Deprecated) The CSDL trigger definition. 
Use ``workflow`` property instead to send CSDL yaml + definition inline.""" + workflow: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The CSDL YAML definition of the workflow.""" + + @overload + def __init__( + self, + *, + rai_config: Optional["_models.RaiConfig"] = None, + trigger: Optional[dict[str, Any]] = None, + workflow: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = AgentKind.WORKFLOW # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch.py new file mode 100644 index 000000000000..6cd95db87150 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch.py @@ -0,0 +1,39 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List, Dict +from ._patch_evaluations import EvaluatorIds +from ._models import CustomCredential as CustomCredentialGenerated + + +class CustomCredential(CustomCredentialGenerated): + """Custom credential definition. + + :ivar type: The credential type. Always equals CredentialType.CUSTOM. Required. + :vartype type: str or ~azure.ai.projects.models.CredentialType + :ivar credential_keys: The secret custom credential keys. Required. + :vartype credential_keys: dict[str, str] + """ + + credential_keys: Dict[str, str] = {} + """The secret custom credential keys. 
Required.""" + + +__all__: List[str] = [ + "EvaluatorIds", + "CustomCredential", +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch_evaluations.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch_evaluations.py new file mode 100644 index 000000000000..d362c28d0d8a --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch_evaluations.py @@ -0,0 +1,48 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from enum import Enum + +from azure.core import CaseInsensitiveEnumMeta + + +class EvaluatorIds(str, Enum, metaclass=CaseInsensitiveEnumMeta): + RELEVANCE = "azureai://built-in/evaluators/relevance" + HATE_UNFAIRNESS = "azureai://built-in/evaluators/hate_unfairness" + VIOLENCE = "azureai://built-in/evaluators/violence" + GROUNDEDNESS = "azureai://built-in/evaluators/groundedness" + GROUNDEDNESS_PRO = "azureai://built-in/evaluators/groundedness_pro" + BLEU_SCORE = "azureai://built-in/evaluators/bleu_score" + CODE_VULNERABILITY = "azureai://built-in/evaluators/code_vulnerability" + COHERENCE = "azureai://built-in/evaluators/coherence" + CONTENT_SAFETY = "azureai://built-in/evaluators/content_safety" + F1_SCORE = "azureai://built-in/evaluators/f1_score" + FLUENCY = "azureai://built-in/evaluators/fluency" + GLEU_SCORE = "azureai://built-in/evaluators/gleu_score" + INDIRECT_ATTACK = "azureai://built-in/evaluators/indirect_attack" + INTENT_RESOLUTION = "azureai://built-in/evaluators/intent_resolution" + METEOR_SCORE = "azureai://built-in/evaluators/meteor_score" + PROTECTED_MATERIAL = "azureai://built-in/evaluators/protected_material" + RETRIEVAL = "azureai://built-in/evaluators/retrieval" + ROUGE_SCORE = "azureai://built-in/evaluators/rouge_score" + SELF_HARM = "azureai://built-in/evaluators/self_harm" + SEXUAL = "azureai://built-in/evaluators/sexual" + SIMILARITY = "azureai://built-in/evaluators/similarity" + QA = "azureai://built-in/evaluators/qa" + DOCUMENT_RETRIEVAL = "azureai://built-in/evaluators/document_retrieval" + TASK_ADHERENCE = "azureai://built-in/evaluators/task_adherence" + TOOL_CALL_ACCURACY = "azureai://built-in/evaluators/tool_call_accuracy" + UNGROUNDED_ATTRIBUTES = "azureai://built-in/evaluators/ungrounded_attributes" + RESPONSE_COMPLETENESS = "azureai://built-in/evaluators/response_completeness" + # AOAI Graders + LABEL_GRADER = 
"azureai://built-in/evaluators/azure-openai/label_grader" + STRING_CHECK_GRADER = "azureai://built-in/evaluators/azure-openai/string_check_grader" + TEXT_SIMILARITY_GRADER = "azureai://built-in/evaluators/azure-openai/text_similarity_grader" + GENERAL_GRADER = "azureai://built-in/evaluators/azure-openai/custom_grader" + SCORE_MODEL_GRADER = "azureai://built-in/evaluators/azure-openai/score_model_grader" diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/__init__.py new file mode 100644 index 000000000000..8026245c2abc --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/model_base.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/model_base.py new file mode 100644 index 000000000000..03b8c4ce34a0 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/model_base.py @@ -0,0 +1,1237 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except, import-error, no-value-for-parameter + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from collections.abc import MutableMapping +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null +from azure.core.rest import HttpResponse + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S' + + Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython + + :param timedelta td: The timedelta to convert + :rtype: str + :return: ISO8601 version of this timedelta + """ + + # Split seconds to larger units + seconds = td.total_seconds() + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + + days, hours, minutes = list(map(int, (days, hours, minutes))) + seconds = round(seconds, 6) + + # Build date + date_str = "" + if days: + date_str = "%sD" % days + + if hours or minutes or seconds: + # Build time + time_str = "T" + + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) + + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) + + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) + + time_str += "{}S".format(seconds_string) + else: + time_str = "" + + return "P" + date_str + time_str + + +def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: + encoded = base64.b64encode(o).decode() + if format == "base64url": + return encoded.strip("=").replace("+", "-").replace("/", "_") + return encoded + + +def _serialize_datetime(o, format: typing.Optional[str] = None): + if hasattr(o, "year") and hasattr(o, "hour"): + if format == "rfc7231": + return email.utils.format_datetime(o, usegmt=True) + if format == "unix-timestamp": + return int(calendar.timegm(o.utctimetuple())) + + # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set) + if not o.tzinfo: + iso_formatted = 
o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> 
datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. 
+ :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: 
typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a 
view on D's values + :rtype: ~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ + return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. 
+ :param any args: Either a mapping object or an iterable of key-value pairs. + """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. + :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds 
in the method above + pass + return o + + +def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", 
False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. 
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + 
return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: 
_deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: list[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? 
+ try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? + if getattr(annotation, "__origin__", None) is typing.Union: + # initial ordering is we make `string` the last deserialization option, because it is often them most generic + deserializers = [ + _get_deserialize_callable_from_annotation(arg, module, rf) + for arg in _sorted_annotations(annotation.__args__) # pyright: ignore + ] + + return functools.partial(_deserialize_with_union, deserializers) + + try: + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() == "dict": + value_deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[1], module, rf # pyright: ignore + ) + + return functools.partial( + _deserialize_dict, + value_deserializer, + module, + ) + except (AttributeError, IndexError): + pass + try: + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() in ["list", "set", "tuple", "sequence"]: + if len(annotation.__args__) > 1: # pyright: ignore + entry_deserializers = [ + _get_deserialize_callable_from_annotation(dt, module, rf) + for dt in annotation.__args__ # pyright: ignore + ] + return 
functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) + deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[0], module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_sequence, deserializer, module) + except (TypeError, IndexError, AttributeError, SyntaxError): + pass + + def _deserialize_default( + deserializer, + obj, + ): + if obj is None: + return obj + try: + return _deserialize_with_callable(deserializer, obj) + except Exception: + pass + return obj + + if get_deserializer(annotation, rf): + return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) + + return functools.partial(_deserialize_default, annotation) + + +def _deserialize_with_callable( + deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if deserializer is bool: + return value.text == "true" if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value) + except ValueError: + # for unknown value, return raw value + return value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + 
format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + +def _failsafe_deserialize( + deserializer: typing.Any, + response: HttpResponse, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, response.json(), module, rf, format) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + response: HttpResponse, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, response.text()) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> 
typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. 
+ :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, list[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + 
xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return 
ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: list[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/serialization.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/serialization.py new file mode 100644 index 000000000000..45a3e44e45cb --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/serialization.py @@ -0,0 +1,2030 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. + :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. 
+ data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. 
+ :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + +TZ_UTC = datetime.timezone.utc + +_FLATTEN = re.compile(r"(? None: + self.additional_properties: Optional[dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. 
+ + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. 
Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. 
+ + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, 
target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized |= target_obj.additional_properties + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. + :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." 
+ raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec # pylint: disable=eval-used + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. 
+ + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. + :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + 
el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. 
+ + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. 
+ + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." 
+ raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(list[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. 
+ + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an 
attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer: + """Response object model deserializer. 
+ + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. 
+ :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
+ if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. 
If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. 
+ :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. + """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. 
+ :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. 
+ + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. 
+ :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. 
    @staticmethod
    def deserialize_long(attr):
        """Deserialize string into long (Py2) or int (Py3).

        :param str attr: response string to be deserialized.
        :return: Deserialized int
        :rtype: long or int
        :raises ValueError: if string format invalid.
        """
        # XML payloads arrive as Element nodes; unwrap to the text content first.
        if isinstance(attr, ET.Element):
            attr = attr.text
        return _long_type(attr)  # type: ignore

    @staticmethod
    def deserialize_duration(attr):
        """Deserialize ISO-8601 formatted string into TimeDelta object.

        :param str attr: response string to be deserialized.
        :return: Deserialized duration
        :rtype: TimeDelta
        :raises DeserializationError: if string format invalid.
        """
        if isinstance(attr, ET.Element):
            attr = attr.text
        try:
            duration = isodate.parse_duration(attr)
        except (ValueError, OverflowError, AttributeError) as err:
            # AttributeError covers attr being None (e.g. an empty XML element).
            msg = "Cannot deserialize duration object."
            raise DeserializationError(msg) from err
        return duration

    @staticmethod
    def deserialize_date(attr):
        """Deserialize ISO-8601 formatted string into Date object.

        :param str attr: response string to be deserialized.
        :return: Deserialized date
        :rtype: Date
        :raises DeserializationError: if string format invalid.
        """
        if isinstance(attr, ET.Element):
            attr = attr.text
        # [^\W\d_] matches any Unicode letter: reject month-name forms up front
        # rather than letting isodate guess at them.
        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
        # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
        return isodate.parse_date(attr, defaultmonth=0, defaultday=0)

    @staticmethod
    def deserialize_time(attr):
        """Deserialize ISO-8601 formatted string into time object.

        :param str attr: response string to be deserialized.
        :return: Deserialized time
        :rtype: datetime.time
        :raises DeserializationError: if string format invalid.
        """
        if isinstance(attr, ET.Element):
            attr = attr.text
        # NOTE(review): error text says "Date" but this validates a time string.
        if re.search(r"[^\W\d_]", attr, re.I + re.U):  # type: ignore
            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
        return isodate.parse_time(attr)

    @staticmethod
    def deserialize_rfc(attr):
        """Deserialize RFC-1123 formatted string into Datetime object.

        :param str attr: response string to be deserialized.
        :return: Deserialized RFC datetime
        :rtype: Datetime
        :raises DeserializationError: if string format invalid.
        """
        if isinstance(attr, ET.Element):
            attr = attr.text
        try:
            parsed_date = email.utils.parsedate_tz(attr)  # type: ignore
            # parsed_date[9] is the UTC offset in seconds; /60 converts to the
            # minutes that datetime.timezone expects.
            date_obj = datetime.datetime(
                *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
            )
            if not date_obj.tzinfo:
                date_obj = date_obj.astimezone(tz=TZ_UTC)
        except ValueError as err:
            msg = "Cannot deserialize to rfc datetime object."
            raise DeserializationError(msg) from err
        return date_obj

    @staticmethod
    def deserialize_iso(attr):
        """Deserialize ISO-8601 formatted string into Datetime object.

        :param str attr: response string to be deserialized.
        :return: Deserialized ISO datetime
        :rtype: Datetime
        :raises DeserializationError: if string format invalid.
        """
        if isinstance(attr, ET.Element):
            attr = attr.text
        try:
            attr = attr.upper()  # type: ignore
            match = Deserializer.valid_date.match(attr)
            if not match:
                raise ValueError("Invalid datetime string: " + attr)

            # Python datetime carries at most microsecond precision: truncate any
            # fractional-second component longer than 6 digits before parsing.
            check_decimal = attr.split(".")
            if len(check_decimal) > 1:
                decimal_str = ""
                for digit in check_decimal[1]:
                    if digit.isdigit():
                        decimal_str += digit
                    else:
                        break
                if len(decimal_str) > 6:
                    attr = attr.replace(decimal_str, decimal_str[0:6])

            date_obj = isodate.parse_datetime(attr)
            # Reject out-of-range years that later UTC conversions could not represent.
            test_utc = date_obj.utctimetuple()
            if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
                raise OverflowError("Hit max or min date")
        except (ValueError, OverflowError, AttributeError) as err:
            msg = "Cannot deserialize datetime object."
            raise DeserializationError(msg) from err
        return date_obj

    @staticmethod
    def deserialize_unix(attr):
        """Serialize Datetime object into IntTime format.
        This is represented as seconds.

        :param int attr: Object to be serialized.
        :return: Deserialized datetime
        :rtype: Datetime
        :raises DeserializationError: if format invalid
        """
        if isinstance(attr, ET.Element):
            attr = int(attr.text)  # type: ignore
        try:
            # Accept numeric strings as well as ints; interpret as seconds since epoch (UTC).
            attr = int(attr)
            date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
        except ValueError as err:
            msg = "Cannot deserialize to unix datetime object."
            raise DeserializationError(msg) from err
        return date_obj
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# pylint: disable=broad-exception-caught,unused-argument,logging-fstring-interpolation,too-many-statements,too-many-return-statements
"""Starlette-based hosting adapter that exposes an agent via /runs and /responses."""
import inspect
import json
import os
import traceback
from abc import abstractmethod
from typing import Any, AsyncGenerator, Generator, Union

import uvicorn
from opentelemetry import context as otel_context, trace
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
from starlette.applications import Starlette
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse, Response, StreamingResponse
from starlette.routing import Route
from starlette.types import ASGIApp

from ..constants import Constants
from ..logger import get_logger, request_context
from ..models import (
    Response as OpenAIResponse,
    ResponseStreamEvent,
)
from .common.agent_run_context import AgentRunContext

logger = get_logger()
# When enabled, raw exception text is surfaced in HTTP error bodies and SSE error
# events. Useful for local debugging; keep disabled in production deployments.
DEBUG_ERRORS = os.environ.get(Constants.AGENT_DEBUG_ERRORS, "false").lower() == "true"


class AgentRunContextMiddleware(BaseHTTPMiddleware):
    """Builds an :class:`AgentRunContext` for POSTs to ``/runs`` and ``/responses``.

    The parsed payload is stored on ``request.state.agent_run_context`` and the
    derived ids are published into the logging context var so downstream handlers
    and log records can reference them. Malformed JSON yields a 400; a failure
    while constructing the context yields a 500.
    """

    def __init__(self, app: ASGIApp):
        super().__init__(app)

    async def dispatch(self, request: Request, call_next):
        if request.url.path in ("/runs", "/responses"):
            try:
                self.set_request_id_to_context_var(request)
                payload = await request.json()
            except Exception as e:
                logger.error(f"Invalid JSON payload: {e}")
                return JSONResponse({"error": f"Invalid JSON payload: {e}"}, status_code=400)
            try:
                request.state.agent_run_context = AgentRunContext(payload)
                self.set_run_context_to_context_var(request.state.agent_run_context)
            except Exception as e:
                logger.error(f"Context build failed: {e}.", exc_info=True)
                return JSONResponse({"error": f"Context build failed: {e}"}, status_code=500)
        return await call_next(request)

    def set_request_id_to_context_var(self, request):
        """Record the caller-supplied X-Request-Id header in the logging context."""
        request_id = request.headers.get("X-Request-Id", None)
        if request_id:
            ctx = request_context.get() or {}
            ctx["azure.ai.agentshosting.x-request-id"] = request_id
            request_context.set(ctx)

    def set_run_context_to_context_var(self, run_context):
        """Publish run/conversation/agent identifiers into the logging context."""
        agent_id = ""
        agent_obj = run_context.get_agent_id_object()
        if agent_obj:
            agent_name = getattr(agent_obj, "name", "")
            agent_version = getattr(agent_obj, "version", "")
            agent_id = f"{agent_name}:{agent_version}"

        res = {
            "azure.ai.agentshosting.response_id": run_context.response_id or "",
            "azure.ai.agentshosting.conversation_id": run_context.conversation_id or "",
            "azure.ai.agentshosting.streaming": str(run_context.stream or False),
            "gen_ai.agent.id": agent_id,
            "gen_ai.provider.name": "AzureAI Hosted Agents",
            "gen_ai.response.id": run_context.response_id or "",
        }
        ctx = request_context.get() or {}
        ctx.update(res)
        request_context.set(ctx)


class FoundryCBAgent:
    """Base class for container-hosted agents.

    Subclasses implement :meth:`agent_run`; this class wires up the Starlette app,
    CORS, run-context middleware, health endpoints, tracing, and SSE streaming of
    generator/async-generator results.
    """

    def __init__(self):
        async def runs_endpoint(request):
            # Set up tracing context and span
            context = request.state.agent_run_context
            ctx = request_context.get()
            with self.tracer.start_as_current_span(
                name=f"ContainerAgentsAdapter-{context.response_id}",
                attributes=ctx,
                kind=trace.SpanKind.SERVER,
            ):
                try:
                    logger.info("Start processing CreateResponse request:")

                    # Capture the current trace context so the (possibly lazily
                    # consumed) generators below can re-attach it.
                    context_carrier = {}
                    TraceContextTextMapPropagator().inject(context_carrier)

                    resp = await self.agent_run(context)

                    if inspect.isgenerator(resp):
                        # Prefetch first event to allow 500 status if generation fails immediately
                        try:
                            first_event = next(resp)
                        except StopIteration:
                            # No items produced; treat as empty successful stream
                            # (mirrors the StopAsyncIteration handling below —
                            # previously this fell into the broad except and
                            # returned a spurious 500).
                            def empty_sync_gen():
                                yield "data: [DONE]\n\n"

                            return StreamingResponse(empty_sync_gen(), media_type="text/event-stream")
                        except Exception as e:  # noqa: BLE001
                            err_msg = str(e) if DEBUG_ERRORS else "Internal error"
                            logger.error("Generator initialization failed: %s\n%s", e, traceback.format_exc())
                            return JSONResponse({"error": err_msg}, status_code=500)

                        def gen():
                            ctx = TraceContextTextMapPropagator().extract(carrier=context_carrier)
                            token = otel_context.attach(ctx)
                            error_sent = False
                            try:
                                # yield prefetched first event
                                yield _event_to_sse_chunk(first_event)
                                for event in resp:
                                    yield _event_to_sse_chunk(event)
                            except Exception as e:  # noqa: BLE001
                                err_msg = str(e) if DEBUG_ERRORS else "Internal error"
                                logger.error("Error in non-async generator: %s\n%s", e, traceback.format_exc())
                                payload = {"error": err_msg}
                                yield f"event: error\ndata: {json.dumps(payload)}\n\n"
                                yield "data: [DONE]\n\n"
                                error_sent = True
                            finally:
                                logger.info("End of processing CreateResponse request:")
                                otel_context.detach(token)
                                if not error_sent:
                                    yield "data: [DONE]\n\n"

                        return StreamingResponse(gen(), media_type="text/event-stream")
                    if inspect.isasyncgen(resp):
                        # Prefetch first async event to allow early 500
                        try:
                            first_event = await resp.__anext__()
                        except StopAsyncIteration:
                            # No items produced; treat as empty successful stream
                            def empty_gen():
                                yield "data: [DONE]\n\n"

                            return StreamingResponse(empty_gen(), media_type="text/event-stream")
                        except Exception as e:  # noqa: BLE001
                            err_msg = str(e) if DEBUG_ERRORS else "Internal error"
                            logger.error("Async generator initialization failed: %s\n%s", e, traceback.format_exc())
                            return JSONResponse({"error": err_msg}, status_code=500)

                        async def gen_async():
                            ctx = TraceContextTextMapPropagator().extract(carrier=context_carrier)
                            token = otel_context.attach(ctx)
                            error_sent = False
                            try:
                                # yield prefetched first event
                                yield _event_to_sse_chunk(first_event)
                                async for event in resp:
                                    yield _event_to_sse_chunk(event)
                            except Exception as e:  # noqa: BLE001
                                err_msg = str(e) if DEBUG_ERRORS else "Internal error"
                                logger.error("Error in async generator: %s\n%s", e, traceback.format_exc())
                                payload = {"error": err_msg}
                                yield f"event: error\ndata: {json.dumps(payload)}\n\n"
                                yield "data: [DONE]\n\n"
                                error_sent = True
                            finally:
                                logger.info("End of processing CreateResponse request.")
                                otel_context.detach(token)
                                if not error_sent:
                                    yield "data: [DONE]\n\n"

                        return StreamingResponse(gen_async(), media_type="text/event-stream")
                    # Non-streaming result: serialize the full response object.
                    logger.info("End of processing CreateResponse request.")
                    return JSONResponse(resp.as_dict())
                except Exception as e:
                    # TODO: extract status code from exception
                    logger.error(f"Error processing CreateResponse request: {traceback.format_exc()}")
                    return JSONResponse({"error": str(e)}, status_code=500)

        async def liveness_endpoint(request):
            result = await self.agent_liveness(request)
            return _to_response(result)

        async def readiness_endpoint(request):
            result = await self.agent_readiness(request)
            return _to_response(result)

        routes = [
            Route("/runs", runs_endpoint, methods=["POST"], name="agent_run"),
            Route("/responses", runs_endpoint, methods=["POST"], name="agent_response"),
            Route("/liveness", liveness_endpoint, methods=["GET"], name="agent_liveness"),
            Route("/readiness", readiness_endpoint, methods=["GET"], name="agent_readiness"),
        ]

        self.app = Starlette(routes=routes)
        self.app.add_middleware(
            CORSMiddleware,
            allow_origins=["*"],
            allow_credentials=True,
            allow_methods=["*"],
            allow_headers=["*"],
        )
        self.app.add_middleware(AgentRunContextMiddleware)

        # NOTE(review): on_event is deprecated in newer Starlette in favor of
        # lifespan handlers; kept here for compatibility with the pinned version.
        @self.app.on_event("startup")
        async def attach_appinsights_logger():
            import logging

            # Forward uvicorn's loggers through the App Insights handler, if present.
            for handler in logger.handlers:
                if handler.name == "appinsights_handler":
                    for logger_name in ["uvicorn", "uvicorn.error", "uvicorn.access"]:
                        uv_logger = logging.getLogger(logger_name)
                        uv_logger.addHandler(handler)
                        uv_logger.setLevel(logger.level)
                        uv_logger.propagate = False

        # Populated by init_tracing() before the server starts serving.
        self.tracer = None

    @abstractmethod
    async def agent_run(
        self, context: AgentRunContext
    ) -> Union[OpenAIResponse, Generator[ResponseStreamEvent, Any, Any], AsyncGenerator[ResponseStreamEvent, Any]]:
        """Produce a full response, or a (async) generator of stream events.

        :param context: Parsed request context for this run.
        :type context: AgentRunContext
        :return: A complete response object, or a sync/async generator of events
            which will be streamed to the caller as SSE.
        """
        raise NotImplementedError

    async def agent_liveness(self, request) -> Union[Response, dict]:
        """Liveness probe; override to add custom checks."""
        return Response(status_code=200)

    async def agent_readiness(self, request) -> Union[Response, dict]:
        """Readiness probe; override to add custom checks."""
        return {"status": "ready"}

    async def run_async(
        self,
        port: int = int(os.environ.get("DEFAULT_AD_PORT", 8088)),
    ) -> None:
        """
        Awaitable server starter for use **inside** an existing event loop.

        :param port: Port to listen on.
        :type port: int
        """
        self.init_tracing()
        config = uvicorn.Config(self.app, host="0.0.0.0", port=port, loop="asyncio")
        server = uvicorn.Server(config)
        logger.info(f"Starting FoundryCBAgent server async on port {port}")
        await server.serve()

    def run(self, port: int = int(os.environ.get("DEFAULT_AD_PORT", 8088))) -> None:
        """
        Start a Starlette server on localhost: exposing:
        POST /runs
        POST /responses
        GET /liveness
        GET /readiness

        :param port: Port to listen on.
        :type port: int
        """
        self.init_tracing()
        logger.info(f"Starting FoundryCBAgent server on port {port}")
        uvicorn.run(self.app, host="0.0.0.0", port=port)

    def init_tracing(self):
        """Configure the global tracer provider from environment settings.

        Exporters are only installed when an OTLP endpoint or an Application
        Insights connection string is configured; otherwise the default (no-op)
        provider is used.
        """
        exporter = os.environ.get(Constants.OTEL_EXPORTER_ENDPOINT)
        app_insights_conn_str = os.environ.get(Constants.APPLICATION_INSIGHTS_CONNECTION_STRING)
        if exporter or app_insights_conn_str:
            from opentelemetry.sdk.resources import Resource
            from opentelemetry.sdk.trace import TracerProvider

            resource = Resource.create(self.get_trace_attributes())
            provider = TracerProvider(resource=resource)
            if exporter:
                self.setup_otlp_exporter(exporter, provider)
            if app_insights_conn_str:
                self.setup_application_insights_exporter(app_insights_conn_str, provider)
            trace.set_tracer_provider(provider)
        self.init_tracing_internal(exporter_endpoint=exporter, app_insights_conn_str=app_insights_conn_str)
        self.tracer = trace.get_tracer(__name__)

    def get_trace_attributes(self):
        """Resource attributes attached to every span; override to extend."""
        return {
            "service.name": "azure.ai.agentshosting",
        }

    def init_tracing_internal(self, exporter_endpoint=None, app_insights_conn_str=None):
        """Hook for subclasses to install framework-specific instrumentation."""

    def setup_application_insights_exporter(self, connection_string, provider):
        """Attach an Azure Monitor span exporter to the given provider."""
        from opentelemetry.sdk.trace.export import BatchSpanProcessor

        from azure.monitor.opentelemetry.exporter import AzureMonitorTraceExporter

        exporter_instance = AzureMonitorTraceExporter.from_connection_string(connection_string)
        processor = BatchSpanProcessor(exporter_instance)
        provider.add_span_processor(processor)
        logger.info("Tracing setup with Application Insights exporter.")

    def setup_otlp_exporter(self, endpoint, provider):
        """Attach an OTLP/HTTP span exporter to the given provider."""
        from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
        from opentelemetry.sdk.trace.export import BatchSpanProcessor

        exporter_instance = OTLPSpanExporter(endpoint=endpoint)
        processor = BatchSpanProcessor(exporter_instance)
        provider.add_span_processor(processor)
        logger.info(f"Tracing setup with OTLP exporter: {endpoint}")


def _event_to_sse_chunk(event: ResponseStreamEvent) -> str:
    """Serialize a stream event into an SSE frame (with event name if typed)."""
    event_data = json.dumps(event.as_dict())
    if event.type:
        return f"event: {event.type}\ndata: {event_data}\n\n"
    return f"data: {event_data}\n\n"


def _to_response(result: Union[Response, dict]) -> Response:
    """Pass Response objects through; wrap plain dicts as JSON."""
    return result if isinstance(result, Response) else JSONResponse(result)
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from ...logger import get_logger
from ...models import CreateResponse
from ...models.projects import AgentId, AgentReference, ResponseConversation1
from .id_generator.foundry_id_generator import FoundryIdGenerator
from .id_generator.id_generator import IdGenerator

logger = get_logger()


class AgentRunContext:
    """Per-request state derived from a CreateResponse payload.

    Holds the raw payload, its deserialized form, and the id generator used to
    mint response/conversation ids for this run.
    """

    def __init__(self, payload: dict):
        self._raw_payload = payload
        self._request = _deserialize_create_response(payload)
        generator = FoundryIdGenerator.from_request(payload)
        self._id_generator = generator
        self._response_id = generator.response_id
        self._conversation_id = generator.conversation_id
        self._stream = self._request.get("stream", False)

    @property
    def raw_payload(self) -> dict:
        """The request body exactly as received."""
        return self._raw_payload

    @property
    def request(self) -> CreateResponse:
        """The deserialized CreateResponse model."""
        return self._request

    @property
    def id_generator(self) -> IdGenerator:
        """Generator used for ids scoped to this run's partition."""
        return self._id_generator

    @property
    def response_id(self) -> str:
        return self._response_id

    @property
    def conversation_id(self) -> str:
        return self._conversation_id

    @property
    def stream(self) -> bool:
        """Whether the caller requested SSE streaming."""
        return self._stream

    def get_agent_id_object(self) -> AgentId:
        """Build an AgentId from the request's agent reference, or None if absent."""
        agent = self.request.get("agent")
        if not agent:
            return None  # type: ignore
        fields = {
            "type": agent.type,
            "name": agent.name,
            "version": agent.version,
        }
        return AgentId(fields)

    def get_conversation_object(self) -> ResponseConversation1:
        """Wrap the conversation id in a model object, or None if there is none."""
        if not self._conversation_id:
            return None  # type: ignore
        return ResponseConversation1(id=self._conversation_id)


def _deserialize_create_response(payload: dict) -> CreateResponse:
    """Deserialize the payload, replacing the raw agent dict with a typed reference."""
    result = CreateResponse(**payload)
    agent_payload = payload.get("agent")
    if agent_payload:
        result["agent"] = _deserialize_agent_reference(agent_payload)
    return result


def _deserialize_agent_reference(payload: dict) -> AgentReference:
    """Build an AgentReference from a dict; None/empty input yields None."""
    return AgentReference(**payload) if payload else None  # type: ignore
class FoundryIdGenerator(IdGenerator):
    """
    Python port of the C# FoundryIdGenerator.

    Notable behaviors preserved:
    - Secure, alphanumeric entropy via base64 filtering, retrying until at least
      the requested length is available, then truncating to exact length.
    - Watermark must be strictly alphanumeric; inserted mid-entropy.
    - Only one delimiter (default "_") after the prefix; no delimiter between entropy and partition key.
    - Partition key is the last N characters of the second ID segment (post-delimiter).
    """

    def __init__(self, response_id: Optional[str], conversation_id: Optional[str]):
        """Use the provided ids, minting fresh "resp"/"conv" ids for any missing one."""
        self.response_id = response_id or self._new_id("resp")
        self.conversation_id = conversation_id or self._new_id("conv")
        self._partition_id = self._extract_partition_id(self.conversation_id)

    @classmethod
    def from_request(cls, payload: dict) -> "FoundryIdGenerator":
        """Build a generator from a CreateResponse payload.

        Reads ``metadata.response_id`` and ``conversation`` (either a bare id
        string or an object with an ``id`` field); missing values are minted.
        """
        # `metadata` may be present but explicitly null in the payload;
        # payload.get("metadata", {}) would then return None and crash on .get().
        metadata = payload.get("metadata") or {}
        response_id = metadata.get("response_id", None)
        conv_id_raw = payload.get("conversation", None)
        if isinstance(conv_id_raw, str):
            conv_id = conv_id_raw
        elif isinstance(conv_id_raw, dict):
            conv_id = conv_id_raw.get("id", None)
        else:
            conv_id = None
        return cls(response_id, conv_id)

    def generate(self, category: Optional[str] = None) -> str:
        """Mint a new id in `category` (default "id") bound to this run's partition key."""
        prefix = "id" if not category else category
        return self._new_id(prefix, partition_key=self._partition_id)

    # --- Static helpers (mirror C# private static methods) --------------------

    @staticmethod
    def _new_id(
        prefix: str,
        string_length: int = 32,
        partition_key_length: int = 18,
        infix: Optional[str] = "",
        watermark: str = "",
        delimiter: str = "_",
        partition_key: Optional[str] = None,
        partition_key_hint: str = "",
    ) -> str:
        """
        Generates a new ID.

        Format matches the C# logic:
        f"{prefix}{delimiter}{entropy}{infix}{partitionKey}"
        (i.e., exactly one delimiter after prefix; no delimiter between entropy and partition key)

        :raises ValueError: if the watermark contains non-alphanumeric characters.
        """
        entropy = FoundryIdGenerator._secure_entropy(string_length)

        # Partition key precedence: explicit key > derived from hint > fresh entropy.
        if partition_key is not None:
            pkey = partition_key
        elif partition_key_hint:
            pkey = FoundryIdGenerator._extract_partition_id(
                partition_key_hint,
                string_length=string_length,
                partition_key_length=partition_key_length,
                delimiter=delimiter,
            )
        else:
            pkey = FoundryIdGenerator._secure_entropy(partition_key_length)

        if watermark:
            if not _WATERMARK_RE.fullmatch(watermark):
                raise ValueError(f"Only alphanumeric characters may be in watermark: {watermark}")
            half = string_length // 2
            entropy = f"{entropy[:half]}{watermark}{entropy[half:]}"

        infix = infix or ""
        prefix_part = f"{prefix}{delimiter}" if prefix else ""
        return f"{prefix_part}{entropy}{infix}{pkey}"

    @staticmethod
    def _secure_entropy(string_length: int) -> str:
        """
        Generates a secure random alphanumeric string of exactly `string_length`.
        Retries whole generation until the base64-filtered string has at least the
        desired length, then truncates to exactly `string_length` characters.

        :raises ValueError: if string_length < 1.
        """
        if string_length < 1:
            raise ValueError("Must be greater than or equal to 1")

        while True:
            # Use cryptographically secure bytes; base64 then filter to alnum.
            buf = os.urandom(string_length)
            encoded = base64.b64encode(buf).decode("ascii")
            alnum = "".join(ch for ch in encoded if ch.isalnum())
            if len(alnum) >= string_length:
                return alnum[:string_length]
            # else: retry, same as the C# loop which discards and regenerates

    @staticmethod
    def _extract_partition_id(
        id_str: str,
        string_length: int = 32,
        partition_key_length: int = 18,
        delimiter: str = "_",
    ) -> str:
        """
        Extracts partition key from an existing ID.

        Expected shape (per C# logic): "<prefix>_<entropy+partition_key>"
        We take the last `partition_key_length` characters from the *second* segment.

        :raises ValueError: if the id is empty or its second segment is too short
            to contain both entropy and partition key.
        """
        if not id_str:
            raise ValueError("Id cannot be null or empty")

        parts = [p for p in id_str.split(delimiter) if p]  # remove empty entries like C# Split(..., RemoveEmptyEntries)
        if len(parts) < 2:
            raise ValueError(f"Id '{id_str}' does not contain a valid partition key.")

        segment = parts[1]
        if len(segment) < string_length + partition_key_length:
            raise ValueError(f"Id '{id_str}' does not contain a valid id.")

        return segment[-partition_key_length:]
+ + def generate_function_call_id(self) -> str: + return self.generate("func") + + def generate_function_output_id(self) -> str: + return self.generate("funcout") + + def generate_message_id(self) -> str: + return self.generate("msg") diff --git a/sdk/ai/azure-ai-agentserver-core/cspell.json b/sdk/ai/azure-ai-agentserver-core/cspell.json new file mode 100644 index 000000000000..126cadc0625c --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/cspell.json @@ -0,0 +1,27 @@ +{ + "ignoreWords": [ + "Agentic", + "UPIA", + "ANSII", + "inpainting", + "CSDL", + "azureai", + "GLEU", + "fstring", + "alnum", + "GENAI", + "Prereqs", + "mslearn", + "PYTHONIOENCODING", + "GETFL", + "DETFL", + "SETFL", + "Planifica" + ], + "ignorePaths": [ + "*.csv", + "*.json", + "*.rst", + "samples/**" + ] + } \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-core/dev_requirements.txt b/sdk/ai/azure-ai-agentserver-core/dev_requirements.txt new file mode 100644 index 000000000000..129e3e21fef1 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/dev_requirements.txt @@ -0,0 +1,2 @@ +-e ../../../eng/tools/azure-sdk-tools +python-dotenv \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst new file mode 100644 index 000000000000..da01b083b0b3 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst @@ -0,0 +1,34 @@ +azure.ai.agentserver.core package +================================= + +.. automodule:: azure.ai.agentserver.core + :inherited-members: + :members: + :undoc-members: + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + azure.ai.agentserver.core.server + +Submodules +---------- + +azure.ai.agentserver.core.constants module +------------------------------------------ + +.. 
automodule:: azure.ai.agentserver.core.constants + :inherited-members: + :members: + :undoc-members: + +azure.ai.agentserver.core.logger module +--------------------------------------- + +.. automodule:: azure.ai.agentserver.core.logger + :inherited-members: + :members: + :undoc-members: diff --git a/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.id_generator.rst b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.id_generator.rst new file mode 100644 index 000000000000..cf935aa1d1ed --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.id_generator.rst @@ -0,0 +1,26 @@ +azure.ai.agentserver.core.server.common.id\_generator package +============================================================= + +.. automodule:: azure.ai.agentserver.core.server.common.id_generator + :inherited-members: + :members: + :undoc-members: + +Submodules +---------- + +azure.ai.agentserver.core.server.common.id\_generator.foundry\_id\_generator module +----------------------------------------------------------------------------------- + +.. automodule:: azure.ai.agentserver.core.server.common.id_generator.foundry_id_generator + :inherited-members: + :members: + :undoc-members: + +azure.ai.agentserver.core.server.common.id\_generator.id\_generator module +-------------------------------------------------------------------------- + +.. 
automodule:: azure.ai.agentserver.core.server.common.id_generator.id_generator + :inherited-members: + :members: + :undoc-members: diff --git a/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.rst b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.rst new file mode 100644 index 000000000000..26c4aaf4d15a --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.rst @@ -0,0 +1,26 @@ +azure.ai.agentserver.core.server.common package +=============================================== + +.. automodule:: azure.ai.agentserver.core.server.common + :inherited-members: + :members: + :undoc-members: + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + azure.ai.agentserver.core.server.common.id_generator + +Submodules +---------- + +azure.ai.agentserver.core.server.common.agent\_run\_context module +------------------------------------------------------------------ + +.. automodule:: azure.ai.agentserver.core.server.common.agent_run_context + :inherited-members: + :members: + :undoc-members: diff --git a/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.rst b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.rst new file mode 100644 index 000000000000..b82fa765b839 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.rst @@ -0,0 +1,26 @@ +azure.ai.agentserver.core.server package +======================================== + +.. automodule:: azure.ai.agentserver.core.server + :inherited-members: + :members: + :undoc-members: + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + azure.ai.agentserver.core.server.common + +Submodules +---------- + +azure.ai.agentserver.core.server.base module +-------------------------------------------- + +.. 
automodule:: azure.ai.agentserver.core.server.base + :inherited-members: + :members: + :undoc-members: diff --git a/sdk/ai/azure-ai-agentserver-core/pyproject.toml b/sdk/ai/azure-ai-agentserver-core/pyproject.toml new file mode 100644 index 000000000000..32c533034a3b --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/pyproject.toml @@ -0,0 +1,70 @@ +[project] +name = "azure-ai-agentserver-core" +dynamic = ["version", "readme"] +description = "Agents server adapter for Azure AI" +requires-python = ">=3.10" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +license = "MIT" +classifiers = [ + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +keywords = ["azure", "azure sdk"] + +dependencies = [ + "azure-monitor-opentelemetry>=1.5.0", + "azure-ai-projects", + "azure-ai-agents>=1.2.0b5", + "azure-core>=1.35.0", + "azure-identity", + "openai>=1.80.0", + "opentelemetry-api>=1.35", + "opentelemetry-exporter-otlp-proto-http", + "starlette>=0.45.0", + "uvicorn>=0.31.0", +] + +[build-system] +requires = ["setuptools>=69", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "samples*", + "doc*", + "azure", + "azure.ai", +] + +[tool.setuptools.dynamic] +version = { attr = "azure.ai.agentserver.core._version.VERSION" } +readme = { file = ["README.md"], content-type = "text/markdown" } + +[tool.setuptools.package-data] +pytyped = ["py.typed"] + +[tool.ruff] +line-length = 120 +target-version = "py311" +lint.select = ["E", "F", "B", "I"] # E=pycodestyle errors, F=Pyflakes, B=bugbear, I=import sort +lint.ignore = [] +fix = false +exclude = [ + "**/azure/ai/agentserver/core/models/", +] + +[tool.ruff.lint.isort] +known-first-party = 
["azure.ai.agentserver.core"] +combine-as-imports = true + +[tool.azure-sdk-build] +verifytypes = false # has unknown dependencies +pyright = false \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-core/pyrightconfig.json b/sdk/ai/azure-ai-agentserver-core/pyrightconfig.json new file mode 100644 index 000000000000..d53d8b9e5fe9 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/pyrightconfig.json @@ -0,0 +1,10 @@ +{ + "reportTypeCommentUsage": true, + "reportMissingImports": false, + "pythonVersion": "3.11", + "exclude": [ + "./samples" + ], + "extraPaths": [ + ] +} \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/.env.sample b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/.env.sample new file mode 100644 index 000000000000..a19b1c6d02f7 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/.env.sample @@ -0,0 +1,24 @@ +# Core agent configuration +API_HOST=github +WEEKEND_PLANNER_MODE=container + +# GitHub Models (when API_HOST=github) +GITHUB_TOKEN=your-github-token +GITHUB_OPENAI_BASE_URL=https://models.inference.ai.azure.com +GITHUB_MODEL=gpt-4o + +# Azure OpenAI (when API_HOST=azure) +AZURE_OPENAI_ENDPOINT=https://.openai.azure.com/ +AZURE_OPENAI_VERSION=2025-01-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT= + +# Telemetry & tracing +OTEL_EXPORTER_OTLP_ENDPOINT=http://127.0.0.1:4318/v1/traces +OTEL_EXPORTER_OTLP_PROTOCOL=grpc +OTEL_EXPORTER_OTLP_GRPC_ENDPOINT=http://127.0.0.1:4317 +APPLICATION_INSIGHTS_CONNECTION_STRING= + +# Optional GenAI capture overrides +OTEL_GENAI_AGENT_NAME=Bilingual Weekend Planner Agent +OTEL_GENAI_AGENT_DESCRIPTION=Assistant that plans weekend activities using weather and events data in multiple languages +OTEL_GENAI_AGENT_ID=bilingual-weekend-planner diff --git a/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/README.md 
b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/README.md new file mode 100644 index 000000000000..83296f5dd348 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/README.md @@ -0,0 +1,42 @@ +Bilingual Weekend Planner (Custom Container + Telemetry) + +- Container-hosted multi-agent weekend planner with full GenAI telemetry capture and a standalone tracing demo that exercises `opentelemetry-instrumentation-openai-agents-v2`. + +Prereqs +- Optional: Activate repo venv `source .venv/bin/activate` +- Install deps `pip install -U -r samples/python/custom/bilingual_weekend_planner/requirements.txt` + +Env Vars +Choose the API host via `API_HOST`: + +- `github`: GitHub Models hosted on Azure AI Inference + - `GITHUB_TOKEN` + - Optional: `GITHUB_OPENAI_BASE_URL` (default `https://models.inference.ai.azure.com`) + - Optional: `GITHUB_MODEL` (default `gpt-4o`) +- `azure`: Azure OpenAI + - `AZURE_OPENAI_ENDPOINT` (e.g. `https://.openai.azure.com/`) + - `AZURE_OPENAI_VERSION` (e.g. `2025-01-01-preview`) + - `AZURE_OPENAI_CHAT_DEPLOYMENT` (deployment name) + +Modes +- Container (default): runs the bilingual triage agent via `FoundryCBAgent`. +- `API_HOST=github GITHUB_TOKEN=... ./run.sh` +- `API_HOST=azure AZURE_OPENAI_ENDPOINT=... AZURE_OPENAI_VERSION=2025-01-01-preview AZURE_OPENAI_CHAT_DEPLOYMENT=... ./run.sh` + - Test (non-stream): + `curl -s http://localhost:8088/responses -H 'Content-Type: application/json' -d '{"input":"What should I do this weekend in Seattle?"}'` + - Test (stream): + `curl -s http://localhost:8088/responses -H 'Content-Type: application/json' -d '{"input":"Plan my weekend in Barcelona","stream":true}'` +- Telemetry demo: set `WEEKEND_PLANNER_MODE=demo` to run the content-capture simulation (no model calls). 
+ `WEEKEND_PLANNER_MODE=demo python main.py` + +Telemetry +- Console exporter is enabled by default; set `OTEL_EXPORTER_OTLP_ENDPOINT` (HTTP) or `OTEL_EXPORTER_OTLP_GRPC_ENDPOINT` to export spans elsewhere. +- Set `APPLICATION_INSIGHTS_CONNECTION_STRING` to export spans to Azure Monitor. +- GenAI capture flags are pre-configured (content, system instructions, tool metadata). +- `opentelemetry-instrumentation-openai-agents-v2` enables span-and-event message capture for requests, responses, and tool payloads. +- The tracing demo uses the `agents.tracing` helpers to emit spans without invoking external APIs. + +Notes +- Uses `FoundryCBAgent` to host the bilingual weekend planner triage agent on `http://localhost:8088`. +- Tools: `get_weather`, `get_activities`, `get_current_date`. +- Rich logger output highlights tool invocations; bilingual agents route traveler requests to the right language specialist. diff --git a/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py new file mode 100644 index 000000000000..099d8dc45181 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py @@ -0,0 +1,579 @@ +# mypy: ignore-errors +"""Bilingual weekend planner sample with full GenAI telemetry capture.""" + +from __future__ import annotations + +import json +import logging +import os +import random +from dataclasses import dataclass +from datetime import datetime, timezone +from typing import Callable +from urllib.parse import urlparse + +import azure.identity +import openai +from agents import ( + Agent, + OpenAIChatCompletionsModel, + Runner, + function_tool, + set_default_openai_client, + set_tracing_disabled, +) +from agents.tracing import ( + agent_span as tracing_agent_span, + function_span as tracing_function_span, + generation_span as tracing_generation_span, + trace as tracing_trace, +) +from azure.ai.agentserver.core import 
AgentRunContext, FoundryCBAgent +from azure.ai.agentserver.core.models import ( + CreateResponse, + Response as OpenAIResponse, +) +from azure.ai.agentserver.core.models.projects import ( + ItemContentOutputText, + ResponseCompletedEvent, + ResponseCreatedEvent, + ResponseOutputItemAddedEvent, + ResponsesAssistantMessageItemResource, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, +) +from dotenv import load_dotenv +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.instrumentation.openai_agents import OpenAIAgentsInstrumentor +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter +from rich.logging import RichHandler + +try: + from azure.monitor.opentelemetry.exporter import ( # mypy: ignore + AzureMonitorTraceExporter, + ) +except Exception: # pragma: no cover + AzureMonitorTraceExporter = None # mypy: ignore + +# Load env early so adapter init sees them +load_dotenv(override=True) + + +logging.basicConfig( + level=logging.WARNING, + format="%(message)s", + datefmt="[%X]", + handlers=[RichHandler()], +) +logger = logging.getLogger("bilingual_weekend_planner") +RUN_MODE = os.getenv("WEEKEND_PLANNER_MODE", "container").lower() + + +@dataclass +class _ApiConfig: + """Helper describing how to create the OpenAI client.""" + + build_client: Callable[[], openai.AsyncOpenAI] + model_name: str + base_url: str + provider: str + + +def _set_capture_env(provider: str, base_url: str) -> None: + """Enable all GenAI capture toggles prior to instrumentation.""" + + capture_defaults = { + "OTEL_INSTRUMENTATION_OPENAI_AGENTS_CAPTURE_CONTENT": "true", + "OTEL_INSTRUMENTATION_OPENAI_AGENTS_CAPTURE_METRICS": "true", + "OTEL_GENAI_CAPTURE_MESSAGES": "true", + "OTEL_GENAI_CAPTURE_SYSTEM_INSTRUCTIONS": "true", + "OTEL_GENAI_CAPTURE_TOOL_DEFINITIONS": "true", + 
"OTEL_GENAI_EMIT_OPERATION_DETAILS": "true", + "OTEL_GENAI_AGENT_NAME": os.getenv( + "OTEL_GENAI_AGENT_NAME", + "Bilingual Weekend Planner Agent", + ), + "OTEL_GENAI_AGENT_DESCRIPTION": os.getenv( + "OTEL_GENAI_AGENT_DESCRIPTION", + "Assistant that plans weekend activities using weather and events data in multiple languages", + ), + "OTEL_GENAI_AGENT_ID": os.getenv( + "OTEL_GENAI_AGENT_ID", "bilingual-weekend-planner" + ), + } + for env_key, value in capture_defaults.items(): + os.environ.setdefault(env_key, value) + + parsed = urlparse(base_url) + if parsed.hostname: + os.environ.setdefault("OTEL_GENAI_SERVER_ADDRESS", parsed.hostname) + if parsed.port: + os.environ.setdefault("OTEL_GENAI_SERVER_PORT", str(parsed.port)) + + +def _resolve_api_config() -> _ApiConfig: + """Return the client configuration for the requested host.""" + + host = os.getenv("API_HOST", "github").lower() + + if host == "github": + base_url = os.getenv( + "GITHUB_OPENAI_BASE_URL", + "https://models.inference.ai.azure.com", + ).rstrip("/") + model_name = os.getenv("GITHUB_MODEL", "gpt-4o") + api_key = os.environ.get("GITHUB_TOKEN") + if not api_key: + if RUN_MODE != "demo": + raise RuntimeError("GITHUB_TOKEN is required when API_HOST=github") + api_key = "demo-key" + + def _build_client() -> openai.AsyncOpenAI: + return openai.AsyncOpenAI(base_url=base_url, api_key=api_key) + + return _ApiConfig( + build_client=_build_client, + model_name=model_name, + base_url=base_url, + provider="azure.ai.inference", + ) + + if host == "azure": + # Explicitly check for required environment variables + if "AZURE_OPENAI_ENDPOINT" not in os.environ: + raise ValueError("AZURE_OPENAI_ENDPOINT is required when API_HOST=azure") + if "AZURE_OPENAI_VERSION" not in os.environ: + raise ValueError("AZURE_OPENAI_VERSION is required when API_HOST=azure") + if "AZURE_OPENAI_CHAT_DEPLOYMENT" not in os.environ: + raise ValueError( + "AZURE_OPENAI_CHAT_DEPLOYMENT is required when API_HOST=azure" + ) + endpoint = 
os.environ["AZURE_OPENAI_ENDPOINT"].rstrip("/") + api_version = os.environ["AZURE_OPENAI_VERSION"] + deployment = os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"] + + credential = azure.identity.DefaultAzureCredential() + token_provider = azure.identity.get_bearer_token_provider( + credential, + "https://cognitiveservices.azure.com/.default", + ) + + def _build_client() -> openai.AsyncAzureOpenAI: + return openai.AsyncAzureOpenAI( + api_version=api_version, + azure_endpoint=endpoint, + azure_ad_token_provider=token_provider, + ) + + return _ApiConfig( + build_client=_build_client, + model_name=deployment, + base_url=endpoint, + provider="azure.ai.openai", + ) + + raise ValueError( + f"Unsupported API_HOST '{host}'. Supported values are 'github' or 'azure'." + ) + + +def _configure_otel() -> None: + """Configure the tracer provider and exporters.""" + + grpc_endpoint = os.getenv("OTEL_EXPORTER_OTLP_GRPC_ENDPOINT") + if not grpc_endpoint: + default_otlp_endpoint = os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT") + protocol = os.getenv("OTEL_EXPORTER_OTLP_PROTOCOL", "grpc").lower() + if default_otlp_endpoint and protocol == "grpc": + grpc_endpoint = default_otlp_endpoint + + conn = os.getenv("APPLICATION_INSIGHTS_CONNECTION_STRING") + resource = Resource.create( + { + "service.name": "weekend-planner-service", + "service.namespace": "leisure-orchestration", + "service.version": os.getenv("SERVICE_VERSION", "1.0.0"), + } + ) + + tracer_provider = TracerProvider(resource=resource) + + if grpc_endpoint: + tracer_provider.add_span_processor( + BatchSpanProcessor(OTLPSpanExporter(endpoint=grpc_endpoint)) + ) + print(f"[otel] OTLP gRPC exporter configured ({grpc_endpoint})") + elif conn: + if AzureMonitorTraceExporter is None: + print( + "Warning: Azure Monitor exporter not installed. 
" + "Install with: pip install azure-monitor-opentelemetry-exporter", + ) + tracer_provider.add_span_processor( + BatchSpanProcessor(ConsoleSpanExporter()) + ) + else: + tracer_provider.add_span_processor( + BatchSpanProcessor( + AzureMonitorTraceExporter.from_connection_string(conn) + ) + ) + print("[otel] Azure Monitor trace exporter configured") + else: + tracer_provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter())) + print("[otel] Console span exporter configured") + print( + "[otel] Set APPLICATION_INSIGHTS_CONNECTION_STRING to export to Application Insights " + "instead of the console", + ) + + trace.set_tracer_provider(tracer_provider) + + +api_config = _resolve_api_config() +_set_capture_env(api_config.provider, api_config.base_url) +_configure_otel() +OpenAIAgentsInstrumentor().instrument( + tracer_provider=trace.get_tracer_provider(), + capture_message_content="span_and_event", + agent_name="Weekend Planner", + base_url=api_config.base_url, + system=api_config.provider, +) + +client = api_config.build_client() +set_default_openai_client(client) +set_tracing_disabled(False) + + +def _chat_model() -> OpenAIChatCompletionsModel: + """Return the chat completions model used for weekend planning.""" + + return OpenAIChatCompletionsModel(model=api_config.model_name, openai_client=client) + + +SUNNY_WEATHER_PROBABILITY = 0.05 + + +@function_tool +def get_weather(city: str) -> dict[str, object]: + """Fetch mock weather information for the requested city.""" + + logger.info("Getting weather for %s", city) + if random.random() < SUNNY_WEATHER_PROBABILITY: + return {"city": city, "temperature": 72, "description": "Sunny"} + return {"city": city, "temperature": 60, "description": "Rainy"} + + +@function_tool +def get_activities(city: str, date: str) -> list[dict[str, object]]: + """Return mock activities for the supplied city and date.""" + + logger.info("Getting activities for %s on %s", city, date) + return [ + {"name": "Hiking", "location": city}, + 
{"name": "Beach", "location": city}, + {"name": "Museum", "location": city}, + ] + + +@function_tool +def get_current_date() -> str: + """Return the current date as YYYY-MM-DD.""" + + logger.info("Getting current date") + return datetime.now().strftime("%Y-%m-%d") + + +ENGLISH_WEEKEND_PLANNER = Agent( + name="Weekend Planner (English)", + instructions=( + "You help English-speaking travelers plan their weekends. " + "Use the available tools to gather the weekend date, current weather, and local activities. " + "Only recommend activities that align with the weather and include the date in your final response." + ), + tools=[get_weather, get_activities, get_current_date], + model=_chat_model(), +) + +# cSpell:disable +SPANISH_WEEKEND_PLANNER = Agent( + name="Planificador de fin de semana (Español)", + instructions=( + "Ayudas a viajeros hispanohablantes a planificar su fin de semana. " + "Usa las herramientas disponibles para obtener la fecha, el clima y actividades locales. " + "Recomienda actividades acordes al clima e incluye la fecha del fin de semana en tu respuesta." + ), + tools=[get_weather, get_activities, get_current_date], + model=_chat_model(), +) + +TRIAGE_AGENT = Agent( + name="Weekend Planner Triage", + instructions=( + "Revisa el idioma del viajero. " + "Si el mensaje está en español, realiza un handoff a 'Planificador de fin de semana (Español)'. " + "De lo contrario, usa 'Weekend Planner (English)'." 
+ ), + handoffs=[SPANISH_WEEKEND_PLANNER, ENGLISH_WEEKEND_PLANNER], + model=_chat_model(), +) +# cSpell:enable + + +def _root_span_name(provider: str) -> str: + return f"weekend_planning_session[{provider}]" + + +def _apply_weekend_semconv( + span: trace.Span, + *, + user_text: str, + final_text: str, + conversation_id: str | None, + response_id: str, + final_agent_name: str | None, + success: bool, +) -> None: + parsed = urlparse(api_config.base_url) + if parsed.hostname: + span.set_attribute("server.address", parsed.hostname) + if parsed.port: + span.set_attribute("server.port", parsed.port) + + span.set_attribute("gen_ai.operation.name", "invoke_agent") + span.set_attribute("gen_ai.provider.name", api_config.provider) + span.set_attribute("gen_ai.request.model", api_config.model_name) + span.set_attribute("gen_ai.output.type", "text") + span.set_attribute("gen_ai.response.model", api_config.model_name) + span.set_attribute("gen_ai.response.id", response_id) + span.set_attribute( + "gen_ai.response.finish_reasons", + ["stop"] if success else ["error"], + ) + + if conversation_id: + span.set_attribute("gen_ai.conversation.id", conversation_id) + if TRIAGE_AGENT.instructions: + span.set_attribute("gen_ai.system_instructions", TRIAGE_AGENT.instructions) + if final_agent_name: + span.set_attribute("gen_ai.agent.name", final_agent_name) + else: + span.set_attribute("gen_ai.agent.name", TRIAGE_AGENT.name) + if user_text: + span.set_attribute( + "gen_ai.input.messages", + json.dumps([{"role": "user", "content": user_text}]), + ) + if final_text: + span.set_attribute( + "gen_ai.output.messages", + json.dumps([{"role": "assistant", "content": final_text}]), + ) + + +def _extract_user_text(request: CreateResponse) -> str: + """Extract the first user text input from the request body.""" + + input = request.get("input") + if not input: + return "" + + first = input[0] + content = first.get("content", None) if isinstance(first, dict) else first + if isinstance(content, str): 
+ return content + + if isinstance(content, list): + for item in content: + text = item.get("text", None) + if text: + return text + return "" + + +def _stream_final_text(final_text: str, context: AgentRunContext): + """Yield streaming events for the provided final text.""" + + async def _async_stream(): + assembled = "" + yield ResponseCreatedEvent(response=OpenAIResponse(output=[])) + item_id = context.id_generator.generate_message_id() + yield ResponseOutputItemAddedEvent( + output_index=0, + item=ResponsesAssistantMessageItemResource( + id=item_id, + status="in_progress", + content=[ItemContentOutputText(text="", annotations=[])], + ), + ) + + words = final_text.split(" ") + for idx, token in enumerate(words): + piece = token if idx == len(words) - 1 else token + " " + assembled += piece + yield ResponseTextDeltaEvent(output_index=0, content_index=0, delta=piece) + + yield ResponseTextDoneEvent(output_index=0, content_index=0, text=assembled) + yield ResponseCompletedEvent( + response=OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="user", + id=context.response_id, + created_at=datetime.now(timezone.utc), + output=[ + ResponsesAssistantMessageItemResource( + id=item_id, + status="completed", + content=[ItemContentOutputText(text=assembled, annotations=[])], + ) + ], + ) + ) + + return _async_stream() + + +def dump(title: str, payload: object) -> None: + """Pretty print helper for the tracing demo.""" + + print(f"\n=== {title} ===") + print(json.dumps(payload, indent=2)) + + +def run_content_capture_demo() -> None: + """Simulate an agent workflow using the tracing helpers without calling an API.""" + + itinerary_prompt = [ + {"role": "system", "content": "Help travelers plan memorable weekends."}, + {"role": "user", "content": "I'm visiting Seattle this weekend."}, + ] + tool_args = {"city": "Seattle", "date": "2025-05-17"} + tool_result = { + "forecast": "Light rain, highs 60°F", + "packing_tips": ["rain jacket", "waterproof shoes"], + } + 
+ with tracing_trace("weekend-planner-simulation"): + with tracing_agent_span(name="weekend_planner_demo") as agent: + dump( + "Agent span started", + {"span_id": agent.span_id, "trace_id": agent.trace_id}, + ) + + with tracing_generation_span( + input=itinerary_prompt, + output=[ + { + "role": "assistant", + "content": ( + "Day 1 explore Pike Place Market, Day 2 visit the Museum of Pop Culture, " + "Day 3 take the Bainbridge ferry if weather allows." + ), + } + ], + model=api_config.model_name, + usage={ + "input_tokens": 128, + "output_tokens": 96, + "total_tokens": 224, + }, + ): + pass + + with tracing_function_span( + name="get_weather", + input=json.dumps(tool_args), + output=tool_result, + ): + pass + + print("\nWorkflow complete – spans exported to the configured OTLP endpoint.") + + +class WeekendPlannerContainer(FoundryCBAgent): + """Container entry point that surfaces the weekend planner agent via FoundryCBAgent.""" + + async def agent_run(self, context: AgentRunContext): + request = context.request + user_text = _extract_user_text(request) + + tracer = trace.get_tracer(__name__) + with tracer.start_as_current_span(_root_span_name(api_config.provider)) as span: + span.set_attribute("user.request", user_text) + span.set_attribute("api.host", os.getenv("API_HOST", "github")) + span.set_attribute("model.name", api_config.model_name) + span.set_attribute("agent.name", TRIAGE_AGENT.name) + span.set_attribute("triage.languages", "en,es") + + try: + result = await Runner.run(TRIAGE_AGENT, input=user_text) + final_text = str(result.final_output or "") + span.set_attribute( + "agent.response", final_text[:500] if final_text else "" + ) + final_agent = getattr(result, "last_agent", None) + if final_agent and getattr(final_agent, "name", None): + span.set_attribute("agent.final", final_agent.name) + span.set_attribute("request.success", True) + _apply_weekend_semconv( + span, + user_text=user_text, + final_text=final_text, + conversation_id=context.conversation_id, 
+ response_id=context.response_id, + final_agent_name=getattr(final_agent, "name", None), + success=True, + ) + logger.info("Weekend planning completed successfully") + except Exception as exc: # pragma: no cover - defensive logging path + span.record_exception(exc) + span.set_attribute("request.success", False) + span.set_attribute("error.type", exc.__class__.__name__) + logger.error("Error during weekend planning: %s", exc) + final_text = f"Error running agent: {exc}" + _apply_weekend_semconv( + span, + user_text=user_text, + final_text=final_text, + conversation_id=context.conversation_id, + response_id=context.response_id, + final_agent_name=None, + success=False, + ) + + if request.get("stream", False): + return _stream_final_text(final_text, context) + + response = OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="user", + id=context.response_id, + created_at=datetime.now(timezone.utc), + output=[ + ResponsesAssistantMessageItemResource( + id=context.id_generator.generate_message_id(), + status="completed", + content=[ItemContentOutputText(text=final_text, annotations=[])], + ) + ], + ) + return response + + +if __name__ == "__main__": + logger.setLevel(logging.INFO) + try: + if RUN_MODE == "demo": + run_content_capture_demo() + else: + WeekendPlannerContainer().run() + finally: + trace.get_tracer_provider().shutdown() diff --git a/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/requirements.txt b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/requirements.txt new file mode 100644 index 000000000000..faf4fd5fbe2c --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/requirements.txt @@ -0,0 +1,13 @@ +openai-agents>=0.3.3 +python-dotenv +openai>=1.42.0 +azure-identity>=1.17.0 +opentelemetry-api>=1.26.0 +opentelemetry-sdk>=1.26.0 +opentelemetry-exporter-otlp-proto-http>=1.26.0 +opentelemetry-exporter-otlp-proto-grpc>=1.26.0 
+opentelemetry-instrumentation-openai-agents-v2>=0.1.0 +rich>=13.9.0 +azure-ai-agentserver-core +# Optional tracing exporters +azure-monitor-opentelemetry-exporter>=1.0.0b16 diff --git a/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/run.sh b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/run.sh new file mode 100644 index 000000000000..e3d097e14166 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/run.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Simple local runner for the bilingual weekend planner container sample. +# Examples: +# API_HOST=github GITHUB_TOKEN=... ./run.sh + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +ROOT_DIR="$(cd "$SCRIPT_DIR/../../../.." && pwd)" + +export PYTHONPATH="$ROOT_DIR:${PYTHONPATH:-}" + +if [[ -d "$ROOT_DIR/.venv" ]]; then + # shellcheck disable=SC1090 + source "$ROOT_DIR/.venv/bin/activate" +fi + +PYTHON_BIN="${ROOT_DIR}/.venv/bin/python" +if [[ ! -x "$PYTHON_BIN" ]]; then + PYTHON_BIN="python3" +fi + +"$PYTHON_BIN" -u "$SCRIPT_DIR/main.py" diff --git a/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py b/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py new file mode 100644 index 000000000000..af9812826941 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py @@ -0,0 +1,246 @@ +# mypy: ignore-errors +"""Custom MCP simple sample. + +This sample combines the patterns from: + - langgraph `mcp_simple` (uses MultiServerMCPClient to discover tools) + - `custom_mock_agent_test` (implements a custom FoundryCBAgent with streaming events) + +Goal: When invoked in stream mode, emit MCP list tools related stream events so a +consumer (UI / CLI) can visualize tool enumeration plus a final assistant +message. In non-stream mode, return a single aggregated response summarizing +the tools. 
+ +Run: + python mcp_simple.py + +Then call (example): + curl -X POST http://localhost:8088/responses -H 'Content-Type: application/json' -d '{ + "agent": {"name": "custom_mcp", "type": "agent_reference"}, + "stream": true, + "input": "List the tools available" + }' +""" + +import datetime +import json +from typing import AsyncGenerator, List + +from langchain_mcp_adapters.client import MultiServerMCPClient + +from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent +from azure.ai.agentserver.core.models import Response as OpenAIResponse +from azure.ai.agentserver.core.models.projects import ( + ItemContentOutputText, + MCPListToolsItemResource, + MCPListToolsTool, + ResponseCompletedEvent, + ResponseCreatedEvent, + ResponseMCPListToolsCompletedEvent, + ResponseMCPListToolsInProgressEvent, + ResponseOutputItemAddedEvent, + ResponsesAssistantMessageItemResource, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, +) + + +class MCPToolsAgent(FoundryCBAgent): + def __init__(self): # noqa: D401 + super().__init__() + # Lazy init; created on first request to avoid startup latency if unused + self._mcp_client = None + + async def _get_client(self) -> MultiServerMCPClient: + if self._mcp_client is None: + # Mirror langgraph sample server config + self._mcp_client = MultiServerMCPClient( + { + "mslearn": { + "url": "https://learn.microsoft.com/api/mcp", + "transport": "streamable_http", + } + } + ) + return self._mcp_client + + async def _list_tools(self) -> List[MCPListToolsTool]: + client = await self._get_client() + try: + raw_tools = await client.get_tools() + tools: List[MCPListToolsTool] = [] + for t in raw_tools: + # Support either dict-like or attribute-based tool objects + if isinstance(t, dict): + name = t.get("name", "unknown_tool") + description = t.get("description") + schema = ( + t.get("input_schema") + or t.get("schema") + or t.get("parameters") + or {} + ) + else: # Fallback to attribute access + name = getattr(t, "name", "unknown_tool") + 
description = getattr(t, "description", None) + schema = ( + getattr(t, "input_schema", None) + or getattr(t, "schema", None) + or getattr(t, "parameters", None) + or {} + ) + tools.append( + MCPListToolsTool( + name=name, + description=description, + input_schema=schema, + ) + ) + if not tools: + raise ValueError("No tools discovered from MCP server") + return tools + except Exception: # noqa: BLE001 + # Provide deterministic fallback so sample always works offline + return [ + MCPListToolsTool( + name="fallback_echo", + description="Echo back provided text.", + input_schema={ + "type": "object", + "properties": {"text": {"type": "string"}}, + "required": ["text"], + }, + ) + ] + + async def agent_run(self, context: AgentRunContext): # noqa: D401 + """Implements the FoundryCBAgent contract. + + Streaming path emits MCP list tools events + assistant summary. + Non-stream path returns aggregated assistant message. + """ + + tools = await self._list_tools() + + if context.stream: + + async def stream() -> AsyncGenerator: # noqa: D401 + # Initial empty response context (pattern from mock sample) + yield ResponseCreatedEvent(response=OpenAIResponse(output=[])) + + # Indicate listing in progress + yield ResponseMCPListToolsInProgressEvent() + + mcp_item = MCPListToolsItemResource( + id=context.id_generator.generate("mcp_list"), + server_label="mslearn", + tools=tools, + ) + yield ResponseOutputItemAddedEvent(output_index=0, item=mcp_item) + yield ResponseMCPListToolsCompletedEvent() + + # Assistant streaming summary + assistant_item = ResponsesAssistantMessageItemResource( + id=context.id_generator.generate_message_id(), + status="in_progress", + content=[ItemContentOutputText(text="", annotations=[])], + ) + yield ResponseOutputItemAddedEvent(output_index=1, item=assistant_item) + + summary_text = "Discovered MCP tools: " + ", ".join( + t.name for t in tools + ) + assembled = "" + parts = summary_text.split(" ") + for i, token in enumerate(parts): + piece = token if i 
== len(parts) - 1 else token + " " # keep spaces + assembled += piece + yield ResponseTextDeltaEvent( + output_index=1, content_index=0, delta=piece + ) + yield ResponseTextDoneEvent( + output_index=1, content_index=0, text=assembled + ) + + final_response = OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="user", + id=context.response_id, + created_at=datetime.datetime.now(), + output=[ + mcp_item, + ResponsesAssistantMessageItemResource( + id=assistant_item.id, + status="completed", + content=[ + ItemContentOutputText(text=assembled, annotations=[]) + ], + ), + ], + ) + yield ResponseCompletedEvent(response=final_response) + + return stream() + + # Non-stream path: single assistant message + # Build a JSON-serializable summary. Avoid dumping complex model/schema objects that + # can include non-serializable metaclass references (seen in error stacktrace). + safe_tools = [] + for t in tools: + schema = t.input_schema + # Simplify schema to plain dict/str; if not directly serializable, fallback to string. + if isinstance(schema, (str, int, float, bool)) or schema is None: + safe_schema = schema + elif isinstance(schema, dict): + # Shallow copy ensuring nested values are primitive or stringified + safe_schema = {} + for k, v in schema.items(): + if isinstance(v, (str, int, float, bool, type(None), list, dict)): + safe_schema[k] = v + else: + safe_schema[k] = str(v) + else: + safe_schema = str(schema) + safe_tools.append( + { + "name": t.name, + "description": t.description, + # Provide only top-level schema keys if dict. 
+ "input_schema_keys": list(safe_schema.keys()) + if isinstance(safe_schema, dict) + else safe_schema, + } + ) + summary = { + "server_label": "mslearn", + "tool_count": len(tools), + "tools": safe_tools, + } + content = [ + ItemContentOutputText( + text="MCP tool listing completed.\n" + json.dumps(summary, indent=2), + annotations=[], + ) + ] + return OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="user", + id="id", + created_at=datetime.datetime.now(), + output=[ + ResponsesAssistantMessageItemResource( + id=context.id_generator.generate_message_id(), + status="completed", + content=content, + ) + ], + ) + + +my_agent = MCPToolsAgent() + +if __name__ == "__main__": + my_agent.run() diff --git a/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/requirements.txt b/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/requirements.txt new file mode 100644 index 000000000000..525ee6af3f7d --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/requirements.txt @@ -0,0 +1,2 @@ +langchain-mcp-adapters==0.1.11 +azure-ai-agentserver-core diff --git a/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py b/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py new file mode 100644 index 000000000000..3d4187a188f2 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py @@ -0,0 +1,104 @@ +# mypy: ignore-errors +import datetime + +from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent +from azure.ai.agentserver.core.models import Response as OpenAIResponse +from azure.ai.agentserver.core.models.projects import ( + ItemContentOutputText, + ResponseCompletedEvent, + ResponseCreatedEvent, + ResponseOutputItemAddedEvent, + ResponsesAssistantMessageItemResource, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, +) + + +def stream_events(text: str, context: AgentRunContext): + item_id = 
context.id_generator.generate_message_id() + + assembled = "" + yield ResponseCreatedEvent(response=OpenAIResponse(output=[])) + yield ResponseOutputItemAddedEvent( + output_index=0, + item=ResponsesAssistantMessageItemResource( + id=item_id, + status="in_progress", + content=[ + ItemContentOutputText( + text="", + annotations=[], + ) + ], + ), + ) + for i, token in enumerate(text.split(" ")): + piece = token if i == len(text.split(" ")) - 1 else token + " " + assembled += piece + yield ResponseTextDeltaEvent(output_index=0, content_index=0, delta=piece) + # Done with text + yield ResponseTextDoneEvent(output_index=0, content_index=0, text=assembled) + yield ResponseCompletedEvent( + response=OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="me", + id=context.response_id, + created_at=datetime.datetime.now(), + output=[ + ResponsesAssistantMessageItemResource( + id=item_id, + status="completed", + content=[ + ItemContentOutputText( + text=assembled, + annotations=[], + ) + ], + ) + ], + ) + ) + + +async def agent_run(context: AgentRunContext): + agent = context.request.get("agent") + print(f"agent:{agent}") + + if context.stream: + return stream_events( + "I am mock agent with no intelligence in stream mode.", context + ) + + # Build assistant output content + output_content = [ + ItemContentOutputText( + text="I am mock agent with no intelligence.", + annotations=[], + ) + ] + + response = OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="me", + id=context.response_id, + created_at=datetime.datetime.now(), + output=[ + ResponsesAssistantMessageItemResource( + id=context.id_generator.generate_message_id(), + status="completed", + content=output_content, + ) + ], + ) + return response + + +my_agent = FoundryCBAgent() +my_agent.agent_run = agent_run + +if __name__ == "__main__": + my_agent.run() diff --git a/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/requirements.txt 
b/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/requirements.txt new file mode 100644 index 000000000000..3f2b4e9ee6b4 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/requirements.txt @@ -0,0 +1 @@ +azure-ai-agentserver-core diff --git a/sdk/ai/azure-ai-agentserver-core/tests/conftest.py b/sdk/ai/azure-ai-agentserver-core/tests/conftest.py new file mode 100644 index 000000000000..e84bdfff3bd7 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/tests/conftest.py @@ -0,0 +1,456 @@ +""" +Pytest configuration for samples gated tests. + +This file automatically loads environment variables from .env file +and provides shared test fixtures. +""" + +import json +import logging +import os +import socket +import subprocess +import sys +import time +from pathlib import Path +from typing import Any, Dict, Optional + +import pytest +import requests +from dotenv import load_dotenv + +# Load .env file from project root or current directory +# conftest.py is at: sdk/ai/azure-ai-agentserver-core/tests/conftest.py +# project_root is two parents up from this file (the package root) +project_root = Path(__file__).parent.parent +env_paths = [ + project_root / ".env", # Project root + Path.cwd() / ".env", # Current working directory + Path(__file__).parent / ".env", # Test directory +] + +for env_path in env_paths: + if env_path.exists(): + load_dotenv(env_path, override=True) + break + +# Setup logging +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + handlers=[logging.StreamHandler(sys.stdout)], +) +logger = logging.getLogger(__name__) + + +class AgentTestClient: + """Generic test client for all agent types.""" + + def __init__( + self, + sample_name: str, + script_name: str, + endpoint: str = "/responses", # Default endpoint + base_url: Optional[str] = None, + env_vars: Optional[Dict[str, str]] = None, + timeout: int = 120, + port: Optional[int] = None, + ): + self.sample_name = sample_name + 
self.script_name = script_name + self.endpoint = endpoint + self.timeout = timeout + + # Setup paths + self.project_root = project_root # Use already defined project_root + self.sample_dir = self.project_root / "samples" / sample_name + self.original_dir = os.getcwd() + + # Determine port assignment priority: explicit param > env override > random + if env_vars and env_vars.get("DEFAULT_AD_PORT"): + self.port = int(env_vars["DEFAULT_AD_PORT"]) + elif port is not None: + self.port = port + else: + self.port = self._find_free_port() + + # Configure base URL for client requests + self.base_url = (base_url or f"http://127.0.0.1:{self.port}").rstrip("/") + + # Setup environment + # Get Agent Framework configuration (new format) + azure_ai_project_endpoint = os.getenv("AZURE_AI_PROJECT_ENDPOINT", "") + azure_ai_model_deployment = os.getenv("AZURE_AI_MODEL_DEPLOYMENT_NAME", "") + agent_project_name = os.getenv("AGENT_PROJECT_NAME", "") + + # Get legacy Azure OpenAI configuration (for backward compatibility) + main_api_key = os.getenv("AZURE_OPENAI_API_KEY", "") + main_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT", "") + main_api_version = os.getenv("OPENAI_API_VERSION", "2025-03-01-preview") + embedding_api_version = os.getenv("AZURE_OPENAI_EMBEDDINGS_API_VERSION", "2024-02-01") + + self.env_vars = { + "PYTHONIOENCODING": "utf-8", + "LANG": "C.UTF-8", + "LC_ALL": "C.UTF-8", + "PYTHONUNBUFFERED": "1", + # Agent Framework environment variables (new) + "AZURE_AI_PROJECT_ENDPOINT": azure_ai_project_endpoint, + "AZURE_AI_MODEL_DEPLOYMENT_NAME": azure_ai_model_deployment, + "AGENT_PROJECT_NAME": agent_project_name, + # Legacy Azure OpenAI environment variables (for backward compatibility) + "AZURE_OPENAI_API_KEY": main_api_key, + "AZURE_OPENAI_ENDPOINT": main_endpoint, + "AZURE_OPENAI_CHAT_DEPLOYMENT_NAME": os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", ""), + "OPENAI_API_VERSION": main_api_version, + } + + # Auto-configure embeddings to use main config if not explicitly set + # 
This allows using the same Azure OpenAI resource for both chat and embeddings + self.env_vars["AZURE_OPENAI_EMBEDDINGS_API_KEY"] = os.getenv( + "AZURE_OPENAI_EMBEDDINGS_API_KEY", + main_api_key, # Fallback to main API key + ) + self.env_vars["AZURE_OPENAI_EMBEDDINGS_ENDPOINT"] = os.getenv( + "AZURE_OPENAI_EMBEDDINGS_ENDPOINT", + main_endpoint, # Fallback to main endpoint + ) + self.env_vars["AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME"] = os.getenv( + "AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME", "" + ) + self.env_vars["AZURE_OPENAI_EMBEDDINGS_API_VERSION"] = os.getenv( + "AZURE_OPENAI_EMBEDDINGS_API_VERSION", + embedding_api_version, # Fallback to main API version + ) + self.env_vars["AZURE_OPENAI_EMBEDDINGS_MODEL_NAME"] = os.getenv( + "AZURE_OPENAI_EMBEDDINGS_MODEL_NAME", + os.getenv("AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME", ""), # Fallback to deployment name + ) + + if env_vars: + self.env_vars.update(env_vars) + + # Ensure server picks the dynamically assigned port and clients know how to reach it + self.env_vars.setdefault("DEFAULT_AD_PORT", str(self.port)) + self.env_vars.setdefault("AGENT_BASE_URL", self.base_url) + + self.process = None + self.session = requests.Session() + + @staticmethod + def _find_free_port() -> int: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.bind(("127.0.0.1", 0)) + return sock.getsockname()[1] + + def setup(self): + """Setup test environment.""" + os.chdir(self.sample_dir) + + logger.info( + "Configured %s to listen on %s", + self.sample_name, + f"{self.base_url}{self.endpoint}", + ) + + # Validate critical environment variables + # For Agent Framework samples, check new env vars first + required_vars = [] + if "agent_framework" in self.sample_name: + # Agent Framework samples use new format + required_vars = [ + "AZURE_AI_PROJECT_ENDPOINT", + "AZURE_AI_MODEL_DEPLOYMENT_NAME", + ] + else: + # Legacy samples use old format + required_vars = [ + "AZURE_OPENAI_API_KEY", + "AZURE_OPENAI_ENDPOINT", + 
"AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", + ] + + missing_vars = [] + for var in required_vars: + value = self.env_vars.get(var) or os.getenv(var) + if not value: + missing_vars.append(var) + else: + logger.debug(f"Environment variable {var} is set") + + if missing_vars: + logger.error(f"Missing required environment variables: {', '.join(missing_vars)}") + logger.error(f"Sample name: {self.sample_name}") + if "agent_framework" in self.sample_name: + logger.error("For Agent Framework samples, please set:") + logger.error(" - AZURE_AI_PROJECT_ENDPOINT") + logger.error(" - AZURE_AI_MODEL_DEPLOYMENT_NAME") + pytest.skip(f"Missing required environment variables: {', '.join(missing_vars)}") + + # Set environment variables + for key, value in self.env_vars.items(): + if value: # Only set if value is not empty + os.environ[key] = value + + # Start server + self.start_server() + + # Wait for server to be ready + if not self.wait_for_ready(): + self.cleanup() + logger.error(f"{self.sample_name} server failed to start") + pytest.skip(f"{self.sample_name} server failed to start") + + def start_server(self): + """Start the agent server.""" + logger.info( + "Starting %s server in %s on port %s", + self.sample_name, + self.sample_dir, + self.port, + ) + + env = os.environ.copy() + env.update(self.env_vars) + env["DEFAULT_AD_PORT"] = str(self.port) + env.setdefault("AGENT_BASE_URL", self.base_url) + + # Use unbuffered output to capture logs in real-time + self.process = subprocess.Popen( + [sys.executable, "-u", self.script_name], # -u for unbuffered output + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, # Merge stderr into stdout + env=env, + text=True, + encoding="utf-8", + errors="replace", + bufsize=1, # Line buffered + ) + logger.info(f"Server process started with PID {self.process.pid}") + + def wait_for_ready(self, max_attempts: int = 30, delay: float = 1.0) -> bool: + """Wait for server to be ready.""" + logger.info( + "Waiting for server to be ready at %s (max %s 
attempts)", + f"{self.base_url}{self.endpoint}", + max_attempts, + ) + + for i in range(max_attempts): + # Check process status first + if self.process.poll() is not None: + # Process has terminated - read all output + stdout, stderr = self.process.communicate() + logger.error(f"Server terminated with code {self.process.returncode}") + logger.error("=== SERVER OUTPUT ===") + if stdout: + logger.error(stdout) + if stderr: + logger.error("=== STDERR ===") + logger.error(stderr) + return False + + # Read and log any available output + self._log_server_output() + + # Check health endpoint + try: + health_response = self.session.get(f"{self.base_url}/readiness", timeout=2) + if health_response.status_code == 200: + logger.info(f"Server ready after {i + 1} attempts") + return True + else: + logger.debug(f"Health check attempt {i + 1}: status {health_response.status_code}") + except Exception as e: + logger.debug(f"Health check attempt {i + 1} failed: {e}") + # After several failed attempts, show server output for debugging + if i > 5 and i % 5 == 0: + logger.warning(f"Server still not ready after {i + 1} attempts, checking output...") + self._log_server_output(force=True) + + time.sleep(delay) + + # Timeout reached - dump all server output + logger.error(f"Server failed to start within {max_attempts} attempts") + self._dump_server_output() + return False + + def cleanup(self): + """Cleanup resources.""" + if self.process: + try: + self.process.terminate() + self.process.wait(timeout=5) + except Exception: + self.process.kill() + + os.chdir(self.original_dir) + + def request( + self, + input_data: Any, + stream: bool = False, + timeout: Optional[int] = None, + debug: bool = False, + ) -> requests.Response: + """Send request to the server.""" + url = f"{self.base_url}{self.endpoint}" + timeout = timeout or self.timeout + + payload = {"input": input_data, "stream": stream} + + headers = { + "Content-Type": "application/json; charset=utf-8", + "Accept": "application/json; 
charset=utf-8", + } + + if debug: + logger.info(f">>> POST {url}") + logger.info(f">>> Headers: {headers}") + logger.info(f">>> Payload: {json.dumps(payload, indent=2)}") + + try: + response = self.session.post(url, json=payload, headers=headers, timeout=timeout, stream=stream) + + if debug: + logger.info(f"<<< Status: {response.status_code}") + logger.info(f"<<< Headers: {dict(response.headers)}") + + # For non-streaming responses, log the body + if not stream: + try: + content = response.json() + logger.info(f"<<< Body: {json.dumps(content, indent=2)}") + except (ValueError, requests.exceptions.JSONDecodeError): + logger.info(f"<<< Body: {response.text}") + + return response + + except Exception as e: + logger.error(f"Request failed: {e}") + self._log_server_output() + raise + + def _log_server_output(self, force=False): + """Log server output for debugging.""" + if self.process and self.process.poll() is None and hasattr(self.process, "stdout"): + try: + import select + + if hasattr(select, "select"): + # Use non-blocking read + ready, _, _ = select.select([self.process.stdout], [], [], 0.1) + if ready: + # Read available lines without blocking + import fcntl + import os as os_module + + # Set non-blocking mode + fd = self.process.stdout.fileno() + fl = fcntl.fcntl(fd, fcntl.F_GETFL) + fcntl.fcntl(fd, fcntl.F_SETFL, fl | os_module.O_NONBLOCK) + + try: + while True: + line = self.process.stdout.readline() + if not line: + break + line = line.strip() + if line: + if force or any( + keyword in line.lower() + for keyword in [ + "error", + "exception", + "traceback", + "failed", + ] + ): + logger.error(f"Server output: {line}") + else: + logger.info(f"Server output: {line}") + except BlockingIOError: + pass # No more data available + except Exception as e: + if force: + logger.debug(f"Could not read server output: {e}") + + def _dump_server_output(self): + """Dump all remaining server output.""" + if self.process: + try: + # Try to read any remaining output + if 
self.process.poll() is None: + # Process still running, terminate and get output + self.process.terminate() + try: + stdout, stderr = self.process.communicate(timeout=5) + except subprocess.TimeoutExpired: + self.process.kill() + stdout, stderr = self.process.communicate() + else: + stdout, stderr = self.process.communicate() + + if stdout: + logger.error(f"=== FULL SERVER OUTPUT ===\n{stdout}") + if stderr: + logger.error(f"=== FULL SERVER STDERR ===\n{stderr}") + except Exception as e: + logger.error(f"Failed to dump server output: {e}") + + +@pytest.fixture +def basic_client(): + """Fixture for basic agent tests.""" + client = AgentTestClient( + sample_name="agent_framework/basic_simple", + script_name="minimal_example.py", + endpoint="/responses", + timeout=60, + ) + client.setup() + yield client + client.cleanup() + + +@pytest.fixture +def workflow_client(): + """Fixture for workflow agent tests (reflection pattern with Worker + Reviewer).""" + client = AgentTestClient( + sample_name="agent_framework/workflow_agent_simple", + script_name="workflow_agent_simple.py", + endpoint="/responses", # Changed from /runs to /responses + timeout=600, # Increased timeout for workflow agent (reflection loop may need multiple iterations) + ) + client.setup() + yield client + client.cleanup() + + +@pytest.fixture +def mcp_client(): + """Fixture for MCP simple agent tests (uses Microsoft Learn MCP, no auth required).""" + client = AgentTestClient( + sample_name="agent_framework/mcp_simple", + script_name="mcp_simple.py", + endpoint="/responses", # Changed from /runs to /responses + timeout=120, + ) + client.setup() + yield client + client.cleanup() + + +@pytest.fixture +def mcp_apikey_client(): + """Fixture for MCP API Key agent tests (uses GitHub MCP, requires GITHUB_TOKEN).""" + client = AgentTestClient( + sample_name="agent_framework/mcp_apikey", + script_name="mcp_apikey.py", + endpoint="/responses", # Changed from /runs to /responses + timeout=120, + 
env_vars={"GITHUB_TOKEN": os.getenv("GITHUB_TOKEN", "")}, + ) + client.setup() + yield client + client.cleanup() diff --git a/sdk/ai/azure-ai-agentserver-core/tests/env-template b/sdk/ai/azure-ai-agentserver-core/tests/env-template new file mode 100644 index 000000000000..33c60226b90b --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/tests/env-template @@ -0,0 +1,31 @@ +# ===== Agent Framework Configuration (NEW - Required for agent_framework samples) ===== +# Required for all Agent Framework samples (basic_simple, mcp_simple, mcp_apikey, workflow_agent_simple) +AZURE_AI_PROJECT_ENDPOINT=https://<your-resource>.region.project.azure.ai/ +AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o + +# Optional: Azure AI Project resource ID for telemetry +# Format: /subscriptions/<subscription-id>/resourceGroups/<resource-group>/providers/Microsoft.MachineLearningServices/workspaces/<workspace-name> +AGENT_PROJECT_NAME= + +# GitHub Token for MCP samples (mcp_simple, mcp_apikey) +# Get from: https://github.com/settings/tokens +GITHUB_TOKEN=your-github-token-here + +# ===== Legacy Azure OpenAI Configuration (For backward compatibility) ===== +AZURE_OPENAI_API_KEY=your-api-key-here +AZURE_OPENAI_ENDPOINT=https://your-endpoint.openai.azure.com/ +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME=gpt-4o +OPENAI_API_VERSION=2025-03-01-preview + +# Azure OpenAI Embeddings Configuration (for RAG tests) +# If not set, will use the same values as Chat API +AZURE_OPENAI_EMBEDDINGS_API_KEY=your-embeddings-api-key-here +AZURE_OPENAI_EMBEDDINGS_ENDPOINT=https://your-endpoint.openai.azure.com/ +AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME=text-embedding-ada-002 +AZURE_OPENAI_EMBEDDINGS_API_VERSION=2025-03-01-preview + +# Note: +# - Copy this file to .env and fill in your actual values +# - Never commit .env file to git (it's in .gitignore) +# - In CI/CD, these values are loaded from GitHub Secrets + diff --git a/sdk/ai/azure-ai-agentserver-core/tests/test_custom.py b/sdk/ai/azure-ai-agentserver-core/tests/test_custom.py new file mode 100644 index 000000000000..f8f2075e22e5 --- 
/dev/null +++ b/sdk/ai/azure-ai-agentserver-core/tests/test_custom.py @@ -0,0 +1,298 @@ +#!/usr/bin/env python3 +""" +Custom agents samples gated test. + +This module tests all Custom agent samples with parametrized test cases. +Each sample gets its own test class with multiple test scenarios. +""" + +import os +import socket +import subprocess +import sys +import time +from pathlib import Path +from typing import Any + +import pytest +import requests + +# Add the project root to the path +project_root = Path(__file__).parent.parent +sys.path.insert(0, str(project_root)) + + +class BaseCustomAgentTest: + """Base class for Custom agent sample tests with common utilities.""" + + def __init__(self, sample_name: str, script_name: str): + """ + Initialize test configuration. + + Args: + sample_name: Name of the sample directory (e.g., 'simple_mock_agent') + script_name: Name of the Python script to run (e.g., 'custom_mock_agent_test.py') + """ + self.sample_name = sample_name + self.script_name = script_name + self.sample_dir = project_root / "samples" / sample_name + self.port = self._find_free_port() + self.base_url = f"http://127.0.0.1:{self.port}" + self.responses_endpoint = f"{self.base_url}/responses" + self.process = None + self.original_dir = os.getcwd() + + def setup(self): + """Set up environment (dependencies are pre-installed in CI/CD).""" + os.chdir(self.sample_dir) + + def start_server(self): + """Start the agent server in background.""" + # Prepare environment with UTF-8 encoding to handle emoji in agent output + env = os.environ.copy() + env["PYTHONIOENCODING"] = "utf-8" + env["DEFAULT_AD_PORT"] = str(self.port) + env.setdefault("AGENT_BASE_URL", self.base_url) + + # Use subprocess.DEVNULL to avoid buffering issues + self.process = subprocess.Popen( + [sys.executable, self.script_name], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + env=env, + ) + + def wait_for_ready(self, max_attempts: int = 30, delay: float = 1.0) -> bool: + """Wait for 
the server to be ready.""" + for _i in range(max_attempts): + # Check if process is still running + if self.process and self.process.poll() is not None: + # Process has terminated + print(f"Server process terminated unexpectedly with exit code {self.process.returncode}") + return False + + try: + response = requests.get(f"{self.base_url}/readiness", timeout=1) + if response.status_code == 200: + return True + except requests.exceptions.RequestException: + pass + + try: + response = requests.get(self.base_url, timeout=1) + if response.status_code in [200, 404]: + return True + except requests.exceptions.RequestException: + pass + + time.sleep(delay) + + # Server didn't start - print diagnostics + if self.process: + self.process.terminate() + stdout, stderr = self.process.communicate(timeout=5) + print(f"Server failed to start. Logs:\n{stdout}\nErrors:\n{stderr}") + + return False + + def send_request(self, input_data: Any, stream: bool = False, timeout: int = 30) -> requests.Response: + """ + Send a request to the agent. 
+ + Args: + input_data: Input to send (string or structured message) + stream: Whether to use streaming + timeout: Request timeout in seconds + + Returns: + Response object + """ + payload = { + "agent": {"name": "mock_agent", "type": "agent_reference"}, + "input": input_data, + "stream": stream, + } + + # Note: Only set stream parameter for requests.post if streaming is requested + # Otherwise, let requests handle response body reading with timeout + if stream: + return requests.post(self.responses_endpoint, json=payload, timeout=timeout, stream=True) + else: + return requests.post(self.responses_endpoint, json=payload, timeout=timeout) + + def cleanup(self): + """Clean up resources and restore directory.""" + if self.process: + try: + self.process.terminate() + self.process.wait(timeout=5) + except Exception: + self.process.kill() + + os.chdir(self.original_dir) + + @staticmethod + def _find_free_port() -> int: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.bind(("127.0.0.1", 0)) + return sock.getsockname()[1] + + +class TestSimpleMockAgent: + """Test suite for Simple Mock Agent - uses shared server.""" + + @pytest.fixture(scope="class") + def mock_server(self): + """Shared server instance for all mock agent tests.""" + tester = BaseCustomAgentTest("simple_mock_agent", "custom_mock_agent_test.py") + tester.setup() + tester.start_server() + + if not tester.wait_for_ready(): + tester.cleanup() + pytest.fail("Simple Mock Agent server failed to start") + + yield tester + tester.cleanup() + + @pytest.mark.parametrize( + "input_text,expected_keywords,description", + [ + ("Hello, mock agent!", ["mock"], "simple_greeting"), + ("Test message", ["mock"], "test_message"), + ("What can you do?", ["mock"], "capability_query"), + ], + ) + def test_mock_agent_queries(self, mock_server, input_text: str, expected_keywords: list, description: str): + """Test mock agent with various queries.""" + response = mock_server.send_request(input_text, 
stream=False) + + assert response.status_code == 200, f"Expected 200, got {response.status_code}" + + response_text = response.text.lower() + found_keyword = any(kw.lower() in response_text for kw in expected_keywords) + assert found_keyword, f"Expected one of {expected_keywords} in response" + + def test_streaming_response(self, mock_server): + """Test mock agent with streaming response.""" + response = mock_server.send_request("Hello, streaming test!", stream=True) + + assert response.status_code == 200, f"Expected 200, got {response.status_code}" + + # Verify we can read streaming data + lines_read = 0 + for line in response.iter_lines(): + if line: + lines_read += 1 + if lines_read >= 3: + break + + assert lines_read > 0, "Expected to read at least one line from streaming response" + + +@pytest.mark.skip +class TestMcpSimple: + """Test suite for Custom MCP Simple - uses Microsoft Learn MCP.""" + + @pytest.fixture(scope="class") + def mcp_server(self): + """Shared server instance for all MCP Simple tests.""" + tester = BaseCustomAgentTest("mcp_simple", "mcp_simple.py") + tester.setup() + tester.start_server() + + if not tester.wait_for_ready(): + tester.cleanup() + pytest.fail("MCP Simple server failed to start") + + yield tester + tester.cleanup() + + @pytest.mark.parametrize( + "input_text,expected_keywords,description", + [ + ( + "What Azure services can I use for image generation?", + ["image", "generation", "azure"], + "image_generation", + ), + ( + "Show me documentation about Azure App Service", + ["app", "service", "azure"], + "app_service_docs", + ), + ], + ) + def test_mcp_operations(self, mcp_server, input_text: str, expected_keywords: list, description: str): + """Test MCP Simple with Microsoft Learn queries.""" + response = mcp_server.send_request(input_text, stream=False, timeout=60) + + assert response.status_code == 200, f"Expected 200, got {response.status_code}" + + response_text = response.text.lower() + found_keyword = any(kw.lower() in 
response_text for kw in expected_keywords) + assert found_keyword, f"Expected one of {expected_keywords} in response" + + +@pytest.mark.skip +class TestBilingualWeekendPlanner: + """Test suite for the bilingual weekend planner custom sample.""" + + @pytest.fixture(scope="class") + def weekend_planner_server(self): + """Shared server fixture for bilingual weekend planner tests.""" + pytest.importorskip("azure.identity") + pytest.importorskip("agents") + pytest.importorskip("openai") + + tester = BaseCustomAgentTest("bilingual_weekend_planner", "main.py") + tester.setup() + + env_overrides = { + "API_HOST": "github", + "GITHUB_TOKEN": os.environ.get("GITHUB_TOKEN", "unit-test-token"), + "GITHUB_OPENAI_BASE_URL": os.environ.get("GITHUB_OPENAI_BASE_URL", "http://127.0.0.1:65535"), + "WEEKEND_PLANNER_MODE": "container", + } + original_env = {key: os.environ.get(key) for key in env_overrides} + os.environ.update(env_overrides) + + try: + tester.start_server() + + if not tester.wait_for_ready(max_attempts=60, delay=1.0): + tester.cleanup() + pytest.fail("Bilingual weekend planner server failed to start") + + yield tester + finally: + tester.cleanup() + for key, value in original_env.items(): + if value is None: + os.environ.pop(key, None) + else: + os.environ[key] = value + + def test_offline_planner_response(self, weekend_planner_server): + """Verify the planner responds with a graceful error when the model is unreachable.""" + response = weekend_planner_server.send_request("Plan my weekend in Seattle", stream=False, timeout=60) + + assert response.status_code == 200, f"Expected 200, got {response.status_code}" + + response_text = response.text.lower() + assert "error running agent" in response_text + + def test_streaming_offline_response(self, weekend_planner_server): + """Verify streaming responses deliver data even when the model call fails.""" + response = weekend_planner_server.send_request("Planifica mi fin de semana en Madrid", stream=True, timeout=60) + + assert 
response.status_code == 200, f"Expected 200, got {response.status_code}" + + lines_read = 0 + for line in response.iter_lines(): + if line: + lines_read += 1 + if lines_read >= 3: + break + + assert lines_read > 0, "Expected to read at least one line from streaming response" diff --git a/sdk/ai/azure-ai-agentserver-langgraph/CHANGELOG.md b/sdk/ai/azure-ai-agentserver-langgraph/CHANGELOG.md new file mode 100644 index 000000000000..7ce1742693b8 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/CHANGELOG.md @@ -0,0 +1,7 @@ +# Release History + +## 1.0.0a1 (2025-11-06) + +### Features Added + +First version diff --git a/sdk/ai/azure-ai-agentserver-langgraph/LICENSE b/sdk/ai/azure-ai-agentserver-langgraph/LICENSE new file mode 100644 index 000000000000..63447fd8bbbf --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-langgraph/MANIFEST.in b/sdk/ai/azure-ai-agentserver-langgraph/MANIFEST.in new file mode 100644 index 000000000000..452c12399ff7 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/MANIFEST.in @@ -0,0 +1,9 @@ +include *.md +include LICENSE +recursive-include tests *.py +recursive-include samples *.py *.md +recursive-include doc *.rst *.md +include azure/__init__.py +include azure/ai/__init__.py +include azure/ai/agentserver/__init__.py +include azure/ai/agentserver/langgraph/py.typed \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-langgraph/README.md b/sdk/ai/azure-ai-agentserver-langgraph/README.md new file mode 100644 index 000000000000..e965d1a5972e --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/README.md @@ -0,0 +1,64 @@ +# Azure AI LangGraph Agent Server Adapter for Python + + +## Getting started + +```bash +pip install azure-ai-agentserver-langgraph +``` + + +## Key concepts + +Azure AI Agent Server wraps your LangGraph agent and hosts it in the cloud. + + +## Examples + +```python +# your existing agent +from my_langgraph_agent import my_awesome_agent + +# langgraph utils +from azure.ai.agentserver.langgraph import from_langgraph + +if __name__ == "__main__": + # with this simple line, your agent will be hosted on http://localhost:8088 + from_langgraph(my_awesome_agent).run() + +``` + +**Note** +If your LangGraph agent does not use LangGraph's builtin [MessagesState](https://langchain-ai.github.io/langgraph/concepts/low_level/?h=messagesstate#messagesstate), you should implement your own `LanggraphStateConverter` and provide it to `from_langgraph`. + +Reference this [example](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-agentserver-langgraph/samples/custom_state/main.py) for more details. + + +## Troubleshooting + +First run your agent with azure-ai-agentserver-langgraph locally. + +If it works locally but fails in the cloud: 
Check your logs in the Application Insights resource connected to your Azure AI Foundry Project. + + +## Next steps + +Please visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agentserver-langgraph/samples) folder. There are several samples showing how to build your agent with azure-ai-agentserver. + + +## Contributing + +This project welcomes contributions and suggestions. Most contributions require +you to agree to a Contributor License Agreement (CLA) declaring that you have +the right to, and actually do, grant us the rights to use your contribution. +For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether +you need to provide a CLA and decorate the PR appropriately (e.g., label, +comment). Simply follow the instructions provided by the bot. You will only +need to do this once across all repos using our CLA. + +This project has adopted the +[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information, +see the Code of Conduct FAQ or contact opencode@microsoft.com with any +additional questions or comments. 
diff --git a/sdk/ai/azure-ai-agentserver-langgraph/azure/__init__.py b/sdk/ai/azure-ai-agentserver-langgraph/azure/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/__init__.py b/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/agentserver/__init__.py b/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/agentserver/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/agentserver/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/__init__.py b/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/__init__.py new file mode 100644 index 000000000000..ed2e0d4d493a --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/__init__.py @@ -0,0 +1,21 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +__path__ = __import__("pkgutil").extend_path(__path__, __name__) + +from typing import TYPE_CHECKING, Optional + +from ._version import VERSION + +if TYPE_CHECKING: # pragma: no cover + from . 
if TYPE_CHECKING:  # pragma: no cover
    from . import models


def from_langgraph(agent, state_converter: Optional["models.LanggraphStateConverter"] = None):
    """Wrap a compiled LangGraph graph in a servable LangGraph adapter.

    :param agent: The compiled LangGraph graph to serve.
    :param state_converter: Custom state converter; required when the graph's
        state is not LangGraph's built-in MessagesState.
    :return: A LangGraphAdapter hosting the given graph.
    """
    # Imported lazily so importing this package does not pull in langgraph
    # (and its dependencies) until an adapter is actually created.
    from .langgraph import LangGraphAdapter

    return LangGraphAdapter(agent, state_converter=state_converter)


__all__ = ["from_langgraph"]
__version__ = VERSION
# ---------------------------------------------------------
# pylint: disable=logging-fstring-interpolation,broad-exception-caught
# mypy: disable-error-code="assignment,arg-type"
import os
import re
from typing import Optional

from langchain_core.runnables import RunnableConfig
from langgraph.graph.state import CompiledStateGraph

from azure.ai.agentserver.core.constants import Constants
from azure.ai.agentserver.core.logger import get_logger
from azure.ai.agentserver.core.server.base import FoundryCBAgent
from azure.ai.agentserver.core.server.common.agent_run_context import AgentRunContext

from .models import (
    LanggraphMessageStateConverter,
    LanggraphStateConverter,
)
from .models.utils import is_state_schema_valid

logger = get_logger()


class LangGraphAdapter(FoundryCBAgent):
    """
    Adapter for LangGraph Agent.

    Bridges a compiled LangGraph graph to the FoundryCBAgent server contract:
    incoming requests are converted to graph state by a state converter, the
    graph is invoked (optionally streaming), and the resulting state is
    converted back into response objects.
    """

    def __init__(self, graph: CompiledStateGraph, state_converter: Optional[LanggraphStateConverter] = None):
        """
        Initialize the LangGraphAdapter with a CompiledStateGraph.

        :param graph: The LangGraph StateGraph to adapt.
        :type graph: CompiledStateGraph
        :param state_converter: custom state converter. Required if graph state is not MessagesState.
        :type state_converter: Optional[LanggraphStateConverter]
        :raises ValueError: If no converter is given and the graph's state
            schema is not compatible with the MessagesState converter.
        """
        super().__init__()
        self.graph = graph
        self.azure_ai_tracer = None  # set lazily in init_tracing_internal
        if not state_converter:
            # Fall back to the built-in MessagesState converter only when the
            # graph's state schema is known to be compatible with it.
            if is_state_schema_valid(self.graph.builder.state_schema):
                self.state_converter = LanggraphMessageStateConverter()
            else:
                raise ValueError("state_converter is required for non-MessagesState graph.")
        else:
            self.state_converter = state_converter

    async def agent_run(self, context: AgentRunContext):
        """
        Entry point for a single agent run.

        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: The full response for non-streaming runs, or an async
            generator of stream events when context.stream is set.
        """
        input_data = self.state_converter.request_to_state(context)
        logger.debug(f"Converted input data: {input_data}")
        if not context.stream:
            response = await self.agent_run_non_stream(input_data, context)
            return response
        # The async generator is returned un-awaited; the caller drives it.
        return self.agent_run_astream(input_data, context)

    def init_tracing_internal(self, exporter_endpoint=None, app_insights_conn_str=None):
        """
        Configure tracing for the LangGraph/LangSmith stack.

        Enables LangSmith OTEL export via environment variables and, when an
        Application Insights connection string is provided, attaches the
        AzureAIOpenTelemetryTracer callback. Failures to create the tracer are
        logged and ignored so tracing stays best-effort.
        """
        # set env vars for langsmith
        os.environ["LANGSMITH_OTEL_ENABLED"] = "true"
        os.environ["LANGSMITH_TRACING"] = "true"
        os.environ["LANGSMITH_OTEL_ONLY"] = "true"
        if app_insights_conn_str:
            # setup azure ai telemetry callbacks
            try:
                from langchain_azure_ai.callbacks.tracers import AzureAIOpenTelemetryTracer

                self.azure_ai_tracer = AzureAIOpenTelemetryTracer(
                    connection_string=app_insights_conn_str,
                    enable_content_recording=True,
                    name=self.get_agent_identifier(),
                )
                logger.info("AzureAIOpenTelemetryTracer initialized successfully.")
            except Exception as e:
                logger.error(f"Failed to import AzureAIOpenTelemetryTracer, ignore: {e}")

    def setup_otlp_exporter(self, endpoint, provider):
        # Normalize the endpoint to the OTLP HTTP traces path before delegating.
        endpoint = self.format_otlp_endpoint(endpoint)
        return super().setup_otlp_exporter(endpoint, provider)

    def get_trace_attributes(self):
        # Tag emitted spans with a LangGraph-specific service namespace.
        attrs = super().get_trace_attributes()
        attrs["service.namespace"] = "azure.ai.agentshosting.langgraph"
        return attrs

    async def agent_run_non_stream(self, input_data: dict, context: AgentRunContext):
        """
        Run the agent with non-streaming response.

        :param input_data: The input data to run the agent with.
        :type input_data: dict
        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: The response of the agent run.
        :rtype: dict
        """

        try:
            config = self.create_runnable_config(context)
            stream_mode = self.state_converter.get_stream_mode(context)
            result = await self.graph.ainvoke(input_data, config=config, stream_mode=stream_mode)
            output = self.state_converter.state_to_response(result, context)
            return output
        except Exception as e:
            logger.error(f"Error during agent run: {e}")
            raise e

    async def agent_run_astream(self, input_data: dict, context: AgentRunContext):
        """
        Run the agent with streaming response.

        :param input_data: The input data to run the agent with.
        :type input_data: dict
        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: An async generator yielding the response stream events.
        :rtype: AsyncGenerator[dict]
        """
        try:
            logger.info(f"Starting streaming agent run {context.response_id}")
            config = self.create_runnable_config(context)
            stream_mode = self.state_converter.get_stream_mode(context)
            stream = self.graph.astream(input=input_data, config=config, stream_mode=stream_mode)
            async for result in self.state_converter.state_to_response_stream(stream, context):
                yield result
        except Exception as e:
            logger.error(f"Error during streaming agent run: {e}")
            raise e

    def create_runnable_config(self, context: AgentRunContext) -> RunnableConfig:
        """
        Create a RunnableConfig from the converted request data.

        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: The RunnableConfig for the agent run.
        :rtype: RunnableConfig
        """
        config = RunnableConfig(
            configurable={
                # Reuse the conversation id as LangGraph's thread_id so
                # per-thread state follows the conversation.
                "thread_id": context.conversation_id,
            },
            # Attach the Azure AI tracer only when tracing was initialized.
            callbacks=[self.azure_ai_tracer] if self.azure_ai_tracer else None,
        )
        return config

    def format_otlp_endpoint(self, endpoint: str) -> str:
        """Normalize an endpoint to '<scheme>://<host>/v1/traces'.

        :param endpoint: The base OTLP endpoint URL.
        :type endpoint: str

        :return: The endpoint with the OTLP HTTP traces path appended, or the
            original value unchanged when it does not look like an http(s) URL.
        :rtype: str
        """
        m = re.match(r"^(https?://[^/]+)", endpoint)
        if m:
            return f"{m.group(1)}/v1/traces"
        return endpoint

    def get_agent_identifier(self) -> str:
        """Resolve the agent's display identifier.

        Prefers the AGENT_NAME environment variable, then AGENT_ID, then a
        fixed default.

        :return: The agent identifier string.
        :rtype: str
        """
        agent_name = os.getenv(Constants.AGENT_NAME)
        if agent_name:
            return agent_name
        agent_id = os.getenv(Constants.AGENT_ID)
        if agent_id:
            return agent_id
        return "AgentsHosting-LangGraph"
+# --------------------------------------------------------- +from .langgraph_request_converter import LangGraphRequestConverter +from .langgraph_response_converter import LangGraphResponseConverter +from .langgraph_state_converter import LanggraphMessageStateConverter, LanggraphStateConverter +from .langgraph_stream_response_converter import LangGraphStreamResponseConverter + +__all__ = [ + "LangGraphRequestConverter", + "LangGraphResponseConverter", + "LangGraphStreamResponseConverter", + "LanggraphStateConverter", + "LanggraphMessageStateConverter", +] diff --git a/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/langgraph_request_converter.py b/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/langgraph_request_converter.py new file mode 100644 index 000000000000..03e64958d3b0 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/langgraph_request_converter.py @@ -0,0 +1,156 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
# ---------------------------------------------------------
# pylint: disable=logging-fstring-interpolation
# mypy: ignore-errors
import json
from typing import Dict, List

from langchain_core.messages import (
    AIMessage,
    AnyMessage,
    HumanMessage,
    SystemMessage,
    ToolMessage,
)
from langchain_core.messages.tool import ToolCall

from azure.ai.agentserver.core.logger import get_logger
from azure.ai.agentserver.core.models import CreateResponse, openai as openai_models, projects as project_models

logger = get_logger()

# OpenAI response role -> LangChain message class used to represent it.
role_mapping = {
    project_models.ResponsesMessageRole.USER: HumanMessage,
    project_models.ResponsesMessageRole.SYSTEM: SystemMessage,
    project_models.ResponsesMessageRole.ASSISTANT: AIMessage,
    # project_models.ResponsesMessageRole.DEVELOPER: ToolMessage,
}

# OpenAI item-content type -> LangChain content-part "type" value.
# Both input_* and output_* variants collapse onto the same LangChain name.
item_content_type_mapping = {
    project_models.ItemContentType.INPUT_TEXT: "text",
    project_models.ItemContentType.INPUT_AUDIO: "audio",
    project_models.ItemContentType.INPUT_IMAGE: "image",
    project_models.ItemContentType.INPUT_FILE: "file",
    project_models.ItemContentType.OUTPUT_TEXT: "text",
    project_models.ItemContentType.OUTPUT_AUDIO: "audio",
    # project_models.ItemContentType.REFUSAL: "refusal",
}


class LangGraphRequestConverter:
    """Converts a CreateResponse request payload into LangGraph MessagesState input."""

    def __init__(self, data: CreateResponse):
        # Raw request payload; treated as read-only during conversion.
        self.data: CreateResponse = data

    def convert(self) -> dict:
        """
        Build the LangGraph input state from the request.

        Instructions (if present) become a leading SystemMessage; a string
        input becomes a single HumanMessage; a list input is converted item
        by item.

        :return: A dict of the form {"messages": [...]} for a MessagesState graph.
        :rtype: dict
        :raises ValueError: If the request input is neither a string nor a list.
        """
        # Convert the CreateRunRequest input to a format suitable for LangGraph
        langgraph_input = {"messages": []}

        instructions = self.data.get("instructions")
        if instructions and isinstance(instructions, str):
            langgraph_input["messages"].append(SystemMessage(content=instructions))

        input = self.data.get("input")
        if isinstance(input, str):
            langgraph_input["messages"].append(HumanMessage(content=input))
        elif isinstance(input, List):
            for inner in input:
                message = self.convert_input(inner)
                langgraph_input["messages"].append(message)
        else:
            raise ValueError(f"Unsupported input type: {type(input)}, {input}")
        return langgraph_input

    def convert_input(self, item: openai_models.ResponseInputItemParam) -> AnyMessage:
        """
        Convert ResponseInputItemParam to a LangGraph message

        :param item: The ResponseInputItemParam to convert from request.
        :type item: openai_models.ResponseInputItemParam

        :return: The converted LangGraph message.
        :rtype: AnyMessage
        :raises ValueError: If the item type is not message/function_call/function_call_output.
        """
        # Items with no explicit type default to plain messages.
        item_type = item.get("type", project_models.ItemType.MESSAGE)
        if item_type == project_models.ItemType.MESSAGE:
            # this is a message
            return self.convert_message(item)
        if item_type == project_models.ItemType.FUNCTION_CALL:
            return self.convert_function_call(item)
        if item_type == project_models.ItemType.FUNCTION_CALL_OUTPUT:
            return self.convert_function_call_output(item)
        raise ValueError(f"Unsupported OpenAIItemParam type: {item_type}, {item}")

    def convert_message(self, message: dict) -> AnyMessage:
        """
        Convert a message dict to a LangGraph message

        The role selects the LangChain message class via role_mapping; content
        may be a plain string or a list of content parts.

        :param message: The message dict to convert.
        :type message: dict

        :return: The converted LangGraph message.
        :rtype: AnyMessage
        :raises ValueError: If the message has no content or an unsupported content type.
        """
        content = message.get("content")
        role = message.get("role", project_models.ResponsesMessageRole.USER)
        if not content:
            raise ValueError(f"Message missing content: {message}")
        if isinstance(content, str):
            return role_mapping[role](content=content)
        if isinstance(content, list):
            return role_mapping[role](content=self.convert_OpenAIItemContentList(content))
        raise ValueError(f"Unsupported ResponseMessagesItemParam content type: {type(content)}, {content}")

    def convert_function_call(self, item: dict) -> AnyMessage:
        """
        Convert a function_call item into an AIMessage carrying one ToolCall.

        :param item: The function_call item dict from the request.
        :type item: dict

        :return: An AIMessage with a single tool call and empty content.
        :rtype: AnyMessage
        :raises ValueError: If the arguments are not valid JSON or the item is malformed.
        """
        try:
            item = openai_models.ResponseFunctionToolCallParam(**item)
            argument = item.get("arguments", None)
            args = json.loads(argument) if argument else {}
        except json.JSONDecodeError as e:
            raise ValueError(f"Invalid JSON in function call arguments: {item}") from e
        except Exception as e:
            raise ValueError(f"Invalid function call item: {item}") from e
        return AIMessage(tool_calls=[ToolCall(id=item.get("call_id"), name=item.get("name"), args=args)], content="")

    def convert_function_call_output(self, item: dict) -> ToolMessage:
        """
        Convert a function_call_output item into a ToolMessage.

        :param item: The function_call_output item dict from the request.
        :type item: dict

        :return: A ToolMessage linked to the originating call via tool_call_id.
        :rtype: ToolMessage
        :raises ValueError: If the item is malformed or its output type is unsupported.
        """
        try:
            item = openai_models.response_input_item_param.FunctionCallOutput(**item)  # pylint: disable=no-member
        except Exception as e:
            raise ValueError(f"Invalid function call output item: {item}") from e

        output = item.get("output", None)
        if isinstance(output, str):
            return ToolMessage(content=output, tool_call_id=item.get("call_id"))
        if isinstance(output, list):
            return ToolMessage(content=self.convert_OpenAIItemContentList(output), tool_call_id=item.get("call_id"))
        raise ValueError(f"Unsupported function call output type: {type(output)}, {output}")

    def convert_OpenAIItemContentList(self, content: List[Dict]) -> List[Dict]:
        """
        Convert ItemContent to a list format

        :param content: The list of ItemContent to convert.
        :type content: List[Dict]

        :return: The converted list of ItemContent.
        :rtype: List[Dict]
        """
        result = []
        for item in content:
            result.append(self.convert_OpenAIItemContent(item))
        return result

    def convert_OpenAIItemContent(self, content: Dict) -> Dict:
        """
        Convert ItemContent to a dict format

        :param content: The ItemContent to convert.
        :type content: Dict

        :return: The converted ItemContent.
        :rtype: Dict
        """
        # Shallow copy; only the "type" discriminator is rewritten to the
        # LangChain content-part name (unknown types pass through unchanged).
        res = content.copy()
        content_type = content.get("type")
        res["type"] = item_content_type_mapping.get(content_type, content_type)
        return res
# ---------------------------------------------------------
# pylint: disable=logging-fstring-interpolation,broad-exception-caught,logging-not-lazy
# mypy: disable-error-code="valid-type,call-overload,attr-defined"
import copy
from typing import List

from langchain_core import messages
from langchain_core.messages import AnyMessage

from azure.ai.agentserver.core.logger import get_logger
from azure.ai.agentserver.core.models import projects as project_models
from azure.ai.agentserver.core.server.common.agent_run_context import AgentRunContext

from .utils import extract_function_call

logger = get_logger()


class LangGraphResponseConverter:
    """Converts final (non-streaming) LangGraph output into OpenAI-style item resources."""

    def __init__(self, context: AgentRunContext, output):
        self.context = context
        # Graph output: an iterable of {node_name: {"messages": [...]}} steps.
        self.output = output

    def convert(self) -> list[project_models.ItemResource]:
        """
        Flatten all node outputs into a list of ItemResource objects.

        Messages that fail conversion are logged and skipped rather than
        aborting the whole response.

        :return: Converted item resources in graph-output order.
        :rtype: list[project_models.ItemResource]
        """
        res = []
        for step in self.output:
            for node_name, node_output in step.items():
                message_arr = node_output.get("messages")
                if not message_arr:
                    logger.warning(f"No messages found in node {node_name} output: {node_output}")
                    continue
                for message in message_arr:
                    try:
                        converted = self.convert_output_message(message)
                        res.append(converted)
                    except Exception as e:
                        logger.error(f"Error converting message {message}: {e}")
        return res

    def convert_output_message(self, output_message: AnyMessage):  # pylint: disable=inconsistent-return-statements
        """
        Convert one LangGraph message into its ItemResource counterpart.

        Mapping: HumanMessage -> user message, SystemMessage -> system message,
        AIMessage -> assistant message (or a single function_call item when
        tool calls are present), ToolMessage -> function_call_output.
        Unrecognized message types fall through and return None (hence the
        pylint disable above).

        :param output_message: The LangGraph message to convert.
        :type output_message: AnyMessage
        """
        # Implement the conversion logic for inner inputs
        if isinstance(output_message, messages.HumanMessage):
            return project_models.ResponsesUserMessageItemResource(
                content=self.convert_MessageContent(
                    output_message.content, role=project_models.ResponsesMessageRole.USER
                ),
                id=self.context.id_generator.generate_message_id(),
                status="completed",  # temporary status, can be adjusted based on actual logic
            )
        if isinstance(output_message, messages.SystemMessage):
            return project_models.ResponsesSystemMessageItemResource(
                content=self.convert_MessageContent(
                    output_message.content, role=project_models.ResponsesMessageRole.SYSTEM
                ),
                id=self.context.id_generator.generate_message_id(),
                status="completed",
            )
        if isinstance(output_message, messages.AIMessage):
            if output_message.tool_calls:
                # If there are tool calls, we assume there is only ONE function call
                if len(output_message.tool_calls) > 1:
                    logger.warning(
                        f"There are {len(output_message.tool_calls)} tool calls found. "
                        + "Only the first one will be processed."
                    )
                tool_call = output_message.tool_calls[0]
                name, call_id, argument = extract_function_call(tool_call)
                return project_models.FunctionToolCallItemResource(
                    call_id=call_id,
                    name=name,
                    arguments=argument,
                    id=self.context.id_generator.generate_function_call_id(),
                    status="completed",
                )
            return project_models.ResponsesAssistantMessageItemResource(
                content=self.convert_MessageContent(
                    output_message.content, role=project_models.ResponsesMessageRole.ASSISTANT
                ),
                id=self.context.id_generator.generate_message_id(),
                status="completed",
            )
        if isinstance(output_message, messages.ToolMessage):
            return project_models.FunctionToolCallOutputItemResource(
                call_id=output_message.tool_call_id,
                output=output_message.content,
                id=self.context.id_generator.generate_function_output_id(),
            )

    def convert_MessageContent(
        self, content, role: project_models.ResponsesMessageRole
    ) -> List[project_models.ItemContent]:
        """Normalize message content (plain string or list of parts) to a list of ItemContent."""
        if isinstance(content, str):
            return [self.convert_MessageContentItem(content, role)]
        return [self.convert_MessageContentItem(item, role) for item in content]

    def convert_MessageContentItem(
        self, content, role: project_models.ResponsesMessageRole
    ) -> project_models.ItemContent:
        """
        Map one LangChain content part to an ItemContent.

        The input_*/output_* variant is selected by the message role; image and
        file content are only supported for user messages.

        :param content: A plain string or a LangChain content-part dict.
        :param role: The role of the message the content belongs to.
        :type role: project_models.ResponsesMessageRole

        :return: The converted ItemContent.
        :rtype: project_models.ItemContent
        :raises ValueError: For unsupported role/content-type combinations.
        """
        content_dict = copy.deepcopy(content) if isinstance(content, dict) else {"text": content}

        content_type = None
        if isinstance(content, str):
            langgraph_content_type = "text"
        else:
            langgraph_content_type = content.get("type", "text")

        if langgraph_content_type == "text":
            if role == project_models.ResponsesMessageRole.ASSISTANT:
                content_type = project_models.ItemContentType.OUTPUT_TEXT
            else:
                content_type = project_models.ItemContentType.INPUT_TEXT
        elif langgraph_content_type == "image":
            if role == project_models.ResponsesMessageRole.USER:
                content_type = project_models.ItemContentType.INPUT_IMAGE
            else:
                raise ValueError("Image content from assistant is not supported")
        elif langgraph_content_type == "audio":
            if role == project_models.ResponsesMessageRole.USER:
                content_type = project_models.ItemContentType.INPUT_AUDIO
            else:
                content_type = project_models.ItemContentType.OUTPUT_AUDIO
        elif langgraph_content_type == "file":
            if role == project_models.ResponsesMessageRole.USER:
                content_type = project_models.ItemContentType.INPUT_FILE
            else:
                raise ValueError("File content from assistant is not supported")
        else:
            raise ValueError(f"Unsupported content: {content}")

        content_dict["type"] = content_type
        if content_type == project_models.ItemContentType.OUTPUT_TEXT:
            content_dict["annotations"] = []  # annotation is required for output_text

        return project_models.ItemContent(content_dict)
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# mypy: disable-error-code="call-overload,override"
"""Base interface for converting between LangGraph internal state and OpenAI-style responses.

A LanggraphStateConverter implementation bridges:
  1. Incoming CreateResponse (wrapped in AgentRunContext) -> initial graph state
  2. Internal graph state -> final non-streaming Response
  3. Streaming graph state events -> ResponseStreamEvent sequence
  4. Declares which stream mode (if any) is supported for a given run context

Concrete implementations should:
  * Decide and document the shape of the state dict they return in request_to_state
  * Handle aggregation, error mapping, and metadata propagation in state_to_response
  * Incrementally translate async stream_state items in state_to_response_stream

Do NOT perform network I/O directly inside these methods (other than awaiting the
provided async iterator). Keep them pure transformation layers so they are testable.
"""

from __future__ import annotations

import time
from abc import ABC, abstractmethod
from typing import Any, AsyncGenerator, AsyncIterator, Dict

from azure.ai.agentserver.core.models import Response, ResponseStreamEvent
from azure.ai.agentserver.core.server.common.agent_run_context import AgentRunContext

from .langgraph_request_converter import LangGraphRequestConverter
from .langgraph_response_converter import LangGraphResponseConverter
from .langgraph_stream_response_converter import LangGraphStreamResponseConverter


class LanggraphStateConverter(ABC):
    """
    Abstract base class for LangGraph state <-> response conversion.

    :meta private:
    """

    @abstractmethod
    def get_stream_mode(self, context: AgentRunContext) -> str:
        """Return a string indicating streaming mode for this run.

        Examples: "values", "updates", "messages", "custom", "debug".
        Implementations may inspect context.request.stream or other flags.
        Must be fast and side-effect free.

        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: The streaming mode as a string.
        :rtype: str
        """

    @abstractmethod
    def request_to_state(self, context: AgentRunContext) -> Dict[str, Any]:
        """Convert the incoming request (via context) to an initial LangGraph state.

        Return a serializable dict that downstream graph execution expects.
        Should not mutate the context. Raise ValueError on invalid input.

        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: The initial LangGraph state as a dictionary.
        :rtype: Dict[str, Any]
        """

    @abstractmethod
    def state_to_response(self, state: Any, context: AgentRunContext) -> Response:
        """Convert a completed LangGraph state into a final non-streaming Response object.

        Implementations must construct and return an models.Response.
        The returned object should include output items, usage (if available),
        and reference the agent / conversation from context.

        :param state: The completed LangGraph state.
        :type state: Any
        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: The final non-streaming Response object.
        :rtype: Response
        """

    @abstractmethod
    async def state_to_response_stream(
        self, stream_state: AsyncIterator[Dict[str, Any] | Any], context: AgentRunContext
    ) -> AsyncGenerator[ResponseStreamEvent, None]:
        """Convert an async iterator of partial state updates into stream events.

        Yield ResponseStreamEvent objects in the correct order. Implementations
        are responsible for emitting lifecycle events (created, in_progress, deltas,
        completed, errors) consistent with the OpenAI Responses streaming contract.

        :param stream_state: An async iterator of partial LangGraph state updates.
        :type stream_state: AsyncIterator[Dict[str, Any] | Any]
        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: An async generator yielding ResponseStreamEvent objects.
        :rtype: AsyncGenerator[ResponseStreamEvent, None]
        """
class LanggraphMessageStateConverter(LanggraphStateConverter):
    """Converter for graphs built on LangGraph's built-in MessagesState.

    Thin wiring layer: the actual translation work is delegated to the
    request/response converter classes in this package.
    """

    def get_stream_mode(self, context: AgentRunContext) -> str:
        # Token-level message deltas for streaming runs; per-node state
        # updates otherwise.
        return "messages" if context.request.get("stream") else "updates"

    def request_to_state(self, context: AgentRunContext) -> Dict[str, Any]:
        # Translate the OpenAI-style request payload into MessagesState input.
        return LangGraphRequestConverter(context.request).convert()

    def state_to_response(self, state: Any, context: AgentRunContext) -> Response:
        output_items = LangGraphResponseConverter(context, state).convert()
        return Response(
            object="response",
            id=context.response_id,
            agent=context.get_agent_id_object(),
            conversation=context.get_conversation_object(),
            metadata=context.request.get("metadata"),
            created_at=int(time.time()),
            output=output_items,
        )

    async def state_to_response_stream(
        self, stream_state: AsyncIterator[Dict[str, Any] | Any], context: AgentRunContext
    ) -> AsyncGenerator[ResponseStreamEvent, None]:
        stream_converter = LangGraphStreamResponseConverter(stream_state, context)
        async for event in stream_converter.convert():
            yield event
# ---------------------------------------------------------
# pylint: disable=logging-fstring-interpolation
# mypy: disable-error-code="assignment,valid-type"
from typing import List

from langchain_core.messages import AnyMessage

from azure.ai.agentserver.core.logger import get_logger
from azure.ai.agentserver.core.models import ResponseStreamEvent
from azure.ai.agentserver.core.server.common.agent_run_context import AgentRunContext

from .response_event_generators import (
    ResponseEventGenerator,
    ResponseStreamEventGenerator,
    StreamEventState,
)

logger = get_logger()


class LangGraphStreamResponseConverter:
    """Translates a LangGraph message stream into ResponseStreamEvent objects.

    Maintains a "current" event generator; each incoming message is offered to
    it, and the generator may hand processing off to a different generator as
    the stream moves between phases (see try_process_message).
    """

    def __init__(self, stream, context: AgentRunContext):
        self.stream = stream
        self.context = context

        # Mutable state shared across all event generators for this stream.
        self.stream_state = StreamEventState()
        self.current_generator: ResponseEventGenerator = None

    async def convert(self):
        """Yield stream events for each incoming message, then finalize.

        A None message is pushed through at the end of the stream so the
        active generator can flush any pending events.
        """
        async for message, _ in self.stream:
            try:
                if self.current_generator is None:
                    self.current_generator = ResponseStreamEventGenerator(logger, None)

                converted = self.try_process_message(message, self.context)
                for event in converted:
                    yield event  # yield each event separately
            except Exception as e:
                logger.error(f"Error converting message {message}: {e}")
                raise ValueError(f"Error converting message {message}") from e

        logger.info("Stream ended, finalizing response.")
        # finalize the stream
        converted = self.try_process_message(None, self.context)
        for event in converted:
            yield event  # yield each event separately

    def try_process_message(self, event: AnyMessage, context: AgentRunContext) -> List[ResponseStreamEvent]:
        """
        Offer one message (or the None end-of-stream sentinel) to the generator chain.

        Loops until some generator accepts the message, switching the current
        generator whenever the active one delegates to another one; bails out
        with a warning if the same generator refuses the message again (to
        avoid an infinite loop).

        :param event: The incoming stream message, or None at end of stream.
        :type event: AnyMessage
        :param context: The context for the agent run.
        :type context: AgentRunContext

        :return: All events produced while processing this message.
        :rtype: List[ResponseStreamEvent]
        """
        if event and not self.current_generator:
            self.current_generator = ResponseStreamEventGenerator(logger, None)

        is_processed = False
        next_processor = self.current_generator
        returned_events = []
        while not is_processed:
            is_processed, next_processor, processed_events = self.current_generator.try_process_message(
                event, context, self.stream_state
            )
            returned_events.extend(processed_events)
            if not is_processed and next_processor == self.current_generator:
                logger.warning(
                    f"Message can not be processed by current generator {type(self.current_generator).__name__}:"
                    + f" {type(event)}: {event}"
                )
                break
            if next_processor != self.current_generator:
                logger.info(
                    f"Switching processor from {type(self.current_generator).__name__} "
                    + f"to {type(next_processor).__name__}"
                )
                self.current_generator = next_processor
        return returned_events
# ---------------------------------------------------------
from azure.ai.agentserver.core.models import projects as project_models


class ItemContentHelper:
    """Base helper that accumulates streamed deltas for one ItemContent part."""

    def __init__(self, content_type: str):
        self.content_type = content_type
        # True once at least one delta has been folded into this helper.
        self.has_aggregated_content = False

    def create_item_content(self) -> project_models.ItemContent:
        """Build the ItemContent for the aggregated data (type only at the base)."""
        return project_models.ItemContent(
            type=self.content_type,
        )

    @staticmethod
    def _extract_text(item) -> str:
        """Return the text carried by a delta item.

        Accepts the string itself, a dict with a string 'text' field, or
        anything else (which contributes no text). Shared by the input/output
        text helpers so the aggregation rules stay in one place.
        """
        if isinstance(item, str):
            return item
        if isinstance(item, dict):
            text = item.get("text")
            if isinstance(text, str):
                return text
        return ""


class InputTextItemContentHelper(ItemContentHelper):
    """Aggregates input_text content parts."""

    def __init__(self):
        super().__init__(project_models.ItemContentType.INPUT_TEXT)
        self.text = ""

    def create_item_content(self):
        """Build the input_text ItemContent from the aggregated text."""
        return project_models.ItemContentInputText(text=self.text)

    def aggregate_content(self, item):
        """Fold one streamed delta into the aggregated text.

        Deltas of unknown shape contribute nothing but still mark this helper
        as having received content (matches the original behavior).
        """
        self.has_aggregated_content = True
        self.text += self._extract_text(item)


class OutputTextItemContentHelper(ItemContentHelper):
    """Aggregates output_text content parts (text plus annotations/logprobs)."""

    def __init__(self):
        super().__init__(project_models.ItemContentType.OUTPUT_TEXT)
        self.text = ""
        self.annotations = []
        self.logprobs = []

    def create_item_content(self):
        """Build the output_text ItemContent from the aggregated state."""
        return project_models.ItemContentOutputText(
            text=self.text,
            annotations=self.annotations,
            logprobs=self.logprobs,
        )

    def aggregate_content(self, item):
        """Fold one streamed delta into the aggregated text (see base helper)."""
        self.has_aggregated_content = True
        self.text += self._extract_text(item)
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# mypy: disable-error-code="assignment"
from typing import Optional

from azure.ai.agentserver.core.models import projects as project_models

from ..utils import extract_function_call


def _status(is_done: bool) -> str:
    """Wire status string for a streamed item: completed once done."""
    return "completed" if is_done else "in_progress"


class ItemResourceHelper:
    """Base helper that accumulates streamed data for one output item.

    :param item_type: The ``project_models.ItemType`` of the item produced.
    :param item_id: Server-generated id for the item, when already known.
    """

    def __init__(self, item_type: str, item_id: Optional[str] = None):
        self.item_type = item_type
        self.item_id = item_id

    def create_item_resource(self, is_done: bool):
        """Build the ItemResource snapshot; overridden by subclasses."""

    def add_aggregate_content(self, item):
        """Fold one streamed chunk into this helper; overridden by subclasses."""

    def get_aggregated_content(self):
        """Return the finalized item; overridden by subclasses."""


class FunctionCallItemResourceHelper(ItemResourceHelper):
    """Accumulates a streamed function (tool) call: name, call id, arguments.

    :param item_id: Server-generated function-call item id.
    :param tool_call: First tool-call chunk; seeds ``name`` and ``call_id``.
    """

    def __init__(self, item_id: Optional[str] = None, tool_call: Optional[dict] = None):
        super().__init__(project_models.ItemType.FUNCTION_CALL, item_id)
        self.call_id = None
        self.name = None
        self.arguments = ""  # argument fragments concatenate into a JSON string
        if tool_call:
            # name/call_id come with the first chunk; arguments stream in later.
            self.name, self.call_id, _ = extract_function_call(tool_call)

    def create_item_resource(self, is_done: bool):
        content = {
            "id": self.item_id,
            "type": self.item_type,
            "call_id": self.call_id,
            "name": self.name,
            "arguments": self.arguments or "",
            "status": _status(is_done),
        }
        return project_models.ItemResource(content)

    def add_aggregate_content(self, item):
        if isinstance(item, str):
            self.arguments += item
            return
        if not isinstance(item, dict):
            return
        # NOTE(review): dict chunks are filtered by comparing their "type"
        # against ItemType.FUNCTION_CALL — confirm tool-call chunks carry
        # exactly that type value.
        if item.get("type") != project_models.ItemType.FUNCTION_CALL:
            return
        _, _, argument = extract_function_call(item)
        if argument:
            self.arguments += argument

    def get_aggregated_content(self):
        return self.create_item_resource(is_done=True)


class FunctionCallOutputItemResourceHelper(ItemResourceHelper):
    """Accumulates the tool's reply for a function call (its output text).

    :param item_id: Server-generated function-call-output item id.
    :param call_id: The call id this output answers.
    """

    def __init__(self, item_id: Optional[str] = None, call_id: Optional[str] = None):
        super().__init__(project_models.ItemType.FUNCTION_CALL_OUTPUT, item_id)
        self.call_id = call_id
        self.content = ""

    def create_item_resource(self, is_done: bool):
        content = {
            "id": self.item_id,
            "type": self.item_type,
            "status": _status(is_done),
            "call_id": self.call_id,
            "output": self.content,
        }
        return project_models.ItemResource(content)

    def add_aggregate_content(self, item):
        if isinstance(item, str):
            self.content += item
            return
        if not isinstance(item, dict):
            return
        content = item.get("text")
        if isinstance(content, str):
            self.content += content

    def get_aggregated_content(self):
        return self.create_item_resource(is_done=True)


class MessageItemResourceHelper(ItemResourceHelper):
    """Accumulates content parts for a message item (user/assistant/system).

    :param item_id: Server-generated message item id.
    :param role: The role the message is attributed to.
    """

    def __init__(self, item_id: str, role: project_models.ResponsesMessageRole):
        super().__init__(project_models.ItemType.MESSAGE, item_id)
        self.role = role
        self.content: list[project_models.ItemContent] = []

    def create_item_resource(self, is_done: bool):
        content = {
            "id": self.item_id,
            "type": self.item_type,
            "status": _status(is_done),
            "content": self.content,
            "role": self.role,
        }
        return project_models.ItemResource(content)

    def add_aggregate_content(self, item):
        # Child generators hand up either a raw dict or an ItemContent model.
        if isinstance(item, dict):
            item = project_models.ItemContent(item)
        if isinstance(item, project_models.ItemContent):
            self.content.append(item)

    def get_aggregated_content(self):
        return self.create_item_resource(is_done=True)
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# pylint: disable=unused-argument,consider-using-in,consider-merging-isinstance
# mypy: ignore-errors
from typing import List

from langchain_core import messages as langgraph_messages

from azure.ai.agentserver.core.models import projects as project_models

from . import item_content_helpers
from .response_event_generator import ResponseEventGenerator, StreamEventState
from .response_output_text_event_generator import ResponseOutputTextEventGenerator


class ResponseContentPartEventGenerator(ResponseEventGenerator):
    """Emits content_part added/done events for one content part of a message.

    Sits between the output-item generator (parent) and the text-delta
    generator (child) in the processor chain: it opens the part, delegates
    deltas to a child, and closes the part when the message id changes or
    the stream ends.
    """

    def __init__(
        self,
        logger,
        parent: ResponseEventGenerator,
        item_id: str,
        message_id: str,
        output_index: int,
        content_index: int,
    ):
        super().__init__(logger, parent)
        self.output_index = output_index
        self.content_index = content_index
        self.item_id = item_id
        # LangGraph message id this part belongs to; a different id ends the part.
        self.message_id = message_id
        self.aggregated_content = ""
        # Created lazily from the first message; picks input vs output text.
        self.item_content_helper = None

    def try_process_message(
        self, message, context, stream_state: StreamEventState
    ) -> tuple[bool, ResponseEventGenerator, List[project_models.ResponseStreamEvent]]:
        """Process one message for this content part.

        :return: (is_processed, next_processor, events) — next_processor is the
            child for delta handling, self, or the parent once the part is done.
        """
        is_processed = False
        events = []
        next_processor = self
        if not self.item_content_helper:
            if not self.try_create_item_content_helper(message):
                # cannot create item content, skip this message
                self.logger.warning(f"Cannot create item content helper for message: {message}")
                return True, self, []
        if self.item_content_helper and not self.started:
            self.started, start_events = self.on_start(message, context, stream_state)
            if not self.started:
                # could not start processing, skip this message
                return True, self, []
            events.extend(start_events)

        if self.should_end(message):
            complete_events = self.on_end(message, context, stream_state)
            events.extend(complete_events)
            next_processor = self.parent
            # Only a finish-reason-bearing chunk is considered fully consumed;
            # otherwise the parent re-examines the same message.
            is_processed = self.has_finish_reason(message) if message else False
            return is_processed, next_processor, events

        child_processor = self.create_child_processor(message)
        if child_processor:
            next_processor = child_processor

        return is_processed, next_processor, events

    def on_start(  # mypy: ignore[override]
        self, event, run_details, stream_state: StreamEventState
    ) -> tuple[bool, List[project_models.ResponseStreamEvent]]:
        """Emit the ResponseContentPartAddedEvent that opens this part."""
        if self.started:
            return False, []

        start_event = project_models.ResponseContentPartAddedEvent(
            item_id=self.item_id,
            output_index=self.output_index,
            content_index=self.content_index,
            part=self.item_content_helper.create_item_content(),
            sequence_number=stream_state.sequence_number,
        )
        stream_state.sequence_number += 1
        self.started = True

        return True, [start_event]

    def on_end(self, message, context, stream_state: StreamEventState
    ) -> List[project_models.ResponseStreamEvent]:  # mypy: ignore[override]
        """Emit the done event and hand the aggregated part up to the parent."""
        aggregated_content = self.item_content_helper.create_item_content()
        done_event = project_models.ResponseContentPartDoneEvent(
            item_id=self.item_id,
            output_index=self.output_index,
            content_index=self.content_index,
            part=aggregated_content,
            sequence_number=stream_state.sequence_number,
        )
        stream_state.sequence_number += 1
        if self.parent:
            # Parent (output item) stores the finished part as a plain dict.
            self.parent.aggregate_content(aggregated_content.as_dict())
        return [done_event]

    def try_create_item_content_helper(self, message):
        """Pick the content helper from the message role; text content only."""
        if isinstance(message, langgraph_messages.AIMessage) or isinstance(message, langgraph_messages.ToolMessage):
            if self.is_text_content(message.content):
                self.item_content_helper = item_content_helpers.OutputTextItemContentHelper()
                return True
        if isinstance(message, langgraph_messages.HumanMessage) or isinstance(
            message, langgraph_messages.SystemMessage
        ):
            if self.is_text_content(message.content):
                self.item_content_helper = item_content_helpers.InputTextItemContentHelper()
                return True
        return False

    def aggregate_content(self, content):
        """Called by the child text generator to push up aggregated text."""
        return self.item_content_helper.aggregate_content(content)

    def is_text_content(self, content):
        """True when content is a str or a list made only of strings."""
        if isinstance(content, str):
            return True
        if isinstance(content, list) and all(isinstance(c, str) for c in content):
            return True
        return False

    def create_child_processor(self, message) -> ResponseEventGenerator:
        """Create the delta-level generator for this part's content type.

        Raises for non-text types; unreachable today because
        try_create_item_content_helper only admits text content.
        """
        if (
            self.item_content_helper.content_type == project_models.ItemContentType.INPUT_TEXT
            or self.item_content_helper.content_type == project_models.ItemContentType.OUTPUT_TEXT
        ):
            return ResponseOutputTextEventGenerator(
                logger=self.logger,
                parent=self,
                content_index=self.content_index,
                output_index=self.output_index,
                item_id=self.item_id,
                message_id=self.message_id,
            )
        raise ValueError(f"Unsupported item content type for child processor: {self.item_content_helper.content_type}")

    def has_finish_reason(self, message) -> bool:
        """True when a streaming chunk carries a finish_reason in its metadata."""
        if not isinstance(message, langgraph_messages.BaseMessageChunk):
            return False
        if message.response_metadata and message.response_metadata.get("finish_reason"):
            return True
        return False

    def should_end(self, event) -> bool:
        # Determine if the event indicates end of the stream for this item
        if event is None:
            return True
        if event.id != self.message_id:
            return True
        # if is Message not MessageChunk, should create child and end in the second iteration
        if not isinstance(event, langgraph_messages.BaseMessageChunk):
            return self.item_content_helper.has_aggregated_content
        return False
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# pylint: disable=unused-argument,unnecessary-pass
# mypy: disable-error-code="valid-type"
from typing import List

from langchain_core.messages import AnyMessage

from azure.ai.agentserver.core.models import projects as project_models
from azure.ai.agentserver.core.server.common.agent_run_context import AgentRunContext


class StreamEventState:
    """
    :meta private:
    State information for the stream event processing.

    Carries the monotonically increasing sequence number shared by every
    generator in the processor chain; each emitted event consumes one.
    """

    # Class-level default; instances rebind it as events are emitted.
    sequence_number: int = 0


class ResponseEventGenerator:
    """
    :meta private:
    Abstract base class for response event generators.

    Generators form a parent/child chain (response -> output item ->
    content part -> text delta). ``try_process_message`` returns the next
    processor so the driver can walk up or down the chain per message.
    """

    # Becomes True after the generator has emitted its opening event(s).
    started: bool = False

    def __init__(self, logger, parent):
        self.logger = logger
        self.parent = parent  # parent generator

    def try_process_message(
        self,
        message: AnyMessage,  # mypy: ignore[valid-type]
        context: AgentRunContext,
        stream_state: StreamEventState
    ):  # mypy: ignore[empty-body]
        """
        Try to process the incoming message.

        :param message: The incoming message to process.
        :type message: AnyMessage
        :param context: The agent run context.
        :type context: AgentRunContext
        :param stream_state: The current stream event state.
        :type stream_state: StreamEventState

        :return: tuple of (is_processed, next_processor, events)
        :rtype: tuple[bool, ResponseEventGenerator, List[ResponseStreamEvent]]
        """
        pass

    def on_start(self) -> tuple[bool, List[project_models.ResponseStreamEvent]]:
        """
        Generate the starting events for this layer.

        NOTE(review): subclasses override this with extra parameters
        (message/context/stream_state) — the base signature is narrower
        than every concrete implementation; confirm intended contract.

        :return: tuple of (started, events)
        :rtype: tuple[bool, List[ResponseStreamEvent]]
        """
        return False, []

    def on_end(
        self, message: AnyMessage, context: AgentRunContext, stream_state: StreamEventState
    ) -> tuple[bool, List[project_models.ResponseStreamEvent]]:
        """
        Generate the ending events for this layer.
        TODO: handle different end conditions, e.g. normal end, error end, etc.

        NOTE(review): several subclasses return a plain list of events here,
        not the (bool, list) tuple declared by this base — callers extend()
        the result directly; align the contract.

        :param message: The incoming message to process.
        :type message: AnyMessage
        :param context: The agent run context.
        :type context: AgentRunContext
        :param stream_state: The current stream event state.
        :type stream_state: StreamEventState

        :return: tuple of (started, events)
        :rtype: tuple[bool, List[ResponseStreamEvent]]
        """
        return False, []

    def aggregate_content(self):
        """
        Aggregate the content for this layer.
        It is called by its child processor to pass up aggregated content.

        :return: content from child processor
        :rtype: str | dict
        """
        pass
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# pylint: disable=unused-argument,name-too-long
# mypy: ignore-errors
from typing import List

from langchain_core import messages as langgraph_messages
from langchain_core.messages import AnyMessage

from azure.ai.agentserver.core.models import projects as project_models
from azure.ai.agentserver.core.server.common.agent_run_context import AgentRunContext

from ..utils import extract_function_call
from . import ResponseEventGenerator, StreamEventState


class ResponseFunctionCallArgumentEventGenerator(ResponseEventGenerator):
    """Streams function-call argument deltas for a single tool-call item.

    Emits ResponseFunctionCallArgumentsDeltaEvent per chunk and a single
    ResponseFunctionCallArgumentsDoneEvent when the call's message ends.
    """

    def __init__(self, logger, parent: ResponseEventGenerator, item_id, message_id, output_index: int):
        super().__init__(logger, parent)
        self.item_id = item_id
        self.output_index = output_index
        self.aggregated_content = ""  # full argument string accumulated so far
        self.message_id = message_id

    def try_process_message(
        self, message, context: AgentRunContext, stream_state: StreamEventState
    ) -> tuple[bool, ResponseEventGenerator, List[project_models.ResponseStreamEvent]]:
        """Emit delta events for *message*; hand back to the parent when done.

        :return: (is_processed, next_processor, events)
        """
        is_processed = False
        events = []
        next_processor = self
        if not self.started:
            self.started = True  # argument streaming has no dedicated start event

        is_processed, next_processor, processed_events = self.process(message, context, stream_state)
        if not is_processed:
            self.logger.warning(f"FunctionCallArgumentEventGenerator did not process message: {message}")
        events.extend(processed_events)

        if self.should_end(message):
            # NOTE(review): should_end is only True for None/foreign-id
            # messages, for which has_finish_reason always returns False —
            # so is_processed is always False on this path; confirm intent.
            has_finish_reason = self.has_finish_reason(message)
            complete_events = self.on_end(message, context, stream_state)
            events.extend(complete_events)
            next_processor = self.parent
            is_processed = has_finish_reason  # if has finish reason, mark as processed and stop further processing

        return is_processed, next_processor, events

    def on_start(
        self, event: AnyMessage, run_details, stream_state: StreamEventState
    ) -> tuple[bool, List[project_models.ResponseStreamEvent]]:
        """No-op start: argument streaming opens without emitting events."""
        if self.started:
            return True, []
        self.started = True
        return True, []

    def process(
        self, message: AnyMessage, run_details, stream_state: StreamEventState
    ) -> tuple[bool, ResponseEventGenerator, List[project_models.ResponseStreamEvent]]:
        """Turn the message's first tool-call chunk into an arguments delta event."""
        tool_call = self.get_tool_call_info(message)
        if tool_call:
            _, _, argument = extract_function_call(tool_call)
            if argument:
                argument_delta_event = project_models.ResponseFunctionCallArgumentsDeltaEvent(
                    item_id=self.item_id,
                    output_index=self.output_index,
                    delta=argument,
                    sequence_number=stream_state.sequence_number,
                )
                stream_state.sequence_number += 1
                self.aggregated_content += argument
                return True, self, [argument_delta_event]
        return False, self, []

    def has_finish_reason(self, message: AnyMessage) -> bool:
        """True when *message* terminates the tool call this generator tracks."""
        if not message or message.id != self.message_id:
            return False
        if isinstance(message, langgraph_messages.AIMessageChunk):
            if not message.tool_call_chunks:
                # new tool call started, end this argument processing
                return True
            if message.response_metadata.get("finish_reason"):
                # tool call finished
                return True
        elif isinstance(message, langgraph_messages.AIMessage):
            return True
        return False

    def should_end(self, event: AnyMessage) -> bool:
        """End when the stream ends or a different message id arrives."""
        if event is None:
            return True
        if event.id != self.message_id:
            return True
        return False

    def on_end(
        self, message: AnyMessage, context: AgentRunContext, stream_state: StreamEventState
    ) -> List[project_models.ResponseStreamEvent]:
        # FIX: was annotated -> tuple[bool, List[...]] but returns a plain
        # list; callers extend() the result, so the list annotation is correct.
        done_event = project_models.ResponseFunctionCallArgumentsDoneEvent(
            item_id=self.item_id,
            output_index=self.output_index,
            arguments=self.aggregated_content,
            sequence_number=stream_state.sequence_number,
        )
        stream_state.sequence_number += 1
        self.parent.aggregate_content(self.aggregated_content)  # pass aggregated content to parent
        return [done_event]

    def get_tool_call_info(self, message: langgraph_messages.AnyMessage):
        """Return the first tool call (chunk) on *message*, warning on extras."""
        if isinstance(message, langgraph_messages.AIMessageChunk):
            if message.tool_call_chunks:
                if len(message.tool_call_chunks) > 1:
                    self.logger.warning(
                        f"There are {len(message.tool_call_chunks)} tool calls found. "
                        + "Only the first one will be processed."
                    )
                return message.tool_call_chunks[0]
        elif isinstance(message, langgraph_messages.AIMessage):
            if message.tool_calls:
                if len(message.tool_calls) > 1:
                    self.logger.warning(
                        f"There are {len(message.tool_calls)} tool calls found. "
                        + "Only the first one will be processed."
                    )
                return message.tool_calls[0]
        return None
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# pylint: disable=unused-argument
# mypy: ignore-errors
from typing import List, Optional

from langchain_core import messages as langgraph_messages
from langchain_core.messages import AnyMessage

from azure.ai.agentserver.core.models import projects as project_models
from azure.ai.agentserver.core.server.common.agent_run_context import AgentRunContext
from azure.ai.agentserver.core.server.common.id_generator.id_generator import IdGenerator

from . import ResponseEventGenerator, StreamEventState, item_resource_helpers
from .response_content_part_event_generator import ResponseContentPartEventGenerator
from .response_function_call_argument_event_generator import ResponseFunctionCallArgumentEventGenerator


class ResponseOutputItemEventGenerator(ResponseEventGenerator):
    """Emits output_item added/done events for one LangGraph message.

    Picks an item-resource helper from the message kind (function call,
    tool output, or role message) and delegates streaming detail to a
    child generator (arguments or content parts).
    """

    def __init__(self, logger, parent: ResponseEventGenerator, output_index: int, message_id: Optional[str] = None):
        # FIX: message_id default of None requires Optional[str] (PEP 484).
        super().__init__(logger, parent)
        self.output_index = output_index
        self.message_id = message_id
        # Chosen lazily from the first message this item sees.
        self.item_resource_helper = None

    def try_process_message(
        self, message: AnyMessage, context: AgentRunContext, stream_state: StreamEventState
    ) -> tuple[bool, ResponseEventGenerator, List[project_models.ResponseStreamEvent]]:
        """Process one message for this output item.

        :return: (is_processed, next_processor, events)
        """
        is_processed = False
        next_processor = self
        events = []
        if self.item_resource_helper is None:
            if not self.try_create_item_resource_helper(message, context.id_generator):
                # cannot create item resource, skip this message
                self.logger.warning(f"Cannot create item resource helper for message: {message}, skipping.")
                return True, self, []

        if self.item_resource_helper and not self.started:
            self.started, start_events = self.on_start(message, context, stream_state)
            if not self.started:
                # could not start processing, skip this message
                self.logger.warning(f"Cannot create start events for message: {message}, skipping.")
                return True, self, []
            events.extend(start_events)

        if self.should_end(message):
            # not the message this processor is handling
            complete_events = self.on_end(message, context, stream_state)
            is_processed = self.message_id == message.id if message else False
            next_processor = self.parent
            events.extend(complete_events)
            return is_processed, next_processor, events

        child_processor = self.create_child_processor(message)
        if child_processor:
            self.logger.info(f"Created child processor: {child_processor}")
            return False, child_processor, events

        if message:
            # no child processor, process the content directly
            self.aggregate_content(message.content)
            is_processed = True

        return is_processed, next_processor, events

    def on_start(
        self, event: AnyMessage, context: AgentRunContext, stream_state: StreamEventState
    ) -> tuple[bool, List[project_models.ResponseStreamEvent]]:
        """Emit the ResponseOutputItemAddedEvent that opens this item."""
        if self.started:
            return True, []

        item_resource = self.item_resource_helper.create_item_resource(is_done=False)
        if item_resource is None:
            # cannot know what item resource to create
            # FIX: was `return False, None`, violating the List annotation.
            return False, []
        item_added_event = project_models.ResponseOutputItemAddedEvent(
            output_index=self.output_index,
            sequence_number=stream_state.sequence_number,
            item=item_resource,
        )
        stream_state.sequence_number += 1
        self.started = True
        return True, [item_added_event]

    def should_end(self, event: AnyMessage) -> bool:
        """End when the stream ends or a different message id arrives."""
        if event is None:
            self.logger.info("Received None event, ending processor.")
            return True
        if event.id != self.message_id:
            return True
        return False

    def on_end(
        self, message: AnyMessage, context: AgentRunContext, stream_state: StreamEventState
    ) -> List[project_models.ResponseStreamEvent]:
        # FIX: was annotated -> tuple[bool, List[...]] but returns a plain
        # list; callers extend() the result directly.
        if not self.started:  # should not happen
            return []

        item_resource = self.item_resource_helper.create_item_resource(is_done=True)
        # response item done event
        done_event = project_models.ResponseOutputItemDoneEvent(
            output_index=self.output_index,
            sequence_number=stream_state.sequence_number,
            item=item_resource,
        )
        stream_state.sequence_number += 1
        self.parent.aggregate_content(item_resource)  # pass aggregated content to parent
        return [done_event]

    def aggregate_content(self, content):
        # aggregate content from child processor
        self.item_resource_helper.add_aggregate_content(content)

    def try_create_item_resource_helper(self, event: AnyMessage, id_generator: IdGenerator):  # pylint: disable=too-many-return-statements
        """Choose the item-resource helper by message kind; False when unknown."""
        if isinstance(event, langgraph_messages.AIMessageChunk) and event.tool_call_chunks:
            self.item_resource_helper = item_resource_helpers.FunctionCallItemResourceHelper(
                item_id=id_generator.generate_function_call_id(), tool_call=event.tool_call_chunks[0]
            )
            return True
        if isinstance(event, langgraph_messages.AIMessage) and event.tool_calls:
            self.item_resource_helper = item_resource_helpers.FunctionCallItemResourceHelper(
                item_id=id_generator.generate_function_call_id(), tool_call=event.tool_calls[0]
            )
            return True
        if isinstance(event, langgraph_messages.AIMessage) and event.content:
            self.item_resource_helper = item_resource_helpers.MessageItemResourceHelper(
                item_id=id_generator.generate_message_id(), role=project_models.ResponsesMessageRole.ASSISTANT
            )
            return True
        if isinstance(event, langgraph_messages.HumanMessage) and event.content:
            self.item_resource_helper = item_resource_helpers.MessageItemResourceHelper(
                item_id=id_generator.generate_message_id(), role=project_models.ResponsesMessageRole.USER
            )
            return True
        if isinstance(event, langgraph_messages.SystemMessage) and event.content:
            self.item_resource_helper = item_resource_helpers.MessageItemResourceHelper(
                item_id=id_generator.generate_message_id(), role=project_models.ResponsesMessageRole.SYSTEM
            )
            return True
        if isinstance(event, langgraph_messages.ToolMessage):
            self.item_resource_helper = item_resource_helpers.FunctionCallOutputItemResourceHelper(
                item_id=id_generator.generate_function_output_id(), call_id=event.tool_call_id
            )
            return True
        return False

    def create_child_processor(self, message: AnyMessage):
        """Create the detail-level generator matching the chosen item type."""
        if self.item_resource_helper is None:
            return None
        if self.item_resource_helper.item_type == project_models.ItemType.FUNCTION_CALL:
            return ResponseFunctionCallArgumentEventGenerator(
                self.logger,
                self,
                item_id=self.item_resource_helper.item_id,
                message_id=message.id,
                output_index=self.output_index,
            )
        if self.item_resource_helper.item_type == project_models.ItemType.MESSAGE:
            return ResponseContentPartEventGenerator(
                self.logger, self, self.item_resource_helper.item_id, message.id, self.output_index, content_index=0
            )
        return None
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# pylint: disable=unused-argument
# mypy: disable-error-code="return-value,assignment"
from typing import List

from azure.ai.agentserver.core.models import projects as project_models
from azure.ai.agentserver.core.server.common.agent_run_context import AgentRunContext

from .response_event_generator import (
    ResponseEventGenerator,
    StreamEventState,
)


class ResponseOutputTextEventGenerator(ResponseEventGenerator):
    """Leaf generator: emits a text delta event per chunk and one text
    done event when its message ends, then hands control to the parent
    content-part generator.
    """

    def __init__(
        self,
        logger,
        parent: ResponseEventGenerator,
        content_index: int,
        output_index: int,
        item_id: str,
        message_id: str,
    ):
        super().__init__(logger, parent)
        self.output_index = output_index
        self.content_index = content_index
        self.item_id = item_id
        # LangGraph message id this text belongs to; another id ends the stream.
        self.message_id = message_id
        # Full text accumulated across deltas; emitted in the done event.
        self.aggregated_content = ""

    def try_process_message(
        self, message, context, stream_state: StreamEventState
    ) -> tuple[bool, ResponseEventGenerator, List[project_models.ResponseStreamEvent]]:
        """Process one message: emit deltas, and close when the id changes.

        :return: (is_processed, next_processor, events)
        """
        is_processed = False
        events = []
        next_processor = self
        if not self.started:
            self.started = True

        if message:
            is_processed, next_processor, processed_events = self.process(message, context, stream_state)
            if not is_processed:
                self.logger.warning(f"OutputTextEventGenerator did not process message: {message}")
            events.extend(processed_events)

        if self.should_end(message):
            is_processed, complete_events = self.on_end(message, context, stream_state)
            events.extend(complete_events)
            next_processor = self.parent

        return is_processed, next_processor, events

    def process(
        self, message, run_details, stream_state: StreamEventState
    ) -> tuple[bool, ResponseEventGenerator, List[project_models.ResponseStreamEvent]]:
        """Emit one ResponseTextDeltaEvent per string content item."""
        if message and message.content:
            # Normalize: a bare string becomes a one-element list of chunks.
            content = [message.content] if isinstance(message.content, str) else message.content
            res = []
            for item in content:
                if not isinstance(item, str):
                    self.logger.warning(f"Skipping non-string content item: {item}")
                    continue
                # create an event for each content item
                chunk_event = project_models.ResponseTextDeltaEvent(
                    item_id=self.item_id,
                    output_index=self.output_index,
                    content_index=self.content_index,
                    delta=item,
                    sequence_number=stream_state.sequence_number,
                )
                self.aggregated_content += item
                stream_state.sequence_number += 1
                res.append(chunk_event)
            return True, self, res  # mypy: ignore[return-value]
        return False, self, []

    def has_finish_reason(self, message) -> bool:
        """True when *message* belongs to this text and carries finish_reason."""
        if not message or message.id != self.message_id:
            return False
        if message.response_metadata and message.response_metadata.get("finish_reason"):
            return True
        return False

    def should_end(self, message) -> bool:
        # Determine if the message indicates end of the stream for this item
        if message is None:
            return True
        if message.id != self.message_id:
            return True
        return False

    def on_end(  # mypy: ignore[override]
        self, message, context: AgentRunContext, stream_state: StreamEventState
    ) -> tuple[bool, List[project_models.ResponseStreamEvent]]:
        """Emit the text done event and push the full text up to the parent."""
        if not self.started:
            return False, []

        # finalize the item resource
        done_event = project_models.ResponseTextDoneEvent(
            item_id=self.item_id,
            output_index=self.output_index,
            content_index=self.content_index,
            text=self.aggregated_content,
            sequence_number=stream_state.sequence_number,
        )
        stream_state.sequence_number += 1
        self.parent.aggregate_content(self.aggregated_content)
        has_finish = self.has_finish_reason(message)
        return has_finish, [done_event]
+ """ + + def __init__(self, logger, parent): + super().__init__(logger, parent) + self.aggregated_contents: List[project_models.ItemResource] = [] + + def on_start( + self, context: AgentRunContext, stream_state: StreamEventState + ) -> tuple[bool, List[project_models.ResponseStreamEvent]]: + if self.started: + return True, [] + agent_id = context.get_agent_id_object() + conversation = context.get_conversation_object() + # response create event + response_dict = { + "object": "response", + "agent_id": agent_id, + "conversation": conversation, + "id": context.response_id, + "status": "in_progress", + "created_at": int(time.time()), + } + created_event = project_models.ResponseCreatedEvent( + response=project_models.Response(response_dict), + sequence_number=stream_state.sequence_number, + ) + stream_state.sequence_number += 1 + + # response in progress + response_dict = { + "object": "response", + "agent_id": agent_id, + "conversation": conversation, + "id": context.response_id, + "status": "in_progress", + "created_at": int(time.time()), + } + in_progress_event = project_models.ResponseInProgressEvent( + response=project_models.Response(response_dict), + sequence_number=stream_state.sequence_number, + ) + stream_state.sequence_number += 1 + self.started = True + return True, [created_event, in_progress_event] + + def should_complete(self, event: langgraph_messages.AnyMessage) -> bool: + # Determine if the event indicates completion + if event is None: + return True + return False + + def try_process_message( + self, message: langgraph_messages.AnyMessage, context: AgentRunContext, stream_state: StreamEventState + ) -> tuple[bool, ResponseEventGenerator, List[project_models.ResponseStreamEvent]]: + is_processed = False + next_processor = self + events = [] + + if not self.started: + self.started, start_events = self.on_start(context, stream_state) + events.extend(start_events) + + if message: + # create a child processor + next_processor = 
ResponseOutputItemEventGenerator( + self.logger, self, len(self.aggregated_contents), message.id + ) + return is_processed, next_processor, events + + if self.should_end(message): + # received a None message, indicating end of the stream + done_events = self.on_end(message, context, stream_state) + events.extend(done_events) + is_processed = True + next_processor = None + + return is_processed, next_processor, events + + def should_end(self, event: langgraph_messages.AnyMessage) -> bool: + # Determine if the event indicates end of the stream + if event is None: + return True + return False + + def on_end(self, message: langgraph_messages.AnyMessage, context: AgentRunContext, stream_state: StreamEventState): + agent_id = context.get_agent_id_object() + conversation = context.get_conversation_object() + response_dict = { + "object": "response", + "agent_id": agent_id, + "conversation": conversation, + "id": context.response_id, + "status": "completed", + "created_at": int(time.time()), + "output": self.aggregated_contents, + } + done_event = project_models.ResponseCompletedEvent( + response=project_models.Response(response_dict), + sequence_number=stream_state.sequence_number, + ) + stream_state.sequence_number += 1 + if self.parent: + self.parent.aggregate_content(self.aggregated_contents) + return [done_event] + + def aggregate_content(self, content): + # aggregate content from children + if isinstance(content, list): + for c in content: + self.aggregate_content(c) + if isinstance(content, project_models.ItemResource): + self.aggregated_contents.append(content) + else: + raise ValueError(f"Invalid content type: {type(content)}, expected: {project_models.ItemResource}") diff --git a/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/utils.py b/sdk/ai/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/utils.py new file mode 100644 index 000000000000..d9517d8b0e8d --- /dev/null +++ 
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
from typing import get_type_hints


def extract_function_call(tool_call: dict):
    """
    Extract function call details from a tool_call dict.

    :param tool_call: The tool call dictionary containing function call details.
    :type tool_call: dict

    :return: A tuple of (name, call_id, argument).
    :rtype: tuple[str | None, str | None, str | None]
    """
    raw_args = tool_call.get("args")
    if isinstance(raw_args, str):
        argument = raw_args
    elif isinstance(raw_args, dict):
        # Normalize dict arguments to their JSON string form.
        argument = json.dumps(raw_args)
    else:
        argument = None
    return tool_call.get("name"), tool_call.get("id"), argument


def is_state_schema_valid(state_schema) -> bool:
    """
    Validate whether the state schema of a graph contains a field named ``messages``.

    :param state_schema: The state schema class from LangGraph.
    :type state_schema: TypedDict

    :return: True if the state schema contains a field named ``messages``, False otherwise.
    :rtype: bool
    """
    return "messages" in get_typeddict_fields(state_schema)


def get_typeddict_fields(schema_class) -> dict:
    """
    Get all fields/attributes from a TypedDict class.

    :param schema_class: The TypedDict class to extract fields from.
    :type schema_class: TypedDict

    :return: Dictionary of field names and their types; empty dict when the
        object exposes no annotations.
    :rtype: dict

    Example:
        >>> from typing_extensions import TypedDict
        >>> class MyState(TypedDict):
        ...     messages: list[str]
        ...     user_id: str
        >>> get_typeddict_fields(MyState)
        {'messages': list[str], 'user_id': str}
    """
    try:
        return get_type_hints(schema_class)
    except (TypeError, AttributeError):
        # get_type_hints rejects non-class/function objects; fall back to the
        # raw __annotations__ mapping when present.
        pass
    return getattr(schema_class, "__annotations__", {})
b/sdk/ai/azure-ai-agentserver-langgraph/pyproject.toml @@ -0,0 +1,63 @@ +[project] +name = "azure-ai-agentserver-langgraph" +dynamic = ["version", "readme"] +description = "LangGraph adapter for Azure AI Agent Server" +requires-python = ">=3.10" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +license = "MIT" +classifiers = [ + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +keywords = ["azure", "azure sdk"] + +dependencies = [ + "azure-ai-agentserver-core", + "langchain>0.3.5", + "langchain-openai>0.3.10", + "langchain-azure-ai[opentelemetry]>=0.1.4", + "langgraph>0.5.0", + "opentelemetry-exporter-otlp-proto-http", +] + +[build-system] +requires = ["setuptools>=69", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "samples*", + "doc*", + "azure", + "azure.ai", +] + +[tool.setuptools.dynamic] +version = { attr = "azure.ai.agentserver.langgraph._version.VERSION" } +readme = { file = ["README.md"], content-type = "text/markdown" } + +[tool.setuptools.package-data] +pytyped = ["py.typed"] + +[tool.ruff] +line-length = 120 +target-version = "py311" +lint.select = ["E", "F", "B", "I"] # E=pycodestyle errors, F=Pyflakes, B=bugbear, I=import sort +lint.ignore = [] +fix = false + +[tool.ruff.lint.isort] +known-first-party = ["azure.ai.agentserver.langgraph"] +combine-as-imports = true + +[tool.azure-sdk-build] +verifytypes = false # has unknown dependencies +pyright = false diff --git a/sdk/ai/azure-ai-agentserver-langgraph/pyrightconfig.json b/sdk/ai/azure-ai-agentserver-langgraph/pyrightconfig.json new file mode 100644 index 000000000000..2cc86d15e1e7 --- /dev/null +++ 
b/sdk/ai/azure-ai-agentserver-langgraph/pyrightconfig.json @@ -0,0 +1,17 @@ +{ + "reportAttributeAccessIssue": "warning", + "reportIncompatibleMethodOverride": "warning", + "reportReturnType": "warning", + "reportArgumentType": "warning", + "reportMissingImports": "warning", + "reportOptionalMemberAccess": "warning", + "reportGeneralTypeIssues": "warning", + "reportCallIssue": "warning", + + "exclude": [ + "**/tests/**", + "**/samples/**", + "**/setup.py", + "**/conftest.py" + ] +} \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/.env-template b/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/.env-template new file mode 100644 index 000000000000..92b9c812a686 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/.env-template @@ -0,0 +1,4 @@ +AZURE_OPENAI_API_KEY= +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/README.md b/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/README.md new file mode 100644 index 000000000000..dd2821accf15 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/README.md @@ -0,0 +1,84 @@ +# LangGraph Agent Calculator Sample + +This sample demonstrates how to create a calculator agent using LangGraph and using it with Container Agent Adapter. The agent can perform basic arithmetic operations (addition, multiplication, and division) by utilizing tools and making decisions about when to use them. + +## Overview + +The sample consists of several key components: + +- **LangGraph Agent**: A calculator agent that uses tools to perform arithmetic operations +- **Azure AI Agents Adapter**: Adapters of the LangGraph agents. It hosts the agent as a service on your local machine. 
1. **Environment Configuration**
   Copy `.env-template` to `.env` in this directory and fill in your Azure OpenAI configuration:
   ```
   AZURE_OPENAI_API_KEY=your_api_key_here
   AZURE_OPENAI_ENDPOINT=https://<your-resource>.cognitiveservices.azure.com/
   OPENAI_API_VERSION=2025-03-01-preview
   AZURE_OPENAI_CHAT_DEPLOYMENT_NAME=your_deployment_name
   ```

2. **Install Dependencies**
   Required Python packages (install via pip):
   ```bash
   pip install -r requirements.txt
   ```
+ }' + ``` + + or + + ```bash + curl -X POST http://localhost:8088/responses \ + -H "Content-Type: application/json" \ + -d '{ + "agent": { + "name": "local_agent", + "type": "agent_reference" + }, + "stream": false, + "input": [{ + "type": "message", + "role": "user", + "content": [{"type": "input_text", "text": "What is 3 add 5?"}] + }] + }' + ``` \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/langgraph_agent_calculator.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/langgraph_agent_calculator.py new file mode 100644 index 000000000000..ffa8d14b208f --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/langgraph_agent_calculator.py @@ -0,0 +1,142 @@ +import os + +from dotenv import load_dotenv +from langchain.chat_models import init_chat_model +from langchain_core.messages import SystemMessage, ToolMessage +from langchain_core.tools import tool +from langgraph.graph import ( + END, + START, + MessagesState, + StateGraph, +) +from typing_extensions import Literal +from azure.identity import DefaultAzureCredential, get_bearer_token_provider + +from azure.ai.agentserver.langgraph import from_langgraph + +load_dotenv() + +deployment_name = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "gpt-4o") +api_key = os.getenv("AZURE_OPENAI_API_KEY", "") + +if api_key: + llm = init_chat_model(f"azure_openai:{deployment_name}") +else: + credential = DefaultAzureCredential() + token_provider = get_bearer_token_provider( + credential, "https://cognitiveservices.azure.com/.default" + ) + llm = init_chat_model( + f"azure_openai:{deployment_name}", + azure_ad_token_provider=token_provider, + ) + + +# Define tools +@tool +def multiply(a: int, b: int) -> int: + """Multiply a and b. + + Args: + a: first int + b: second int + """ + return a * b + + +@tool +def add(a: int, b: int) -> int: + """Adds a and b. 
+ + Args: + a: first int + b: second int + """ + return a + b + + +@tool +def divide(a: int, b: int) -> float: + """Divide a and b. + + Args: + a: first int + b: second int + """ + return a / b + + +# Augment the LLM with tools +tools = [add, multiply, divide] +tools_by_name = {tool.name: tool for tool in tools} +llm_with_tools = llm.bind_tools(tools) + + +# Nodes +def llm_call(state: MessagesState): + """LLM decides whether to call a tool or not""" + + return { + "messages": [ + llm_with_tools.invoke( + [ + SystemMessage( + content="You are a helpful assistant tasked with performing arithmetic on a set of inputs." + ) + ] + + state["messages"] + ) + ] + } + + +def tool_node(state: dict): + """Performs the tool call""" + + result = [] + for tool_call in state["messages"][-1].tool_calls: + tool = tools_by_name[tool_call["name"]] + observation = tool.invoke(tool_call["args"]) + result.append(ToolMessage(content=observation, tool_call_id=tool_call["id"])) + return {"messages": result} + + +# Conditional edge function to route to the tool node or end based upon whether the LLM made a tool call +def should_continue(state: MessagesState) -> Literal["environment", END]: + """Decide if we should continue the loop or stop based upon whether the LLM made a tool call""" + + messages = state["messages"] + last_message = messages[-1] + # If the LLM makes a tool call, then perform an action + if last_message.tool_calls: + return "Action" + # Otherwise, we stop (reply to the user) + return END + + +# Build workflow +agent_builder = StateGraph(MessagesState) + +# Add nodes +agent_builder.add_node("llm_call", llm_call) +agent_builder.add_node("environment", tool_node) + +# Add edges to connect nodes +agent_builder.add_edge(START, "llm_call") +agent_builder.add_conditional_edges( + "llm_call", + should_continue, + { + "Action": "environment", + END: END, + }, +) +agent_builder.add_edge("environment", "llm_call") + +# Compile the agent +agent = agent_builder.compile() + +if __name__ 
== "__main__": + adapter = from_langgraph(agent) + adapter.run() diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/requirements.txt b/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/requirements.txt new file mode 100644 index 000000000000..8c3bb2198ef1 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/agent_calculator/requirements.txt @@ -0,0 +1,3 @@ +python-dotenv>=1.0.0 +azure-ai-agentserver-core +azure-ai-agentserver-langgraph diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/.env-template b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/.env-template new file mode 100644 index 000000000000..7f9e5c66c97c --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/.env-template @@ -0,0 +1,6 @@ +AZURE_OPENAI_API_KEY= +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= +AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME= +AZURE_OPENAI_EMBEDDINGS_MODEL_NAME= diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/__init__.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/edges/__init__.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/edges/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/edges/grade_documents.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/edges/grade_documents.py new file mode 100644 index 000000000000..1a37eb96103c --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/edges/grade_documents.py @@ -0,0 +1,52 @@ +import os +from typing import Literal + +from dotenv import load_dotenv +from langchain.chat_models import init_chat_model +from langgraph.graph import 
MessagesState +from pydantic import BaseModel, Field + +GRADE_PROMPT = ( + "You are a grader assessing relevance of a retrieved document to a user question. \n " + "Here is the retrieved document: \n\n {context} \n\n" + "Here is the user question: {question} \n" + "If the document contains keyword(s) or semantic meaning related to the user question, grade it as relevant. \n" + "Give a binary score 'yes' or 'no' score to indicate whether the document is relevant to the question." +) + + +# highlight-next-line +class GradeDocuments(BaseModel): + """Grade documents using a binary score for relevance check.""" + + binary_score: str = Field( + description="Relevance score: 'yes' if relevant, or 'no' if not relevant" + ) + + +load_dotenv() +deployment_name = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "gpt-4o") +grader_model = init_chat_model(f"azure_openai:{deployment_name}") + + +def grade_documents( + state: MessagesState, +) -> Literal["generate_answer", "rewrite_question"]: + """Determine whether the retrieved documents are relevant to the question.""" + question = state["messages"][0].content + context = state["messages"][-1].content + + prompt = GRADE_PROMPT.format(question=question, context=context) + response = ( + grader_model + # highlight-next-line + .with_structured_output(GradeDocuments).invoke( + [{"role": "user", "content": prompt}] + ) + ) + score = response.binary_score + + if score == "yes": + return "generate_answer" + else: + return "rewrite_question" diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/nodes/__init__.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/nodes/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/nodes/generate_answer.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/nodes/generate_answer.py new file mode 100644 index 000000000000..42c2085d0819 --- /dev/null +++ 
b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/nodes/generate_answer.py @@ -0,0 +1,27 @@ +import os + +from dotenv import load_dotenv +from langchain.chat_models import init_chat_model +from langgraph.graph import MessagesState + +GENERATE_PROMPT = ( + "You are an assistant for question-answering tasks. " + "Use the following pieces of retrieved context to answer the question. " + "If you don't know the answer, just say that you don't know. " + "Use three sentences maximum and keep the answer concise.\n" + "Question: {question} \n" + "Context: {context}" +) + +load_dotenv() +deployment_name = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "gpt-4o") +response_model = init_chat_model(f"azure_openai:{deployment_name}") + + +def generate_answer(state: MessagesState): + """Generate an answer.""" + question = state["messages"][0].content + context = state["messages"][-1].content + prompt = GENERATE_PROMPT.format(question=question, context=context) + response = response_model.invoke([{"role": "user", "content": prompt}]) + return {"messages": [response]} diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/nodes/generate_query_or_respond.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/nodes/generate_query_or_respond.py new file mode 100644 index 000000000000..9e4bd761ba60 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/nodes/generate_query_or_respond.py @@ -0,0 +1,25 @@ +import os + +from dotenv import load_dotenv +from langchain.chat_models import init_chat_model +from langgraph.graph import MessagesState + +# Add the parent directory to the Python path to allow imports +# sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) +from tools.retriever_tool import retriever_tool + +load_dotenv() +deployment_name = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "gpt-4o") +response_model = init_chat_model(f"azure_openai:{deployment_name}") + + +def 
generate_query_or_respond(state: MessagesState): + """Call the model to generate a response based on the current state. Given + the question, it will decide to retrieve using the retriever tool, or simply respond to the user. + """ + response = ( + response_model + # highlight-next-line + .bind_tools([retriever_tool]).invoke(state["messages"]) + ) + return {"messages": [response]} diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/nodes/rewrite_question.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/nodes/rewrite_question.py new file mode 100644 index 000000000000..6113e1093ed4 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/nodes/rewrite_question.py @@ -0,0 +1,27 @@ +import os + +from dotenv import load_dotenv +from langchain.chat_models import init_chat_model +from langgraph.graph import MessagesState + +REWRITE_PROMPT = ( + "Look at the input and try to reason about the underlying semantic intent / meaning.\n" + "Here is the initial question:" + "\n ------- \n" + "{question}" + "\n ------- \n" + "Formulate an improved question:" +) + +load_dotenv() +deployment_name = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "gpt-4o") +response_model = init_chat_model(f"azure_openai:{deployment_name}") + + +def rewrite_question(state: MessagesState): + """Rewrite the original user question.""" + messages = state["messages"] + question = messages[0].content + prompt = REWRITE_PROMPT.format(question=question) + response = response_model.invoke([{"role": "user", "content": prompt}]) + return {"messages": [{"role": "user", "content": response.content}]} diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/requirements.txt b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/requirements.txt new file mode 100644 index 000000000000..18dcb4bfc1b7 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/requirements.txt @@ -0,0 +1,5 @@ +python-dotenv>=1.0.0 
+langchain_community==0.4.0 +beautifulsoup4==4.14.2 +azure-ai-agentserver-core +azure-ai-agentserver-langgraph diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/tools/__init__.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/tools/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/tools/retriever_tool.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/tools/retriever_tool.py new file mode 100644 index 000000000000..be586c088ac8 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/tools/retriever_tool.py @@ -0,0 +1,42 @@ +import os + +from dotenv import load_dotenv +from langchain_core.tools import create_retriever_tool +from langchain_community.document_loaders import WebBaseLoader +from langchain_core.vectorstores import InMemoryVectorStore +from langchain_openai import AzureOpenAIEmbeddings +from langchain_text_splitters import RecursiveCharacterTextSplitter + +load_dotenv() +deployment_name = os.getenv( + "AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME", "text-embedding-3-small" +) +model_name = os.getenv("AZURE_OPENAI_EMBEDDINGS_MODEL_NAME", deployment_name) +aoai_embeddings = AzureOpenAIEmbeddings( + model=model_name, + azure_deployment=deployment_name, +) + +urls = [ + "https://lilianweng.github.io/posts/2024-11-28-reward-hacking/", + "https://lilianweng.github.io/posts/2024-07-07-hallucination/", + "https://lilianweng.github.io/posts/2024-04-12-diffusion-video/", +] + +docs = [WebBaseLoader(url).load() for url in urls] +docs_list = [item for sublist in docs for item in sublist] + +text_splitter = RecursiveCharacterTextSplitter.from_tiktoken_encoder( + chunk_size=100, chunk_overlap=50 +) +doc_splits = text_splitter.split_documents(docs_list) +vectorstore = InMemoryVectorStore.from_documents( + documents=doc_splits, embedding=aoai_embeddings +) +retriever = vectorstore.as_retriever() + +retriever_tool = 
create_retriever_tool( + retriever, + "retrieve_blog_posts", + "Search and return information about Lilian Weng blog posts.", # cspell:disable-line +) diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/workflow.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/workflow.py new file mode 100644 index 000000000000..9f7809e888c9 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/agentic_rag/workflow.py @@ -0,0 +1,48 @@ +from edges.grade_documents import grade_documents +from langgraph.graph import END, START, MessagesState, StateGraph +from langgraph.prebuilt import ToolNode, tools_condition +from nodes.generate_answer import generate_answer + +# Try relative imports first (works when imported as module) +from nodes.generate_query_or_respond import generate_query_or_respond +from nodes.rewrite_question import rewrite_question +from tools.retriever_tool import retriever_tool + +from azure.ai.agentserver.langgraph import from_langgraph + +workflow = StateGraph(MessagesState) + +# Define the nodes we will cycle between +workflow.add_node(generate_query_or_respond) +workflow.add_node("retrieve", ToolNode([retriever_tool])) +workflow.add_node(rewrite_question) +workflow.add_node(generate_answer) + +workflow.add_edge(START, "generate_query_or_respond") + +# Decide whether to retrieve +workflow.add_conditional_edges( + "generate_query_or_respond", + # Assess LLM decision (call `retriever_tool` tool or respond to the user) + tools_condition, + { + # Translate the condition outputs to nodes in our graph + "tools": "retrieve", + END: END, + }, +) + +# Edges taken after the `action` node is called. 
This sample demonstrates how to host a LangGraph agent **with a custom internal state** using the `azure.ai.agentserver.langgraph` SDK by supplying a custom `LanggraphStateConverter` (`RAGStateConverter`). It shows the minimal pattern required to adapt OpenAI Responses-style requests to a LangGraph state and back to an OpenAI-compatible response.
+- Non‑streaming response path only (streaming intentionally not implemented). + +## Flow Overview +``` +CreateResponse request + -> RAGStateConverter.request_to_state + -> LangGraph executes nodes (analyze → retrieve? → answer) + -> Final state + -> RAGStateConverter.state_to_response + -> OpenAI-style response object +``` + +## Running +``` +python main.py +``` +Optional environment variables for live model call: +- AZURE_OPENAI_API_KEY +- AZURE_OPENAI_ENDPOINT (e.g. https://.cognitiveservices.azure.com/) +- AZURE_AI_MODEL_DEPLOYMENT_NAME (model deployment name) + +## Extending +| Goal | Change | +|------|--------| +| Real retrieval | Replace `retrieve_docs` with embedding + vector / search backend. | +| Richer answers | Introduce prompt templates or additional graph nodes. | +| Multi‑turn memory | Persist prior messages; include truncated history in `request_to_state`. | +| Tool / function calls | Add nodes producing tool outputs and incorporate into final response. | +| Better citations | Store offsets / URLs and expand annotation objects. | +| Streaming support | (See below) | + +### Adding Streaming +1. Allow `stream=True` in requests and propagate a flag into state. +2. Implement `get_stream_mode` (return appropriate mode, e.g. `events`). +3. Implement `state_to_response_stream` to yield `ResponseStreamEvent` objects (lifecycle + deltas) and finalize with a completed event. +4. Optionally collect incremental model tokens during `generate_answer`. + +## Key Takeaway +A custom `LanggraphStateConverter` is the seam where you map external request contracts to an internal graph-friendly state shape and then format the final (or streamed) result back to the OpenAI Responses schema. Start simple (non‑streaming), then layer retrieval sophistication, memory, tools, and streaming as needed. + +Streaming is not supported in this sample out-of-the-box. 
# Azure OpenAI connection settings, loaded from the environment / .env file.
API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
# Build the v1-style base URL only when an endpoint is configured. The previous
# `os.environ.get(...) + "openai/v1"` raised TypeError at import time when
# AZURE_OPENAI_ENDPOINT was unset, defeating the sample's documented graceful
# local fallback when credentials are absent; it also produced a malformed URL
# when the endpoint lacked a trailing slash.
_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
BASE_URL = _ENDPOINT.rstrip("/") + "/openai/v1" if _ENDPOINT else None
DEPLOYMENT = os.environ.get("AZURE_AI_MODEL_DEPLOYMENT_NAME")  # optional override
DEFAULT_MODEL = "gpt-4.1-mini"
--------------------------------------------------------------------------- +# LangGraph State definition +# --------------------------------------------------------------------------- +class RAGState(TypedDict, total=False): + query: str + messages: List[Dict[str, Any]] # simplified message records + needs_retrieval: bool + retrieved: List[Dict[str, Any]] # selected documents + answer_parts: List[str] # incremental answer assembly + final_answer: str # final answer text + _stream_events: List[Any] # buffered upstream model delta events (if any) + stream: bool # whether streaming was requested + + +# --------------------------------------------------------------------------- +# Utility: naive keyword scoring retrieval +# --------------------------------------------------------------------------- +KEYWORDS = { + "langgraph": ["langgraph", "graph"], + "retrieval": ["retrieval", "rag", "ground"], + "stream": ["stream", "latency", "partial"], +} + + +def retrieve_docs(question: str, k: int = 2) -> List[Dict[str, Any]]: + scores: List[tuple[float, KBEntry]] = [] + lower_q = question.lower() + for entry in KNOWLEDGE_BASE: + score = 0 + for token in entry.tags: + if token in lower_q: + score += 2 + for kw_group in KEYWORDS.values(): + for kw in kw_group: + if kw in lower_q and kw in entry.text.lower(): + score += 1 + if score > 0: + scores.append((score, entry)) + scores.sort(key=lambda t: t[0], reverse=True) + return [{"id": e.id, "text": e.text, "score": s} for s, e in scores[:k]] + + +# --------------------------------------------------------------------------- +# Custom Converter +# --------------------------------------------------------------------------- +class RAGStateConverter(LanggraphStateConverter): + """Converter implementing mini RAG logic (non‑streaming only).""" + + def get_stream_mode(self, context: AgentRunContext) -> str: # noqa: D401 + if context.request.get("stream", False): # type: ignore[attr-defined] + raise NotImplementedError("Streaming not 
supported in this sample.") + return "values" + + def request_to_state(self, context: AgentRunContext) -> Dict[str, Any]: # noqa: D401 + req = context.request + user_input = req.get("input") + if isinstance(user_input, list): + for item in user_input: + if isinstance(item, dict) and item.get("type") in ( + "message", + "input_text", + ): + user_input = item.get("content") or user_input + break + if isinstance(user_input, list): + user_input = " ".join(str(x) for x in user_input) + prompt = str(user_input or "") + messages = [] + instructions = req.get("instructions") + if instructions and isinstance(instructions, str): + messages.append({"role": "system", "content": instructions}) + messages.append({"role": "user", "content": prompt}) + res = { + "query": prompt, + "messages": messages, + "needs_retrieval": False, + "retrieved": [], + "answer_parts": [], + "stream": False, + } + print("initial state:", res) + return res + + def state_to_response( + self, state: Dict[str, Any], context: AgentRunContext + ) -> Response: # noqa: D401 + final_answer = state.get("final_answer") or "(no answer generated)" + print(f"convert state to response, state: {state}") + citations = state.get("retrieved", []) + output_item = { + "type": "message", + "role": "assistant", + "content": [ + { + "type": "output_text", + "text": final_answer, + "annotations": [ + { + "type": "citation", + "doc_id": c.get("id"), + "score": c.get("score"), + } + for c in citations + ], + } + ], + } + base = { + "object": "response", + "id": context.response_id, + "agent": context.get_agent_id_object(), + "conversation": context.get_conversation_object(), + "status": "completed", + "created_at": int(time.time()), + "output": [output_item], + } + return Response(**base) + + async def state_to_response_stream( # noqa: D401 + self, + stream_state: AsyncIterator[Dict[str, Any] | Any], + context: AgentRunContext, + ) -> AsyncGenerator[ResponseStreamEvent, None]: + raise NotImplementedError("Streaming not 
supported in this sample.") + + +# --------------------------------------------------------------------------- +# Graph Nodes +# --------------------------------------------------------------------------- + + +def _normalize_query(val: Any) -> str: + """Extract a lowercase text query from varied structures. + + Accepts: + * str + * dict with 'content' or 'text' + * list of mixed items (recursively extracts first textual segment) + Falls back to JSON stringification for unknown structures. + """ + if isinstance(val, str): + return val.strip().lower() + if isinstance(val, dict): + for k in ("content", "text", "value"): + v = val.get(k) + if isinstance(v, str) and v.strip(): + return v.strip().lower() + # flatten simple dict string values + parts = [str(v) for v in val.values() if isinstance(v, (str, int, float))] + if parts: + return " ".join(parts).lower() + if isinstance(val, list): + for item in val: # take first meaningful piece + extracted = _normalize_query(item) + if extracted: + return extracted + return "" + try: + return str(val).strip().lower() + except Exception: # noqa: BLE001 + return "" + + +def analyze_intent(state: RAGState) -> RAGState: + raw_q = state.get("query", "") + q = _normalize_query(raw_q) + keywords = ("what", "how", "explain", "retrieval", "langgraph", "stream") + needs = any(kw in q for kw in keywords) + state["needs_retrieval"] = needs + # Also store normalized form for downstream nodes if different + if isinstance(raw_q, (dict, list)): + state["query"] = q + return state + + +def retrieve_if_needed(state: RAGState) -> RAGState: + if state.get("needs_retrieval"): + state["retrieved"] = retrieve_docs(state.get("query", "")) + return state + + +def generate_answer(state: RAGState) -> RAGState: + query = state.get("query", "") + retrieved = state.get("retrieved", []) + + model_name = DEPLOYMENT or DEFAULT_MODEL + + def synthesize_answer() -> tuple[str, List[str]]: + if not retrieved: + text = f"Answer: {query}" if query else "No question 
provided." + return text, [text] + doc_summaries = "; ".join(r["text"] for r in retrieved) + answer = f"Based on docs: {doc_summaries}\n\nAnswer: {query}"[:4000] + return answer, [answer] + + if API_KEY and BASE_URL: + client = OpenAI(api_key=API_KEY, base_url=BASE_URL) + try: + resp = client.responses.create(model=model_name, input=query) + text = getattr(resp, "output_text", None) + if not text: + text = json.dumps(resp.model_dump(mode="json", exclude_none=True))[:500] + state["final_answer"] = text + state["answer_parts"] = [text] + return state + except OpenAIError: # fallback + state["final_answer"], state["answer_parts"] = synthesize_answer() + return state + state["final_answer"], state["answer_parts"] = synthesize_answer() + return state + + +# --------------------------------------------------------------------------- +# Build the LangGraph +# --------------------------------------------------------------------------- + + +def _build_graph(): + graph = StateGraph(RAGState) + graph.add_node("analyze", analyze_intent) + graph.add_node("retrieve", retrieve_if_needed) + graph.add_node("answer", generate_answer) + + graph.add_edge(START, "analyze") + graph.add_edge("analyze", "retrieve") + graph.add_edge("retrieve", "answer") + graph.add_edge("answer", END) + return graph.compile() + + +# --------------------------------------------------------------------------- +# Entry Point +# --------------------------------------------------------------------------- +if __name__ == "__main__": + graph = _build_graph() + converter = RAGStateConverter() + from_langgraph(graph, converter).run() diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/custom_state/requirements.txt b/sdk/ai/azure-ai-agentserver-langgraph/samples/custom_state/requirements.txt new file mode 100644 index 000000000000..2d00898c9143 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/custom_state/requirements.txt @@ -0,0 +1,5 @@ +python-dotenv>=1.0.0 +azure-ai-agentserver-core 
+- azure-ai-agentserver-langgraph +openai +fastapi \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/.env-template b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/.env-template new file mode 100644 index 000000000000..04c14955bc69 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/.env-template @@ -0,0 +1,5 @@ +AZURE_OPENAI_API_KEY= +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= +GITHUB_TOKEN= diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/README.md b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/README.md new file mode 100644 index 000000000000..cd9e88506127 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/README.md @@ -0,0 +1,126 @@ +# LangGraph MCP GitHub Token Sample + +This sample shows how to wrap a LangGraph ReAct-style agent that is augmented with an MCP (Model Context Protocol) server requiring an API key / personal access token (GitHub) and expose it through the Azure AI Agents Adapter so it can be called via the unified `responses` endpoint. + +Compared to `mcp_simple`, this version demonstrates adding authorization headers (Bearer token) for an MCP server (GitHub) that expects a token. + +## What It Does + +`mcp_apikey.py`: +1. Loads environment variables from a local `.env` file. +2. Creates an Azure OpenAI chat model deployment (defaults to `gpt-4o`, override with `AZURE_OPENAI_CHAT_DEPLOYMENT_NAME`). +3. Reads a GitHub access token (`GITHUB_TOKEN`). This can be a classic or fine‑grained PAT (or an OAuth access token you obtained elsewhere). +4. Constructs a `MultiServerMCPClient` pointing at the public GitHub MCP endpoint and injects the token as an `Authorization: Bearer ...` header. +5. Fetches the available MCP tools exposed by the GitHub server. +6. Builds a LangGraph ReAct agent (`create_react_agent`) with those tools. +7. 
Hosts the agent using `from_langgraph(...).run_async()` making it available over HTTP (default: `http://localhost:8088`). + +## Folder Contents + +- `mcp_apikey.py` – Main script that builds and serves the token-authenticated MCP agent. +- `.env-template` – Template for required environment variables. +- `.env` – (User created) Actual secrets/endpoint values. Not committed. + +## Prerequisites + +Dependencies used by `mcp_apikey.py`: +- agents_adapter[langgraph] +- python-dotenv +- langchain-mcp-adapters + +Install: +```bash +pip install -e container_agents_adapter/python[langgraph] +pip install python-dotenv langchain-mcp-adapters +``` + +Requires Python 3.11+, Azure OpenAI deployment, and a `GITHUB_TOKEN`. + +## Environment Variables + +Copy `.env-template` to `.env` and fill in values: +``` +AZURE_OPENAI_API_KEY= +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +# Optional if your deployment name differs from gpt-4o +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= + +# GitHub MCP auth (required) +GITHUB_TOKEN= +``` +Notes: +- `AZURE_OPENAI_CHAT_DEPLOYMENT_NAME` defaults to `gpt-4o` if omitted. +- Do NOT commit `.env`. + +## (Dependencies Covered Above) + +## Run the Sample + +From the `mcp-apikey` folder (or anywhere after install) run: +```bash +python mcp_apikey.py +``` +The adapter starts an HTTP server (default `http://localhost:8088`). + +## Test the Agent + +Non-streaming example: +```bash +curl -X POST http://localhost:8088/responses \ + -H "Content-Type: application/json" \ + -d '{ + "agent": {"name": "local_agent", "type": "agent_reference"}, + "stream": false, + "input": "Use ONLY the Microsoft Learn MCP tools exposed by the connected MCP server (no built-in web search, no cached data).call the \"list tools\" capability and record the exact tool names returned.Use the search tool to query: \"Model Context Protocol\" (limit 3).Pick the top result and use the fetch tool to retrieve details/content for that document." 
+ }' +``` + +Streaming example (server will stream delta events): +```bash +curl -N -X POST http://localhost:8088/responses \ + -H "Content-Type: application/json" \ + -d '{ + "agent": {"name": "local_agent", "type": "agent_reference"}, + "stream": true, + "input": "Use ONLY the Microsoft Learn MCP tools exposed by the connected MCP server (no built-in web search, no cached data).call the \"list tools\" capability and record the exact tool names returned.Use the search tool to query: \"Model Context Protocol\" (limit 3).Pick the top result and use the fetch tool to retrieve details/content for that document." + }' +``` + +Alternatively, you can send the richer structured message format: +```bash +curl -X POST http://localhost:8088/responses \ + -H "Content-Type: application/json" \ + -d '{ + "agent": {"name": "local_agent", "type": "agent_reference"}, + "stream": false, + "input": [{ + "type": "message", + "role": "user", + "content": [{"type": "input_text", "text": "Use ONLY the Microsoft Learn MCP tools exposed by the connected MCP server (no built-in web search, no cached data).call the \"list tools\" capability and record the exact tool names returned.Use the search tool to query: \"Model Context Protocol\" (limit 3).Pick the top result and use the fetch tool to retrieve details/content for that document."}] + }] + }' +``` + +## Customization Ideas + +- Add additional MCP endpoints (e.g., documentation, search, custom internal tools). +- Swap `create_react_agent` for a custom LangGraph graph with memory, guardrails, or ranking. +- Integrate tracing / telemetry (LangSmith, OpenTelemetry) by adding callbacks to the model / agent. 
+ +## Troubleshooting + +| Issue | Likely Cause | Fix | +|-------|--------------|-----| +| 401 from MCP server | Missing/invalid `GITHUB_TOKEN` | Regenerate PAT; ensure env var loaded | +| 401 / auth from model | Azure key/endpoint incorrect | Re-check `.env` values | +| Model not found | Deployment name mismatch | Set `AZURE_OPENAI_CHAT_DEPLOYMENT_NAME` correctly | +| No tools listed | GitHub MCP endpoint changed | Verify endpoint URL & token scopes | +| Import errors | Extras not installed | Re-run dependency install | + +## Related Samples + +See `samples/langgraph/mcp_simple` for a no-auth MCP example and `samples/langgraph/agent_calculator` for arithmetic tooling. + +--- +Extend this pattern to securely integrate additional authenticated MCP servers. diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/mcp_apikey.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/mcp_apikey.py new file mode 100644 index 000000000000..12f5c50aadae --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/mcp_apikey.py @@ -0,0 +1,66 @@ +# Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + +import asyncio +import os + +from dotenv import load_dotenv +from importlib.metadata import version +from langchain_mcp_adapters.client import MultiServerMCPClient +from langchain_openai import AzureChatOpenAI + +from azure.ai.agentserver.langgraph import from_langgraph + +load_dotenv()  # Load .env with Azure + GitHub credentials + + +def _get_required_env(name: str) -> str: + value = os.getenv(name) + if not value: + raise RuntimeError( + f"Missing required environment variable '{name}'. Please define it in your .env file." 
+ ) + return value + + +def create_agent(model, tools): + # for different langgraph versions + langgraph_version = version("langgraph") + if langgraph_version < "1.0.0": + from langgraph.prebuilt import create_react_agent + + return create_react_agent(model, tools) + else: + from langchain.agents import create_agent + + return create_agent(model, tools) + + +async def build_agent(): + deployment = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "gpt-4o") + model = AzureChatOpenAI(model=deployment) + + github_token = _get_required_env("GITHUB_TOKEN") + + client = MultiServerMCPClient( + { + "github": { + "url": "https://api.githubcopilot.com/mcp/", + "transport": "streamable_http", + "headers": {"Authorization": f"Bearer {github_token}"}, + } + } + ) + + tools = await client.get_tools() + agent = create_agent(model, tools) + return agent + + +async def _main(): + agent = await build_agent() + await from_langgraph(agent).run_async() + + +if __name__ == "__main__": + asyncio.run(_main()) diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/requirements.txt b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/requirements.txt new file mode 100644 index 000000000000..e2ae1f5f6bf2 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_apikey/requirements.txt @@ -0,0 +1,4 @@ +python-dotenv>=1.0.0 +langchain-mcp-adapters==0.1.11 +azure-ai-agentserver-core +azure-ai-agentserver-langgraph diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/.env-template b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/.env-template new file mode 100644 index 000000000000..92b9c812a686 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/.env-template @@ -0,0 +1,4 @@ +AZURE_OPENAI_API_KEY= +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/README.md 
b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/README.md new file mode 100644 index 000000000000..4414a64ebcf9 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/README.md @@ -0,0 +1,121 @@ +# LangGraph MCP Simple Sample + +This sample shows how to wrap a LangGraph ReAct-style agent that is augmented with MCP (Model Context Protocol) tools (sourced from Microsoft Learn) and expose it through the Azure AI Agents Adapter so it can be called using the standard responses endpoint. + +## What It Does + +`mcp_simple.py`: +1. Loads environment variables from a local `.env` file (see template below). +2. Creates an Azure OpenAI chat model (`gpt-4o`) via `AzureChatOpenAI`. +3. Constructs an MCP multi-server client (`MultiServerMCPClient`) pointing at the Microsoft Learn MCP endpoint. +4. Fetches available MCP tools and builds a LangGraph ReAct agent with those tools (`create_react_agent`). +5. Hosts the agent using `from_langgraph(...).run_async()` so it is available over HTTP on `http://localhost:8088` (default adapter port). + +## Folder Contents + +- `mcp_simple.py` – Main script that builds and serves the agent. +- `.env-template` – Template for required Azure OpenAI environment variables. +- `.env` – (User created) Actual secrets/endpoint values. Not committed. + +## Prerequisites + +Dependencies used by `mcp_simple.py`: +- agents_adapter with langgraph extra (brings langgraph, langchain, langchain-openai) +- python-dotenv +- langchain-mcp-adapters + +Install (from repo root): +```bash +pip install -e container_agents_adapter/python[langgraph] +pip install python-dotenv langchain-mcp-adapters +``` + +Environment needs Azure OpenAI variables (see below). Requires Python 3.11+. 
+ +## Environment Variables + +Copy `.env-template` to `.env` and fill in real values: +``` +AZURE_OPENAI_API_KEY= +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +``` +If you use a deployment name different from `gpt-4o`, adjust the `model="gpt-4o"` parameter in `mcp_simple.py` accordingly (e.g., the model argument must match your Azure OpenAI deployment name, not the base model family if they differ). + +## (Dependencies Covered Above) + +## Run the Sample + +From the `mcp_simple` folder (or anywhere after install) run: +```bash +python mcp_simple.py +``` +The adapter will start an HTTP server (default: `http://localhost:8088`). When ready, you can send a request to the unified responses endpoint. + +## Test the Agent + +Non-streaming example: +```bash +curl -X POST http://localhost:8088/responses \ + -H "Content-Type: application/json" \ + -d '{ + "agent": {"name": "local_agent", "type": "agent_reference"}, + "stream": false, + "input": "Give me a short summary about Azure OpenAI" + }' +``` + +Streaming example (server will stream delta events): +```bash +curl -N -X POST http://localhost:8088/responses \ + -H "Content-Type: application/json" \ + -d '{ + "agent": {"name": "local_agent", "type": "agent_reference"}, + "stream": true, + "input": "List two learning resources about Azure Functions" + }' +``` + +Alternatively, you can send the richer structured message format: +```bash +curl -X POST http://localhost:8088/responses \ + -H "Content-Type: application/json" \ + -d '{ + "agent": {"name": "local_agent", "type": "agent_reference"}, + "stream": false, + "input": [{ + "type": "message", + "role": "user", + "content": [{"type": "input_text", "text": "What learning paths cover Azure AI?"}] + }] + }' +``` + +## MCP Tooling Notes + +- `MultiServerMCPClient` connects to one or more MCP servers; here we configure a single `mslearn` server. 
+- `get_tools()` returns tool schemas that LangGraph incorporates, enabling the agent to decide when to call MCP tools. +- The Microsoft Learn MCP endpoint can surface search / retrieval style tools (subject to availability) so the agent can ground answers. + +## Customization Ideas + +- Add more MCP endpoints by extending the dictionary passed to `MultiServerMCPClient`. +- Swap `create_react_agent` for a custom LangGraph graph if you need more control (e.g., tool prioritization, guardrails, memory). +- Introduce logging or tracing (e.g., LangSmith) by configuring callbacks on the model or agent. + +## Troubleshooting + +| Issue | Likely Cause | Fix | +|-------|--------------|-----| +| 401 / auth errors from model | Wrong or missing key / endpoint | Re-check `.env` values and Azure OpenAI resource permissions | +| Model not found | Deployment name mismatch | Use your actual Azure deployment name in `AzureChatOpenAI(model=...)` | +| No tools available | MCP endpoint change / network issue | Confirm the MCP URL and that it returns tool definitions | +| Import errors for langgraph or adapter | Extras not installed | Re-run `pip install -e .[langgraph]` | + + +## Related Samples + +See `samples/langgraph/agent_calculator` for another LangGraph + adapter example with arithmetic tools. + +--- +Happy hacking! Modify and extend the MCP tool set to build richer contextual agents. diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/mcp_simple.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/mcp_simple.py new file mode 100644 index 000000000000..1b3c996386f0 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/mcp_simple.py @@ -0,0 +1,56 @@ +# Copyright (c) Microsoft. All rights reserved. +"""Minimal LangGraph + MCP sample. + +Loads an MCP server (Microsoft Learn) and exposes a LangGraph ReAct agent + through the agents_adapter server. 
+""" + +import asyncio +import os + +from dotenv import load_dotenv +from importlib.metadata import version +from langchain_mcp_adapters.client import MultiServerMCPClient +from langchain_openai import AzureChatOpenAI + +from azure.ai.agentserver.langgraph import from_langgraph + +load_dotenv() + + +def create_agent(model, tools): + # for different langgraph versions + langgraph_version = version("langgraph") + if langgraph_version < "1.0.0": + from langgraph.prebuilt import create_react_agent + + return create_react_agent(model, tools) + else: + from langchain.agents import create_agent + + return create_agent(model, tools) + + +async def quickstart(): + """Build and return a LangGraph agent wired to an MCP client.""" + deployment = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "gpt-4o") + model = AzureChatOpenAI(model=deployment) + client = MultiServerMCPClient( + { + "mslearn": { + "url": "https://learn.microsoft.com/api/mcp", + "transport": "streamable_http", + } + } + ) + tools = await client.get_tools() + return create_agent(model, tools) + + +async def main(): # pragma: no cover - sample entrypoint + agent = await quickstart() + await from_langgraph(agent).run_async() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/requirements.txt b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/requirements.txt new file mode 100644 index 000000000000..ab8d43c36684 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/mcp_simple/requirements.txt @@ -0,0 +1,5 @@ +python-dotenv>=1.0.0 +langchain-mcp-adapters==0.1.11 +azure-ai-agentserver-core +azure-ai-agentserver-langgraph + diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/.env-template b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/.env-template new file mode 100644 index 000000000000..41799808bfd6 --- /dev/null +++ 
b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/.env-template @@ -0,0 +1,7 @@ +AZURE_OPENAI_API_KEY= +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= +CHECKPOINTER_REDIS_URL= +CHECKPOINTER_REDIS_KEY= +CHECKPOINTER_REDIS_PORT=10000 diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/README.md b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/README.md new file mode 100644 index 000000000000..0fe1660269ca --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/README.md @@ -0,0 +1,68 @@ +# simple_agent_with_redis_checkpointer + +This sample demonstrates a LangGraph-based simple agent that uses an Azure managed Redis instance as a checkpointer. + +# Prerequisites +Create an Azure Managed Redis instance + +1) Install the Redis Enterprise CLI extension (if not already installed) + ``` + az extension add --name redisenterprise + ``` + +2) Create a resource group (example) + ``` + az group create --name myRedisRG --location eastus + ``` + +3) Create a Redis Enterprise instance with RedisJSON and RediSearch modules enabled + Create an [Azure Managed Redis instance](https://learn.microsoft.com/azure/redis/quickstart-create-managed-redis). For LangGraph checkpointer, the instance must have RedisJSON and RediSearch enabled. Clustering-policy should be EnterpriseCluster. Those configurations have to be set when creating. Redis sku and capacities can be configured with your needs. + + When your redis instance is ready, add the redis information to environment variables. + +# Setup + +1. 
**Environment Configuration** + Create a `.env` file in this directory with your Azure OpenAI and Redis configuration: + ``` + AZURE_OPENAI_API_KEY= + AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ + OPENAI_API_VERSION=2025-03-01-preview + CHECKPOINTER_REDIS_URL=..redis.azure.net + CHECKPOINTER_REDIS_KEY= + CHECKPOINTER_REDIS_PORT=10000 + ``` + And install python-dotenv + ```bash + pip install python-dotenv langgraph-checkpoint-redis + ``` + +2. **Install Dependencies** + Required Python packages (install via pip): + ```bash + cd container_agents/container_agent_adapter/python + pip install -e .[langgraph] + ``` + + +# Running as HTTP Server + +1. Start the agent server: + ```bash + python main.py + ``` + The server will start on `http://localhost:8088` + +2. Test the agent: + ```bash + curl -X POST http://localhost:8088/responses \ + -H "Content-Type: application/json" \ + -d '{ + "agent": { + "name": "local_agent", + "type": "agent_reference" + }, + "stream": false, + "input": "What is 15 divided by 3?", 
+ "conversation": {"id": "test-conversation-id"} + }' \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/main.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/main.py new file mode 100644 index 000000000000..7b1c2b5e9fb1 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/main.py @@ -0,0 +1,72 @@ +import asyncio +import os + +from importlib.metadata import version +from dotenv import load_dotenv +from langchain_core.tools import tool +from langchain_openai import AzureChatOpenAI +from langgraph.checkpoint.redis.aio import AsyncRedisSaver +from redis.asyncio import Redis + +from azure.ai.agentserver.langgraph import from_langgraph + +load_dotenv() + +client = Redis( + host=os.getenv("CHECKPOINTER_REDIS_URL"), + port=os.getenv("CHECKPOINTER_REDIS_PORT"), + password=os.getenv("CHECKPOINTER_REDIS_KEY"), + ssl=True, + decode_responses=False, # RedisSaver expects bytes +) + +deployment_name = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "gpt-4o") +model = AzureChatOpenAI(model=deployment_name) + + +@tool +def get_word_length(word: str) -> int: + """Returns the length of a word.""" + return len(word) + + +@tool +def calculator(expression: str) -> str: + """Evaluates mathematical expression""" + try: + maths_result = eval(expression) + return str(maths_result) + except Exception as e: + return f"Error: {str(e)}" + + +tools = [get_word_length, calculator] + + +def create_agent(model, tools, checkpointer): + # for different langgraph versions + langgraph_version = version("langgraph") + if langgraph_version < "1.0.0": + from langgraph.prebuilt import create_react_agent + + return create_react_agent(model, tools, checkpointer=checkpointer) + else: + from langchain.agents import create_agent + + return create_agent(model, tools, checkpointer=checkpointer) + + +async def run_async(): + # Pass the configured client 
to RedisSaver + # adapter uses astream/ainvoke so we need async checkpointer + saver = AsyncRedisSaver(redis_client=client) + await saver.asetup() + + executor = create_agent(model, tools, checkpointer=saver) + # start server with async + await from_langgraph(executor).run_async() + + +if __name__ == "__main__": + # host the langgraph agent + asyncio.run(run_async()) diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/requirements.txt b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/requirements.txt new file mode 100644 index 000000000000..8687d2061ad5 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_agent_with_redis_checkpointer/requirements.txt @@ -0,0 +1,5 @@ +python-dotenv>=1.0.0 +langgraph-checkpoint-redis==0.1.2 +azure-ai-agentserver-core +azure-ai-agentserver-langgraph + diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_react_agent/.env-template b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_react_agent/.env-template new file mode 100644 index 000000000000..92b9c812a686 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_react_agent/.env-template @@ -0,0 +1,4 @@ +AZURE_OPENAI_API_KEY= +AZURE_OPENAI_ENDPOINT=https://.cognitiveservices.azure.com/ +OPENAI_API_VERSION=2025-03-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_react_agent/main.py b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_react_agent/main.py new file mode 100644 index 000000000000..b3249ef6ecb1 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_react_agent/main.py @@ -0,0 +1,53 @@ +import os + +from dotenv import load_dotenv +from importlib.metadata import version +from langchain_core.tools import tool +from langchain_openai import AzureChatOpenAI +from langgraph.checkpoint.memory import MemorySaver + +from azure.ai.agentserver.langgraph import 
from_langgraph + +load_dotenv() + +memory = MemorySaver() +deployment_name = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "gpt-4o") +model = AzureChatOpenAI(model=deployment_name) + + +@tool +def get_word_length(word: str) -> int: + """Returns the length of a word.""" + return len(word) + + +@tool +def calculator(expression: str) -> str: + """Evaluates mathematical expression""" + try: + maths_result = eval(expression) + return str(maths_result) + except Exception as e: + return f"Error: {str(e)}" + + +def create_agent(model, tools, checkpointer): + # for different langgraph versions + langgraph_version = version("langgraph") + if langgraph_version < "1.0.0": + from langgraph.prebuilt import create_react_agent + + return create_react_agent(model, tools, checkpointer=checkpointer) + else: + from langchain.agents import create_agent + + return create_agent(model, tools, checkpointer=checkpointer) + + +tools = [get_word_length, calculator] + +agent_executor = create_agent(model, tools, memory) + +if __name__ == "__main__": + # host the langgraph agent + from_langgraph(agent_executor).run() diff --git a/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_react_agent/requirements.txt b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_react_agent/requirements.txt new file mode 100644 index 000000000000..5d7322e06ed8 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/samples/simple_react_agent/requirements.txt @@ -0,0 +1,3 @@ +python-dotenv>=1.0.0 +azure-ai-agentserver-core +azure-ai-agentserver-langgraph \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-langgraph/tests/__init__.py b/sdk/ai/azure-ai-agentserver-langgraph/tests/__init__.py new file mode 100644 index 000000000000..4a5d26360bce --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/tests/__init__.py @@ -0,0 +1 @@ +# Unit tests package diff --git a/sdk/ai/azure-ai-agentserver-langgraph/tests/conftest.py b/sdk/ai/azure-ai-agentserver-langgraph/tests/conftest.py new file mode 
100644 index 000000000000..7f055e40010c --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/tests/conftest.py @@ -0,0 +1,10 @@ +""" +Pytest configuration and shared fixtures for unit tests. +""" + +import sys +from pathlib import Path + +# Add the src directory to the Python path so we can import modules under test +src_path = Path(__file__).parent.parent.parent / "src" +sys.path.insert(0, str(src_path)) diff --git a/sdk/ai/azure-ai-agentserver-langgraph/tests/unit_tests/test_langgraph_request_converter.py b/sdk/ai/azure-ai-agentserver-langgraph/tests/unit_tests/test_langgraph_request_converter.py new file mode 100644 index 000000000000..84a8c8784d8b --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-langgraph/tests/unit_tests/test_langgraph_request_converter.py @@ -0,0 +1,121 @@ +import pytest +from langchain_core import messages as langgraph_messages + +from azure.ai.agentserver.core import models +from azure.ai.agentserver.core.models import projects as project_models +from azure.ai.agentserver.langgraph import models as langgraph_models + + +@pytest.mark.unit +def test_convert_implicit_user_message(): + """Test conversion of ImplicitUserMessage to HumanMessage.""" + + input_data = "input text string" + implicit_user_message = {"content": input_data} + create_response = models.CreateResponse( + input=[implicit_user_message], + ) + + converter = langgraph_models.LangGraphRequestConverter(create_response) + res = converter.convert() + + assert "messages" in res + assert len(res["messages"]) == 1 + assert isinstance(res["messages"][0], langgraph_messages.HumanMessage) + assert res["messages"][0].content == input_data + + +@pytest.mark.unit +def test_convert_implicit_user_message_with_contents(): + """Test conversion of ImplicitUserMessage with list of contents to HumanMessage.""" + + input_data = [ + {"text": "text content", "type": "input_text"}, + ] + create_response = models.CreateResponse(input=[{"content": input_data}]) + + converter = 
langgraph_models.LangGraphRequestConverter(create_response) + res = converter.convert() + + assert "messages" in res + assert len(res["messages"]) == 1 + assert isinstance(res["messages"][0], langgraph_messages.HumanMessage) + assert isinstance(res["messages"][0].content, list) + assert len(res["messages"][0].content) == len(input_data) + + for item_content, content in zip(input_data, res["messages"][0].content): + assert isinstance(content, dict) + assert content["type"] == "text" + assert content["text"] == item_content.get("text") + + +@pytest.mark.unit +def test_convert_item_param_message(): + """Test conversion of ItemParam of type MESSAGE to corresponding message.""" + + input_data = [ + {"role": "user", "content": "user message"}, + {"role": "assistant", "content": "assistant message"}, + {"role": "system", "content": "system message"}, + ] + create_response = models.CreateResponse( + input=input_data, + ) + converter = langgraph_models.LangGraphRequestConverter(create_response) + res = converter.convert() + + assert "messages" in res + assert len(res["messages"]) == len(input_data) + + for item, message in zip(input_data, res["messages"]): + if item["role"] == project_models.ResponsesMessageRole.USER: + assert isinstance(message, langgraph_messages.HumanMessage) + elif item["role"] == project_models.ResponsesMessageRole.ASSISTANT: + assert isinstance(message, langgraph_messages.AIMessage) + elif item["role"] == project_models.ResponsesMessageRole.SYSTEM: + assert isinstance(message, langgraph_messages.SystemMessage) + else: + pytest.fail(f"Unexpected role: {item['role']}") + + assert isinstance(message.content, str) + assert message.content == item["content"] + + +@pytest.mark.unit +def test_convert_item_param_function_call_and_function_call_output(): + """Test conversion of ItemParam of type FUNCTION_CALL and FUNCTION_CALL_OUTPUT to corresponding message.""" + + input_data = [ + { + "type": "function_call", + "call_id": 
"call_001", + "name": "get_ticket_status", + "arguments": '{"ticket_number": "845732"}', + "status": "completed", + }, + { + "type": "function_call_output", + "call_id": "call_001", + "output": ('{"ticket_number": "845732", "status": "in_progress", "last_updated": "2024-07-15T09:42:00Z"}'), + "status": "completed", + }, + ] + create_response = models.CreateResponse( + input=input_data, + ) + converter = langgraph_models.LangGraphRequestConverter(create_response) + res = converter.convert() + assert "messages" in res + assert len(res["messages"]) == len(input_data) + assert isinstance(res["messages"][0], langgraph_messages.AIMessage) + assert res["messages"][0].tool_calls is not None + assert len(res["messages"][0].tool_calls) == 1 + tool_call_detail = res["messages"][0].tool_calls[0] + assert tool_call_detail["id"] == "call_001" + assert tool_call_detail["name"] == "get_ticket_status" + assert tool_call_detail["args"] == {"ticket_number": "845732"} + assert isinstance(res["messages"][1], langgraph_messages.ToolMessage) + assert res["messages"][1].tool_call_id == "call_001" + assert res["messages"][1].content == ( + '{"ticket_number": "845732", "status": "in_progress", "last_updated": "2024-07-15T09:42:00Z"}' + ) diff --git a/sdk/ai/ci.yml b/sdk/ai/ci.yml index 20ceb03e1897..b7682b6839b6 100644 --- a/sdk/ai/ci.yml +++ b/sdk/ai/ci.yml @@ -63,6 +63,12 @@ extends: safeName: azureaiagents - name: azure-ai-voicelive safeName: azureaivoicelive + - name: azure-ai-agentserver-core + safeName: azureaiagentservercore + - name: azure-ai-agentserver-langgraph + safeName: azureaiagentserverlanggraph + - name: azure-ai-agentserver-agentframework + safeName: azureaiagentserveragentframework # These packages are deprecated: # - name: azure-ai-generative # safeName: azureaigenerative diff --git a/shared_requirements.txt b/shared_requirements.txt index f6f6bab34b2e..67ee6594cc6c 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -83,4 +83,9 @@ prompty Jinja2 
azure-ai-language-conversations azure-ai-textanalytics -azure-confidentialledger-certificate \ No newline at end of file +azure-confidentialledger-certificate +azure-ai-projects +starlette +uvicorn +opentelemetry-exporter-otlp-proto-http +opentelemetry-exporter-otlp-proto-grpc \ No newline at end of file