diff --git a/eng/.docsettings.yml b/eng/.docsettings.yml index 715e5002af8b..6d53ed8ee718 100644 --- a/eng/.docsettings.yml +++ b/eng/.docsettings.yml @@ -22,7 +22,7 @@ omitted_paths: language: python root_check_enabled: True required_readme_sections: - - ^Azure (.+ client library for Python|Smoke Test for Python) + - ^Azure (.+ client library for Python|Smoke Test for Python|Agent Server Adapter for Python) - ^Getting started$ - ^Key concepts$ - ^Examples$ diff --git a/eng/tools/azure-sdk-tools/ci_tools/functions.py b/eng/tools/azure-sdk-tools/ci_tools/functions.py index 09295b829000..a41f6fa683c2 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/functions.py +++ b/eng/tools/azure-sdk-tools/ci_tools/functions.py @@ -55,7 +55,10 @@ "sdk/textanalytics/azure-ai-textanalytics", ] -TEST_COMPATIBILITY_MAP = {"azure-ai-ml": ">=3.7"} +TEST_COMPATIBILITY_MAP = { + "azure-ai-ml": ">=3.7", + "azure-ai-agentserver-core": ">=3.9", # override to allow build with python 3.9 +} TEST_PYTHON_DISTRO_INCOMPATIBILITY_MAP = { "azure-storage-blob": "pypy", "azure-storage-queue": "pypy", diff --git a/sdk/ai/azure-ai-agentserver-core/CHANGELOG.md b/sdk/ai/azure-ai-agentserver-core/CHANGELOG.md new file mode 100644 index 000000000000..7ce1742693b8 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/CHANGELOG.md @@ -0,0 +1,7 @@ +# Release History + +## 1.0.0a1 (2025-11-06) + +### Features Added + +First version diff --git a/sdk/ai/azure-ai-agentserver-core/LICENSE b/sdk/ai/azure-ai-agentserver-core/LICENSE new file mode 100644 index 000000000000..63447fd8bbbf --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-core/MANIFEST.in b/sdk/ai/azure-ai-agentserver-core/MANIFEST.in new file mode 100644 index 000000000000..eefbfbed7925 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/MANIFEST.in @@ -0,0 +1,9 @@ +include *.md +include LICENSE +recursive-include tests *.py +recursive-include samples *.py *.md +recursive-include doc *.rst *.md +include azure/__init__.py +include azure/ai/__init__.py +include azure/ai/agentserver/__init__.py +include azure/ai/agentserver/core/py.typed diff --git a/sdk/ai/azure-ai-agentserver-core/README.md b/sdk/ai/azure-ai-agentserver-core/README.md new file mode 100644 index 000000000000..659daefc68e7 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/README.md @@ -0,0 +1,119 @@ +# Azure AI Agent Server Adapter for Python + + +## Getting started + +```bash +pip install azure-ai-agentserver-core +``` + +## Key concepts + +This is the core package for the Azure AI Agent Server. It hosts your agent as a container in the cloud. + +You can talk to your agent using the azure-ai-projects SDK. + + +## Examples + +If your agent is not built with a supported framework such as LangGraph or Agent Framework, you can still make it compatible with Microsoft AI Foundry by implementing the predefined interface manually. + +```python +import datetime + +from azure.ai.agentserver.core import FoundryCBAgent +from azure.ai.agentserver.core.models import ( + CreateResponse, + Response as OpenAIResponse, +) +from azure.ai.agentserver.core.models.projects import ( + ItemContentOutputText, + ResponsesAssistantMessageItemResource, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, +) + + +def stream_events(text: str): + assembled = "" + for i, token in enumerate(text.split(" ")): + piece = token if i == len(text.split(" ")) - 1 else token + " " + assembled += piece + yield ResponseTextDeltaEvent(delta=piece) + # Done with text + yield ResponseTextDoneEvent(text=assembled) + + +async def agent_run(request_body: CreateResponse): + agent = request_body.agent + print(f"agent:{agent}") + + if request_body.stream: + return stream_events("I am mock agent with no intelligence in stream mode.") + + # Build assistant output content + output_content = [ + ItemContentOutputText( + text="I am mock agent with no intelligence.", + annotations=[], + ) + ] + + response = OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="me", + id="id", + created_at=datetime.datetime.now(), + output=[ + ResponsesAssistantMessageItemResource( + status="completed", + content=output_content, + ) + ], + ) + return response + + +my_agent = FoundryCBAgent() +my_agent.agent_run = agent_run + +if __name__ == "__main__": + my_agent.run() + +``` + +## Troubleshooting + +First, run your agent with azure-ai-agentserver-core locally. + +If it works locally but fails in the cloud, check the logs in the Application Insights resource connected to your Azure AI Foundry project. + + +### Reporting issues + +To report an issue with the client library, or request additional features, please open a GitHub issue [here](https://github.com/Azure/azure-sdk-for-python/issues). Mention the package name "azure-ai-agentserver-core" in the title or content. + + +## Next steps + +Please visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agentserver-core/samples) folder. It contains several examples that show how to build your agent with azure-ai-agentserver. + + +## Contributing + +This project welcomes contributions and suggestions. 
Most contributions require +you to agree to a Contributor License Agreement (CLA) declaring that you have +the right to, and actually do, grant us the rights to use your contribution. +For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether +you need to provide a CLA and decorate the PR appropriately (e.g., label, +comment). Simply follow the instructions provided by the bot. You will only +need to do this once across all repos using our CLA. + +This project has adopted the +[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information, +see the Code of Conduct FAQ or contact opencode@microsoft.com with any +additional questions or comments. diff --git a/sdk/ai/azure-ai-agentserver-core/azure/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py new file mode 100644 index 000000000000..895074d32ae3 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py @@ -0,0 +1,14 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +__path__ = __import__("pkgutil").extend_path(__path__, __name__) + +from ._version import VERSION +from .logger import configure as config_logging +from .server.base import FoundryCBAgent +from .server.common.agent_run_context import AgentRunContext + +config_logging() + +__all__ = ["FoundryCBAgent", "AgentRunContext"] +__version__ = VERSION diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/_version.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/_version.py new file mode 100644 index 000000000000..44465a1b2f12 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +VERSION = "1.0.0a1" diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/constants.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/constants.py new file mode 100644 index 000000000000..a13f23aa261e --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/constants.py @@ -0,0 +1,14 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +class Constants: + # well-known environment variables + APPLICATION_INSIGHTS_CONNECTION_STRING = "_AGENT_RUNTIME_APP_INSIGHTS_CONNECTION_STRING" + AZURE_AI_PROJECT_ENDPOINT = "AZURE_AI_PROJECT_ENDPOINT" + AGENT_ID = "AGENT_ID" + AGENT_NAME = "AGENT_NAME" + AGENT_PROJECT_RESOURCE_ID = "AGENT_PROJECT_NAME" + OTEL_EXPORTER_ENDPOINT = "OTEL_EXPORTER_ENDPOINT" + AGENT_LOG_LEVEL = "AGENT_LOG_LEVEL" + AGENT_DEBUG_ERRORS = "AGENT_DEBUG_ERRORS" + ENABLE_APPLICATION_INSIGHTS_LOGGER = "ENABLE_APPLICATION_INSIGHTS_LOGGER" diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py new file mode 100644 index 000000000000..cefed9f5ef00 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py @@ -0,0 +1,159 @@ +# pylint: disable=broad-exception-caught,dangerous-default-value +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +import contextvars +import logging +import os +from logging import config + +from ._version import VERSION +from .constants import Constants + +default_log_config = { + "version": 1, + "disable_existing_loggers": False, + "loggers": { + "azure.ai.agentshosting": { + "handlers": ["console"], + "level": "INFO", + "propagate": False, + }, + }, + "handlers": { + "console": {"formatter": "std_out", "class": "logging.StreamHandler", "level": "INFO"}, + }, + "formatters": {"std_out": {"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"}}, +} + +request_context = contextvars.ContextVar("request_context", default=None) + + +def get_dimensions(): + env_values = {name: value for name, value in vars(Constants).items() if not name.startswith("_")} + res = {"azure.ai.agentshosting.version": VERSION} + for name, env_name in env_values.items(): + if isinstance(env_name, str) and not env_name.startswith("_"): + runtime_value = os.environ.get(env_name) + if runtime_value: + res[f"azure.ai.agentshosting.{name.lower()}"] = runtime_value + return res + + +def get_project_endpoint(): + project_resource_id = os.environ.get(Constants.AGENT_PROJECT_RESOURCE_ID) + if project_resource_id: + last_part = project_resource_id.split("/")[-1] + + parts = last_part.split("@") + if len(parts) < 2: + print(f"invalid project resource id: {project_resource_id}") + return None + account = parts[0] + project = parts[1] + return f"https://{account}.services.ai.azure.com/api/projects/{project}" + print("environment variable AGENT_PROJECT_RESOURCE_ID not set.") + return None + + +def get_application_insights_connstr(): + try: + conn_str = os.environ.get(Constants.APPLICATION_INSIGHTS_CONNECTION_STRING) + if not conn_str: + print("environment variable APPLICATION_INSIGHTS_CONNECTION_STRING not set.") + project_endpoint = get_project_endpoint() + if project_endpoint: 
+ # try to get the project connected application insights + from azure.ai.projects import AIProjectClient + from azure.identity import DefaultAzureCredential + + project_client = AIProjectClient(credential=DefaultAzureCredential(), endpoint=project_endpoint) + conn_str = project_client.telemetry.get_application_insights_connection_string() + if not conn_str: + print(f"no connected application insights found for project:{project_endpoint}") + else: + os.environ[Constants.APPLICATION_INSIGHTS_CONNECTION_STRING] = conn_str + return conn_str + except Exception as e: + print(f"failed to get application insights with error: {e}") + return None + + +class CustomDimensionsFilter(logging.Filter): + def filter(self, record): + # Add custom dimensions to every log record + dimensions = get_dimensions() + for key, value in dimensions.items(): + setattr(record, key, value) + cur_request_context = request_context.get() + if cur_request_context: + for key, value in cur_request_context.items(): + setattr(record, key, value) + return True + + +def configure(log_config: dict = default_log_config): + """ + Configure logging based on the provided configuration dictionary. + The dictionary should contain the logging configuration in a format compatible with `logging.config.dictConfig`. + + :param log_config: A dictionary containing logging configuration. + :type log_config: dict + """ + try: + config.dictConfig(log_config) + + application_insights_connection_string = get_application_insights_connstr() + enable_application_insights_logger = ( + os.environ.get(Constants.ENABLE_APPLICATION_INSIGHTS_LOGGER, "true").lower() == "true" + ) + if application_insights_connection_string and enable_application_insights_logger: + from opentelemetry._logs import set_logger_provider + from opentelemetry.sdk._logs import ( + LoggerProvider, + LoggingHandler, + ) + from opentelemetry.sdk._logs.export import BatchLogRecordProcessor + from opentelemetry.sdk.resources import Resource + + from azure.monitor.opentelemetry.exporter import AzureMonitorLogExporter + + logger_provider = LoggerProvider(resource=Resource.create({"service.name": "azure.ai.agentshosting"})) + set_logger_provider(logger_provider) + + exporter = AzureMonitorLogExporter(connection_string=application_insights_connection_string) + + logger_provider.add_log_record_processor(BatchLogRecordProcessor(exporter)) + handler = LoggingHandler(logger_provider=logger_provider) + handler.name = "appinsights_handler" + + # Add custom filter to inject dimensions + custom_filter = CustomDimensionsFilter() + handler.addFilter(custom_filter) + + # Only add to azure.ai.agentshosting namespace to avoid infrastructure logs + app_logger = logging.getLogger("azure.ai.agentshosting") + app_logger.setLevel(get_log_level()) + app_logger.addHandler(handler) + + except Exception as e: + print(f"Failed to configure logging: {e}") + + +def get_log_level(): + log_level = os.getenv(Constants.AGENT_LOG_LEVEL, "INFO").upper() + valid_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] + if log_level not in valid_levels: + print(f"Invalid log level '{log_level}' specified. Defaulting to 'INFO'.") + log_level = "INFO" + return log_level + + +def get_logger() -> logging.Logger: + """ + If the logger is not already configured, it will be initialized with default settings. + + :return: Configured logger instance. 
+ :rtype: logging.Logger + """ + return logging.getLogger("azure.ai.agentshosting") diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py new file mode 100644 index 000000000000..d5622ebe7732 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py @@ -0,0 +1,7 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from ._create_response import CreateResponse # type: ignore +from .projects import Response, ResponseStreamEvent + +__all__ = ["CreateResponse", "Response", "ResponseStreamEvent"] # type: ignore[var-annotated] diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py new file mode 100644 index 000000000000..a38f55408c7f --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py @@ -0,0 +1,12 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +# pylint: disable=no-name-in-module +from typing import Optional + +from .openai import response_create_params # type: ignore +from . import projects as _azure_ai_projects_models + +class CreateResponse(response_create_params.ResponseCreateParamsBase, total=False): # type: ignore + agent: Optional[_azure_ai_projects_models.AgentReference] + stream: Optional[bool] diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/openai/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/openai/__init__.py new file mode 100644 index 000000000000..ecf2179f53b7 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/openai/__init__.py @@ -0,0 +1,16 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +""" +Re-exports of OpenAI SDK response types. + +This module re-exports types from the OpenAI SDK for convenience. +These types are fully documented in the OpenAI SDK documentation. + +.. note:: + This module re-exports OpenAI SDK types. For detailed documentation, + please refer to the `OpenAI Python SDK documentation `_. +""" +from openai.types.responses import * # pylint: disable=unused-wildcard-import + +__all__ = [name for name in globals() if not name.startswith("_")] # type: ignore[var-annotated] diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/__init__.py new file mode 100644 index 000000000000..f65ea1133818 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/__init__.py @@ -0,0 +1,820 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + A2ATool, + AISearchIndexResource, + AgentClusterInsightResult, + AgentClusterInsightsRequest, + AgentContainerObject, + AgentContainerOperationError, + AgentContainerOperationObject, + AgentDefinition, + AgentId, + AgentObject, + AgentObjectVersions, + AgentReference, + AgentTaxonomyInput, + AgentVersionObject, + AgenticIdentityCredentials, + Annotation, + AnnotationFileCitation, + AnnotationFilePath, + AnnotationUrlCitation, + ApiError, + ApiErrorResponse, + ApiInnerError, + ApiKeyCredentials, + ApproximateLocation, + AzureAIAgentTarget, + AzureAISearchAgentTool, + AzureAISearchIndex, + AzureAISearchToolResource, + AzureFunctionAgentTool, + AzureFunctionBinding, + AzureFunctionDefinition, + AzureFunctionDefinitionFunction, + AzureFunctionStorageQueue, + AzureOpenAIModelConfiguration, + BaseCredentials, + BingCustomSearchAgentTool, + BingCustomSearchConfiguration, + BingCustomSearchToolParameters, + BingGroundingAgentTool, + BingGroundingSearchConfiguration, + BingGroundingSearchToolParameters, + BlobReference, + BlobReferenceSasCredential, + BrowserAutomationAgentTool, + BrowserAutomationToolConnectionParameters, + BrowserAutomationToolParameters, + CaptureStructuredOutputsTool, + ChartCoordinate, + ChatSummaryMemoryItem, + ClusterInsightResult, + ClusterTokenUsage, + CodeBasedEvaluatorDefinition, + CodeInterpreterOutput, + CodeInterpreterOutputImage, + CodeInterpreterOutputLogs, + CodeInterpreterTool, + CodeInterpreterToolAuto, + CodeInterpreterToolCallItemParam, + CodeInterpreterToolCallItemResource, + ComparisonFilter, + CompoundFilter, + ComputerAction, + ComputerActionClick, + ComputerActionDoubleClick, + ComputerActionDrag, + ComputerActionKeyPress, + ComputerActionMove, + ComputerActionScreenshot, + ComputerActionScroll, + ComputerActionTypeKeys, + ComputerActionWait, + ComputerToolCallItemParam, + ComputerToolCallItemResource, + ComputerToolCallOutputItemOutput, + ComputerToolCallOutputItemOutputComputerScreenshot, + ComputerToolCallOutputItemParam, + ComputerToolCallOutputItemResource, + ComputerToolCallSafetyCheck, + ComputerUsePreviewTool, + Connection, + ContainerAppAgentDefinition, + ContinuousEvaluationRuleAction, + Coordinate, + CosmosDBIndex, + CreatedBy, + CronTrigger, + CustomCredential, + DailyRecurrenceSchedule, + DatasetCredential, + DatasetVersion, + DeleteAgentResponse, + DeleteAgentVersionResponse, + DeleteMemoryStoreResponse, + Deployment, + EmbeddingConfiguration, + EntraIDCredentials, + EvalCompareReport, + EvalResult, + EvalRunResultCompareItem, + EvalRunResultComparison, + EvalRunResultSummary, + EvaluationComparisonRequest, + EvaluationResultSample, + EvaluationRule, + EvaluationRuleAction, + EvaluationRuleFilter, + EvaluationRunClusterInsightResult, + EvaluationRunClusterInsightsRequest, + EvaluationScheduleTask, + EvaluationTaxonomy, + EvaluationTaxonomyInput, + EvaluatorDefinition, + EvaluatorMetric, + EvaluatorVersion, + FabricDataAgentToolParameters, + FieldMapping, + FileDatasetVersion, + FileSearchTool, + FileSearchToolCallItemParam, + FileSearchToolCallItemParamResult, + FileSearchToolCallItemResource, + FolderDatasetVersion, + FunctionTool, + FunctionToolCallItemParam, + FunctionToolCallItemResource, + FunctionToolCallOutputItemParam, + 
FunctionToolCallOutputItemResource, + HostedAgentDefinition, + HourlyRecurrenceSchedule, + HumanEvaluationRuleAction, + ImageBasedHostedAgentDefinition, + ImageGenTool, + ImageGenToolCallItemParam, + ImageGenToolCallItemResource, + ImageGenToolInputImageMask, + Index, + Insight, + InsightCluster, + InsightModelConfiguration, + InsightRequest, + InsightResult, + InsightSample, + InsightScheduleTask, + InsightSummary, + InsightsMetadata, + InvokeAzureAgentWorkflowActionOutputItemResource, + ItemContent, + ItemContentInputAudio, + ItemContentInputFile, + ItemContentInputImage, + ItemContentInputText, + ItemContentOutputAudio, + ItemContentOutputText, + ItemContentRefusal, + ItemParam, + ItemReferenceItemParam, + ItemResource, + LocalShellExecAction, + LocalShellTool, + LocalShellToolCallItemParam, + LocalShellToolCallItemResource, + LocalShellToolCallOutputItemParam, + LocalShellToolCallOutputItemResource, + Location, + LogProb, + MCPApprovalRequestItemParam, + MCPApprovalRequestItemResource, + MCPApprovalResponseItemParam, + MCPApprovalResponseItemResource, + MCPCallItemParam, + MCPCallItemResource, + MCPListToolsItemParam, + MCPListToolsItemResource, + MCPListToolsTool, + MCPTool, + MCPToolAllowedTools1, + MCPToolRequireApproval1, + MCPToolRequireApprovalAlways, + MCPToolRequireApprovalNever, + ManagedAzureAISearchIndex, + MemoryItem, + MemoryOperation, + MemorySearchItem, + MemorySearchOptions, + MemorySearchTool, + MemorySearchToolCallItemParam, + MemorySearchToolCallItemResource, + MemoryStoreDefaultDefinition, + MemoryStoreDefaultOptions, + MemoryStoreDefinition, + MemoryStoreDeleteScopeResponse, + MemoryStoreObject, + MemoryStoreOperationUsage, + MemoryStoreOperationUsageInputTokensDetails, + MemoryStoreOperationUsageOutputTokensDetails, + MemoryStoreSearchResponse, + MemoryStoreUpdateResponse, + MemoryStoreUpdateResult, + MicrosoftFabricAgentTool, + ModelDeployment, + ModelDeploymentSku, + MonthlyRecurrenceSchedule, + NoAuthenticationCredentials, + OAuthConsentRequestItemResource, + OneTimeTrigger, + OpenApiAgentTool, + OpenApiAnonymousAuthDetails, + OpenApiAuthDetails, + OpenApiFunctionDefinition, + OpenApiFunctionDefinitionFunction, + OpenApiManagedAuthDetails, + OpenApiManagedSecurityScheme, + OpenApiProjectConnectionAuthDetails, + OpenApiProjectConnectionSecurityScheme, + PagedScheduleRun, + PendingUploadRequest, + PendingUploadResponse, + Prompt, + PromptAgentDefinition, + PromptAgentDefinitionText, + PromptBasedEvaluatorDefinition, + ProtocolVersionRecord, + RaiConfig, + RankingOptions, + Reasoning, + ReasoningItemParam, + ReasoningItemResource, + ReasoningItemSummaryPart, + ReasoningItemSummaryTextPart, + RecurrenceSchedule, + RecurrenceTrigger, + RedTeam, + Response, + ResponseCodeInterpreterCallCodeDeltaEvent, + ResponseCodeInterpreterCallCodeDoneEvent, + ResponseCodeInterpreterCallCompletedEvent, + ResponseCodeInterpreterCallInProgressEvent, + ResponseCodeInterpreterCallInterpretingEvent, + ResponseCompletedEvent, + ResponseContentPartAddedEvent, + ResponseContentPartDoneEvent, + ResponseConversation1, + ResponseCreatedEvent, + ResponseError, + ResponseErrorEvent, + ResponseFailedEvent, + ResponseFileSearchCallCompletedEvent, + ResponseFileSearchCallInProgressEvent, + ResponseFileSearchCallSearchingEvent, + ResponseFormatJsonSchemaSchema, + ResponseFunctionCallArgumentsDeltaEvent, + ResponseFunctionCallArgumentsDoneEvent, + ResponseImageGenCallCompletedEvent, + ResponseImageGenCallGeneratingEvent, + ResponseImageGenCallInProgressEvent, + 
ResponseImageGenCallPartialImageEvent, + ResponseInProgressEvent, + ResponseIncompleteDetails1, + ResponseIncompleteEvent, + ResponseMCPCallArgumentsDeltaEvent, + ResponseMCPCallArgumentsDoneEvent, + ResponseMCPCallCompletedEvent, + ResponseMCPCallFailedEvent, + ResponseMCPCallInProgressEvent, + ResponseMCPListToolsCompletedEvent, + ResponseMCPListToolsFailedEvent, + ResponseMCPListToolsInProgressEvent, + ResponseOutputItemAddedEvent, + ResponseOutputItemDoneEvent, + ResponsePromptVariables, + ResponseQueuedEvent, + ResponseReasoningDeltaEvent, + ResponseReasoningDoneEvent, + ResponseReasoningSummaryDeltaEvent, + ResponseReasoningSummaryDoneEvent, + ResponseReasoningSummaryPartAddedEvent, + ResponseReasoningSummaryPartDoneEvent, + ResponseReasoningSummaryTextDeltaEvent, + ResponseReasoningSummaryTextDoneEvent, + ResponseRefusalDeltaEvent, + ResponseRefusalDoneEvent, + ResponseStreamEvent, + ResponseText, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, + ResponseTextFormatConfiguration, + ResponseTextFormatConfigurationJsonObject, + ResponseTextFormatConfigurationJsonSchema, + ResponseTextFormatConfigurationText, + ResponseUsage, + ResponseWebSearchCallCompletedEvent, + ResponseWebSearchCallInProgressEvent, + ResponseWebSearchCallSearchingEvent, + ResponsesAssistantMessageItemParam, + ResponsesAssistantMessageItemResource, + ResponsesDeveloperMessageItemParam, + ResponsesDeveloperMessageItemResource, + ResponsesMessageItemParam, + ResponsesMessageItemResource, + ResponsesSystemMessageItemParam, + ResponsesSystemMessageItemResource, + ResponsesUserMessageItemParam, + ResponsesUserMessageItemResource, + SASCredentials, + Schedule, + ScheduleRun, + ScheduleTask, + SharepointAgentTool, + SharepointGroundingToolParameters, + StructuredInputDefinition, + StructuredOutputDefinition, + StructuredOutputsItemResource, + Target, + TargetConfig, + TaxonomyCategory, + TaxonomySubCategory, + Tool, + ToolArgumentBinding, + ToolChoiceObject, + ToolChoiceObjectCodeInterpreter, + ToolChoiceObjectComputer, + ToolChoiceObjectFileSearch, + ToolChoiceObjectFunction, + ToolChoiceObjectImageGen, + ToolChoiceObjectMCP, + ToolChoiceObjectWebSearch, + ToolDescription, + ToolProjectConnection, + ToolProjectConnectionList, + TopLogProb, + Trigger, + UserProfileMemoryItem, + VectorStoreFileAttributes, + WebSearchAction, + WebSearchActionFind, + WebSearchActionOpenPage, + WebSearchActionSearch, + WebSearchPreviewTool, + WebSearchToolCallItemParam, + WebSearchToolCallItemResource, + WeeklyRecurrenceSchedule, + WorkflowActionOutputItemResource, + WorkflowDefinition, +) + +from ._enums import ( # type: ignore + AgentContainerOperationStatus, + AgentContainerStatus, + AgentKind, + AgentProtocol, + AnnotationType, + AttackStrategy, + AzureAISearchQueryType, + CodeInterpreterOutputType, + ComputerActionType, + ComputerToolCallOutputItemOutputType, + ConnectionType, + CredentialType, + DatasetType, + DayOfWeek, + DeploymentType, + EvaluationRuleActionType, + EvaluationRuleEventType, + EvaluationTaxonomyInputType, + EvaluatorCategory, + EvaluatorDefinitionType, + EvaluatorMetricDirection, + EvaluatorMetricType, + EvaluatorType, + IndexType, + InsightType, + ItemContentType, + ItemType, + LocationType, + MemoryItemKind, + MemoryOperationKind, + MemoryStoreKind, + MemoryStoreUpdateStatus, + OpenApiAuthType, + OperationState, + PendingUploadType, + ReasoningEffort, + ReasoningItemSummaryPartType, + RecurrenceType, + ResponseErrorCode, + ResponseStreamEventType, + ResponseTextFormatConfigurationType, + ResponsesMessageRole, + 
RiskCategory, + SampleType, + ScheduleProvisioningStatus, + ScheduleTaskType, + ServiceTier, + ToolChoiceObjectType, + ToolChoiceOptions, + ToolType, + TreatmentEffectType, + TriggerType, + WebSearchActionType, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "A2ATool", + "AISearchIndexResource", + "AgentClusterInsightResult", + "AgentClusterInsightsRequest", + "AgentContainerObject", + "AgentContainerOperationError", + "AgentContainerOperationObject", + "AgentDefinition", + "AgentId", + "AgentObject", + "AgentObjectVersions", + "AgentReference", + "AgentTaxonomyInput", + "AgentVersionObject", + "AgenticIdentityCredentials", + "Annotation", + "AnnotationFileCitation", + "AnnotationFilePath", + "AnnotationUrlCitation", + "ApiError", + "ApiErrorResponse", + "ApiInnerError", + "ApiKeyCredentials", + "ApproximateLocation", + "AzureAIAgentTarget", + "AzureAISearchAgentTool", + "AzureAISearchIndex", + "AzureAISearchToolResource", + "AzureFunctionAgentTool", + "AzureFunctionBinding", + "AzureFunctionDefinition", + "AzureFunctionDefinitionFunction", + "AzureFunctionStorageQueue", + "AzureOpenAIModelConfiguration", + "BaseCredentials", + "BingCustomSearchAgentTool", + "BingCustomSearchConfiguration", + "BingCustomSearchToolParameters", + "BingGroundingAgentTool", + "BingGroundingSearchConfiguration", + "BingGroundingSearchToolParameters", + "BlobReference", + "BlobReferenceSasCredential", + "BrowserAutomationAgentTool", + "BrowserAutomationToolConnectionParameters", + "BrowserAutomationToolParameters", + "CaptureStructuredOutputsTool", + "ChartCoordinate", + "ChatSummaryMemoryItem", + "ClusterInsightResult", + "ClusterTokenUsage", + "CodeBasedEvaluatorDefinition", + "CodeInterpreterOutput", + "CodeInterpreterOutputImage", + "CodeInterpreterOutputLogs", + "CodeInterpreterTool", + "CodeInterpreterToolAuto", + "CodeInterpreterToolCallItemParam", + "CodeInterpreterToolCallItemResource", + "ComparisonFilter", + "CompoundFilter", + "ComputerAction", + "ComputerActionClick", + "ComputerActionDoubleClick", + "ComputerActionDrag", + "ComputerActionKeyPress", + "ComputerActionMove", + "ComputerActionScreenshot", + "ComputerActionScroll", + "ComputerActionTypeKeys", + "ComputerActionWait", + "ComputerToolCallItemParam", + "ComputerToolCallItemResource", + "ComputerToolCallOutputItemOutput", + "ComputerToolCallOutputItemOutputComputerScreenshot", + "ComputerToolCallOutputItemParam", + "ComputerToolCallOutputItemResource", + "ComputerToolCallSafetyCheck", + "ComputerUsePreviewTool", + "Connection", + "ContainerAppAgentDefinition", + "ContinuousEvaluationRuleAction", + "Coordinate", + "CosmosDBIndex", + "CreatedBy", + "CronTrigger", + "CustomCredential", + "DailyRecurrenceSchedule", + "DatasetCredential", + "DatasetVersion", + "DeleteAgentResponse", + "DeleteAgentVersionResponse", + "DeleteMemoryStoreResponse", + "Deployment", + "EmbeddingConfiguration", + "EntraIDCredentials", + "EvalCompareReport", + "EvalResult", + "EvalRunResultCompareItem", + "EvalRunResultComparison", + "EvalRunResultSummary", + "EvaluationComparisonRequest", + "EvaluationResultSample", + "EvaluationRule", + "EvaluationRuleAction", + "EvaluationRuleFilter", + "EvaluationRunClusterInsightResult", + "EvaluationRunClusterInsightsRequest", + "EvaluationScheduleTask", + "EvaluationTaxonomy", + "EvaluationTaxonomyInput", + "EvaluatorDefinition", + "EvaluatorMetric", + "EvaluatorVersion", + "FabricDataAgentToolParameters", + "FieldMapping", + "FileDatasetVersion", + 
"FileSearchTool", + "FileSearchToolCallItemParam", + "FileSearchToolCallItemParamResult", + "FileSearchToolCallItemResource", + "FolderDatasetVersion", + "FunctionTool", + "FunctionToolCallItemParam", + "FunctionToolCallItemResource", + "FunctionToolCallOutputItemParam", + "FunctionToolCallOutputItemResource", + "HostedAgentDefinition", + "HourlyRecurrenceSchedule", + "HumanEvaluationRuleAction", + "ImageBasedHostedAgentDefinition", + "ImageGenTool", + "ImageGenToolCallItemParam", + "ImageGenToolCallItemResource", + "ImageGenToolInputImageMask", + "Index", + "Insight", + "InsightCluster", + "InsightModelConfiguration", + "InsightRequest", + "InsightResult", + "InsightSample", + "InsightScheduleTask", + "InsightSummary", + "InsightsMetadata", + "InvokeAzureAgentWorkflowActionOutputItemResource", + "ItemContent", + "ItemContentInputAudio", + "ItemContentInputFile", + "ItemContentInputImage", + "ItemContentInputText", + "ItemContentOutputAudio", + "ItemContentOutputText", + "ItemContentRefusal", + "ItemParam", + "ItemReferenceItemParam", + "ItemResource", + "LocalShellExecAction", + "LocalShellTool", + "LocalShellToolCallItemParam", + "LocalShellToolCallItemResource", + "LocalShellToolCallOutputItemParam", + "LocalShellToolCallOutputItemResource", + "Location", + "LogProb", + "MCPApprovalRequestItemParam", + "MCPApprovalRequestItemResource", + "MCPApprovalResponseItemParam", + "MCPApprovalResponseItemResource", + "MCPCallItemParam", + "MCPCallItemResource", + "MCPListToolsItemParam", + "MCPListToolsItemResource", + "MCPListToolsTool", + "MCPTool", + "MCPToolAllowedTools1", + "MCPToolRequireApproval1", + "MCPToolRequireApprovalAlways", + "MCPToolRequireApprovalNever", + "ManagedAzureAISearchIndex", + "MemoryItem", + "MemoryOperation", + "MemorySearchItem", + "MemorySearchOptions", + "MemorySearchTool", + "MemorySearchToolCallItemParam", + "MemorySearchToolCallItemResource", + "MemoryStoreDefaultDefinition", + "MemoryStoreDefaultOptions", + "MemoryStoreDefinition", + "MemoryStoreDeleteScopeResponse", + "MemoryStoreObject", + "MemoryStoreOperationUsage", + "MemoryStoreOperationUsageInputTokensDetails", + "MemoryStoreOperationUsageOutputTokensDetails", + "MemoryStoreSearchResponse", + "MemoryStoreUpdateResponse", + "MemoryStoreUpdateResult", + "MicrosoftFabricAgentTool", + "ModelDeployment", + "ModelDeploymentSku", + "MonthlyRecurrenceSchedule", + "NoAuthenticationCredentials", + "OAuthConsentRequestItemResource", + "OneTimeTrigger", + "OpenApiAgentTool", + "OpenApiAnonymousAuthDetails", + "OpenApiAuthDetails", + "OpenApiFunctionDefinition", + "OpenApiFunctionDefinitionFunction", + "OpenApiManagedAuthDetails", + "OpenApiManagedSecurityScheme", + "OpenApiProjectConnectionAuthDetails", + "OpenApiProjectConnectionSecurityScheme", + "PagedScheduleRun", + "PendingUploadRequest", + "PendingUploadResponse", + "Prompt", + "PromptAgentDefinition", + "PromptAgentDefinitionText", + "PromptBasedEvaluatorDefinition", + "ProtocolVersionRecord", + "RaiConfig", + "RankingOptions", + "Reasoning", + "ReasoningItemParam", + "ReasoningItemResource", + "ReasoningItemSummaryPart", + "ReasoningItemSummaryTextPart", + "RecurrenceSchedule", + "RecurrenceTrigger", + "RedTeam", + "Response", + "ResponseCodeInterpreterCallCodeDeltaEvent", + "ResponseCodeInterpreterCallCodeDoneEvent", + "ResponseCodeInterpreterCallCompletedEvent", + "ResponseCodeInterpreterCallInProgressEvent", + "ResponseCodeInterpreterCallInterpretingEvent", + "ResponseCompletedEvent", + "ResponseContentPartAddedEvent", + "ResponseContentPartDoneEvent", + 
"ResponseConversation1", + "ResponseCreatedEvent", + "ResponseError", + "ResponseErrorEvent", + "ResponseFailedEvent", + "ResponseFileSearchCallCompletedEvent", + "ResponseFileSearchCallInProgressEvent", + "ResponseFileSearchCallSearchingEvent", + "ResponseFormatJsonSchemaSchema", + "ResponseFunctionCallArgumentsDeltaEvent", + "ResponseFunctionCallArgumentsDoneEvent", + "ResponseImageGenCallCompletedEvent", + "ResponseImageGenCallGeneratingEvent", + "ResponseImageGenCallInProgressEvent", + "ResponseImageGenCallPartialImageEvent", + "ResponseInProgressEvent", + "ResponseIncompleteDetails1", + "ResponseIncompleteEvent", + "ResponseMCPCallArgumentsDeltaEvent", + "ResponseMCPCallArgumentsDoneEvent", + "ResponseMCPCallCompletedEvent", + "ResponseMCPCallFailedEvent", + "ResponseMCPCallInProgressEvent", + "ResponseMCPListToolsCompletedEvent", + "ResponseMCPListToolsFailedEvent", + "ResponseMCPListToolsInProgressEvent", + "ResponseOutputItemAddedEvent", + "ResponseOutputItemDoneEvent", + "ResponsePromptVariables", + "ResponseQueuedEvent", + "ResponseReasoningDeltaEvent", + "ResponseReasoningDoneEvent", + "ResponseReasoningSummaryDeltaEvent", + "ResponseReasoningSummaryDoneEvent", + "ResponseReasoningSummaryPartAddedEvent", + "ResponseReasoningSummaryPartDoneEvent", + "ResponseReasoningSummaryTextDeltaEvent", + "ResponseReasoningSummaryTextDoneEvent", + "ResponseRefusalDeltaEvent", + "ResponseRefusalDoneEvent", + "ResponseStreamEvent", + "ResponseText", + "ResponseTextDeltaEvent", + "ResponseTextDoneEvent", + "ResponseTextFormatConfiguration", + "ResponseTextFormatConfigurationJsonObject", + "ResponseTextFormatConfigurationJsonSchema", + "ResponseTextFormatConfigurationText", + "ResponseUsage", + "ResponseWebSearchCallCompletedEvent", + "ResponseWebSearchCallInProgressEvent", + "ResponseWebSearchCallSearchingEvent", + "ResponsesAssistantMessageItemParam", + "ResponsesAssistantMessageItemResource", + "ResponsesDeveloperMessageItemParam", + "ResponsesDeveloperMessageItemResource", + "ResponsesMessageItemParam", + "ResponsesMessageItemResource", + "ResponsesSystemMessageItemParam", + "ResponsesSystemMessageItemResource", + "ResponsesUserMessageItemParam", + "ResponsesUserMessageItemResource", + "SASCredentials", + "Schedule", + "ScheduleRun", + "ScheduleTask", + "SharepointAgentTool", + "SharepointGroundingToolParameters", + "StructuredInputDefinition", + "StructuredOutputDefinition", + "StructuredOutputsItemResource", + "Target", + "TargetConfig", + "TaxonomyCategory", + "TaxonomySubCategory", + "Tool", + "ToolArgumentBinding", + "ToolChoiceObject", + "ToolChoiceObjectCodeInterpreter", + "ToolChoiceObjectComputer", + "ToolChoiceObjectFileSearch", + "ToolChoiceObjectFunction", + "ToolChoiceObjectImageGen", + "ToolChoiceObjectMCP", + "ToolChoiceObjectWebSearch", + "ToolDescription", + "ToolProjectConnection", + "ToolProjectConnectionList", + "TopLogProb", + "Trigger", + "UserProfileMemoryItem", + "VectorStoreFileAttributes", + "WebSearchAction", + "WebSearchActionFind", + "WebSearchActionOpenPage", + "WebSearchActionSearch", + "WebSearchPreviewTool", + "WebSearchToolCallItemParam", + "WebSearchToolCallItemResource", + "WeeklyRecurrenceSchedule", + "WorkflowActionOutputItemResource", + "WorkflowDefinition", + "AgentContainerOperationStatus", + "AgentContainerStatus", + "AgentKind", + "AgentProtocol", + "AnnotationType", + "AttackStrategy", + "AzureAISearchQueryType", + "CodeInterpreterOutputType", + "ComputerActionType", + "ComputerToolCallOutputItemOutputType", + "ConnectionType", + "CredentialType", + 
"DatasetType", + "DayOfWeek", + "DeploymentType", + "EvaluationRuleActionType", + "EvaluationRuleEventType", + "EvaluationTaxonomyInputType", + "EvaluatorCategory", + "EvaluatorDefinitionType", + "EvaluatorMetricDirection", + "EvaluatorMetricType", + "EvaluatorType", + "IndexType", + "InsightType", + "ItemContentType", + "ItemType", + "LocationType", + "MemoryItemKind", + "MemoryOperationKind", + "MemoryStoreKind", + "MemoryStoreUpdateStatus", + "OpenApiAuthType", + "OperationState", + "PendingUploadType", + "ReasoningEffort", + "ReasoningItemSummaryPartType", + "RecurrenceType", + "ResponseErrorCode", + "ResponseStreamEventType", + "ResponseTextFormatConfigurationType", + "ResponsesMessageRole", + "RiskCategory", + "SampleType", + "ScheduleProvisioningStatus", + "ScheduleTaskType", + "ServiceTier", + "ToolChoiceObjectType", + "ToolChoiceOptions", + "ToolType", + "TreatmentEffectType", + "TriggerType", + "WebSearchActionType", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_enums.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_enums.py new file mode 100644 index 000000000000..ea4ebc59efd7 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_enums.py @@ -0,0 +1,767 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class AgentContainerOperationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Status of the container operation for a specific version of an agent.""" + + NOT_STARTED = "NotStarted" + """The container operation is not started.""" + IN_PROGRESS = "InProgress" + """The container operation is in progress.""" + SUCCEEDED = "Succeeded" + """The container operation has succeeded.""" + FAILED = "Failed" + """The container operation has failed.""" + + +class AgentContainerStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Status of the container of a specific version of an agent.""" + + STARTING = "Starting" + """The container is starting.""" + RUNNING = "Running" + """The container is running.""" + STOPPING = "Stopping" + """The container is stopping.""" + STOPPED = "Stopped" + """The container is stopped.""" + FAILED = "Failed" + """The container has failed.""" + DELETING = "Deleting" + """The container is deleting.""" + DELETED = "Deleted" + """The container is deleted.""" + UPDATING = "Updating" + """The container is updating.""" + + +class AgentKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of AgentKind.""" + + PROMPT = "prompt" + HOSTED = "hosted" + CONTAINER_APP = "container_app" + WORKFLOW = "workflow" + + +class AgentProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of AgentProtocol.""" + + ACTIVITY_PROTOCOL = "activity_protocol" + RESPONSES = "responses" + + +class AnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of AnnotationType.""" + + FILE_CITATION = "file_citation" + URL_CITATION = "url_citation" + FILE_PATH = "file_path" + CONTAINER_FILE_CITATION = "container_file_citation" + + +class AttackStrategy(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Strategies for attacks.""" + + EASY = "easy" + """Represents a default set of easy complexity attacks. Easy complexity attacks require less + effort, such as translation of a prompt into some encoding, and does not require any Large + Language Model to convert or orchestrate.""" + MODERATE = "moderate" + """Represents a default set of moderate complexity attacks. Moderate complexity attacks require + having access to resources such as another generative AI model.""" + DIFFICULT = "difficult" + """Represents a default set of difficult complexity attacks. 
Difficult complexity attacks include + attacks that require access to significant resources and effort to execute an attack such as + knowledge of search-based algorithms in addition to a generative AI model.""" + ASCII_ART = "ascii_art" + """Generates visual art using ASCII characters, often used for creative or obfuscation purposes.""" + ASCII_SMUGGLER = "ascii_smuggler" + """Conceals data within ASCII characters, making it harder to detect.""" + ATBASH = "atbash" + """Implements the Atbash cipher, a simple substitution cipher where each letter is mapped to its + reverse.""" + BASE64 = "base64" + """Encodes binary data into a text format using Base64, commonly used for data transmission.""" + BINARY = "binary" + """Converts text into binary code, representing data in a series of 0s and 1s.""" + CAESAR = "caesar" + """Applies the Caesar cipher, a substitution cipher that shifts characters by a fixed number of + positions.""" + CHARACTER_SPACE = "character_space" + """Alters text by adding spaces between characters, often used for obfuscation.""" + JAILBREAK = "jailbreak" + """Injects specially crafted prompts to bypass AI safeguards, known as User Injected Prompt + Attacks (UPIA).""" + ANSII_ATTACK = "ansii_attack" + """Utilizes ANSI escape sequences to manipulate text appearance and behavior.""" + CHARACTER_SWAP = "character_swap" + """Swaps characters within text to create variations or obfuscate the original content.""" + SUFFIX_APPEND = "suffix_append" + """Appends an adversarial suffix to the prompt.""" + STRING_JOIN = "string_join" + """Joins multiple strings together, often used for concatenation or obfuscation.""" + UNICODE_CONFUSABLE = "unicode_confusable" + """Uses Unicode characters that look similar to standard characters, creating visual confusion.""" + UNICODE_SUBSTITUTION = "unicode_substitution" + """Substitutes standard characters with Unicode equivalents, often for obfuscation.""" + DIACRITIC = "diacritic" + """Adds diacritical marks to characters, changing their appearance and sometimes their meaning.""" + FLIP = "flip" + """Flips characters from front to back, creating a mirrored effect.""" + LEETSPEAK = "leetspeak" + """Transforms text into Leetspeak, a form of encoding that replaces letters with similar-looking + numbers or symbols.""" + ROT13 = "rot13" + """Applies the ROT13 cipher, a simple substitution cipher that shifts characters by 13 positions.""" + MORSE = "morse" + """Encodes text into Morse code, using dots and dashes to represent characters.""" + URL = "url" + """Encodes text into URL format.""" + BASELINE = "baseline" + """Represents the baseline direct adversarial probing, which is used by attack strategies as the + attack objective.""" + + +class AzureAISearchQueryType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Available query types for Azure AI Search tool.""" + + SIMPLE = "simple" + """Query type ``simple``""" + SEMANTIC = "semantic" + """Query type ``semantic``""" + VECTOR = "vector" + """Query type ``vector``""" + VECTOR_SIMPLE_HYBRID = "vector_simple_hybrid" + """Query type ``vector_simple_hybrid``""" + VECTOR_SEMANTIC_HYBRID = "vector_semantic_hybrid" + """Query type ``vector_semantic_hybrid``""" + + +class CodeInterpreterOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of CodeInterpreterOutputType.""" + + LOGS = "logs" + IMAGE = "image" + + +class ComputerActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ComputerActionType.""" + + SCREENSHOT = "screenshot" + CLICK = "click" + DOUBLE_CLICK = 
"double_click" + SCROLL = "scroll" + TYPE = "type" + WAIT = "wait" + KEYPRESS = "keypress" + DRAG = "drag" + MOVE = "move" + + +class ComputerToolCallOutputItemOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """A computer screenshot image used with the computer use tool.""" + + SCREENSHOT = "computer_screenshot" + + +class ConnectionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The Type (or category) of the connection.""" + + AZURE_OPEN_AI = "AzureOpenAI" + """Azure OpenAI Service""" + AZURE_BLOB_STORAGE = "AzureBlob" + """Azure Blob Storage, with specified container""" + AZURE_STORAGE_ACCOUNT = "AzureStorageAccount" + """Azure Blob Storage, with container not specified (used by Agents)""" + AZURE_AI_SEARCH = "CognitiveSearch" + """Azure AI Search""" + COSMOS_DB = "CosmosDB" + """CosmosDB""" + API_KEY = "ApiKey" + """Generic connection that uses API Key authentication""" + APPLICATION_CONFIGURATION = "AppConfig" + """Application Configuration""" + APPLICATION_INSIGHTS = "AppInsights" + """Application Insights""" + CUSTOM = "CustomKeys" + """Custom Keys""" + REMOTE_TOOL = "RemoteTool" + """Remote tool""" + + +class CredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The credential type used by the connection.""" + + API_KEY = "ApiKey" + """API Key credential""" + ENTRA_ID = "AAD" + """Entra ID credential (formerly known as AAD)""" + SAS = "SAS" + """Shared Access Signature (SAS) credential""" + CUSTOM = "CustomKeys" + """Custom credential""" + NONE = "None" + """No credential""" + AGENTIC_IDENTITY = "AgenticIdentityToken" + """Agentic identity credential""" + + +class DatasetType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the type of data.""" + + URI_FILE = "uri_file" + """URI file.""" + URI_FOLDER = "uri_folder" + """URI folder.""" + + +class DayOfWeek(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Days of the week for recurrence schedule.""" + + SUNDAY = "Sunday" + """Sunday.""" + MONDAY = "Monday" + """Monday.""" + TUESDAY = "Tuesday" + """Tuesday.""" + WEDNESDAY = "Wednesday" + """Wednesday.""" + THURSDAY = "Thursday" + """Thursday.""" + FRIDAY = "Friday" + """Friday.""" + SATURDAY = "Saturday" + """Saturday.""" + + +class DeploymentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of DeploymentType.""" + + MODEL_DEPLOYMENT = "ModelDeployment" + """Model deployment""" + + +class EvaluationRuleActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the evaluation action.""" + + CONTINUOUS_EVALUATION = "continuousEvaluation" + """Continuous evaluation.""" + HUMAN_EVALUATION = "humanEvaluation" + """Human evaluation.""" + + +class EvaluationRuleEventType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the evaluation rule event.""" + + RESPONSE_COMPLETED = "response.completed" + """Response completed.""" + MANUAL = "manual" + """Manual trigger.""" + + +class EvaluationTaxonomyInputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the evaluation taxonomy input.""" + + AGENT = "agent" + """Agent""" + POLICY = "policy" + """Policy.""" + + +class EvaluatorCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The category of the evaluator.""" + + QUALITY = "quality" + """Quality""" + SAFETY = "safety" + """Risk & Safety""" + AGENTS = "agents" + """Agents""" + + +class EvaluatorDefinitionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of evaluator definition.""" + + PROMPT = "prompt" + """Prompt-based definition""" + CODE = "code" + """Code-based 
definition""" + PROMPT_AND_CODE = "prompt_and_code" + """Prompt & Code Based definition""" + SERVICE = "service" + """Service-based evaluator""" + OPENAI_GRADERS = "openai_graders" + """OpenAI graders""" + + +class EvaluatorMetricDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The direction of the metric indicating whether a higher value is better, a lower value is + better, or neutral. + """ + + INCREASE = "increase" + """It indicates a higher value is better for this metric""" + DECREASE = "decrease" + """It indicates a lower value is better for this metric""" + NEUTRAL = "neutral" + """It indicates no preference for this metric direction""" + + +class EvaluatorMetricType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of the evaluator.""" + + ORDINAL = "ordinal" + """Ordinal metric representing categories that can be ordered or ranked.""" + CONTINUOUS = "continuous" + """Continuous metric representing values in a continuous range.""" + BOOLEAN = "boolean" + """Boolean metric representing true/false values""" + + +class EvaluatorType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of the evaluator.""" + + BUILT_IN = "builtin" + """Built-in evaluator (Microsoft provided)""" + CUSTOM = "custom" + """Custom evaluator""" + + +class IndexType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of IndexType.""" + + AZURE_SEARCH = "AzureSearch" + """Azure search""" + COSMOS_DB = "CosmosDBNoSqlVectorStore" + """CosmosDB""" + MANAGED_AZURE_SEARCH = "ManagedAzureSearch" + """Managed Azure Search""" + + +class InsightType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The request of the insights.""" + + EVALUATION_RUN_CLUSTER_INSIGHT = "EvaluationRunClusterInsight" + """Insights on an Evaluation run result.""" + AGENT_CLUSTER_INSIGHT = "AgentClusterInsight" + """Cluster Insight on an Agent.""" + EVALUATION_COMPARISON = "EvaluationComparison" + """Evaluation Comparison.""" + + +class ItemContentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Multi-modal input and output contents.""" + + INPUT_TEXT = "input_text" + INPUT_AUDIO = "input_audio" + INPUT_IMAGE = "input_image" + INPUT_FILE = "input_file" + OUTPUT_TEXT = "output_text" + OUTPUT_AUDIO = "output_audio" + REFUSAL = "refusal" + + +class ItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ItemType.""" + + MESSAGE = "message" + FILE_SEARCH_CALL = "file_search_call" + FUNCTION_CALL = "function_call" + FUNCTION_CALL_OUTPUT = "function_call_output" + COMPUTER_CALL = "computer_call" + COMPUTER_CALL_OUTPUT = "computer_call_output" + WEB_SEARCH_CALL = "web_search_call" + REASONING = "reasoning" + ITEM_REFERENCE = "item_reference" + IMAGE_GENERATION_CALL = "image_generation_call" + CODE_INTERPRETER_CALL = "code_interpreter_call" + LOCAL_SHELL_CALL = "local_shell_call" + LOCAL_SHELL_CALL_OUTPUT = "local_shell_call_output" + MCP_LIST_TOOLS = "mcp_list_tools" + MCP_APPROVAL_REQUEST = "mcp_approval_request" + MCP_APPROVAL_RESPONSE = "mcp_approval_response" + MCP_CALL = "mcp_call" + STRUCTURED_OUTPUTS = "structured_outputs" + WORKFLOW_ACTION = "workflow_action" + MEMORY_SEARCH_CALL = "memory_search_call" + OAUTH_CONSENT_REQUEST = "oauth_consent_request" + + +class LocationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of LocationType.""" + + APPROXIMATE = "approximate" + + +class MemoryItemKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Memory item kind.""" + + USER_PROFILE = "user_profile" + """User profile information extracted from conversations.""" + 
CHAT_SUMMARY = "chat_summary" + """Summary of chat conversations.""" + + +class MemoryOperationKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Memory operation kind.""" + + CREATE = "create" + """Create a new memory item.""" + UPDATE = "update" + """Update an existing memory item.""" + DELETE = "delete" + """Delete an existing memory item.""" + + +class MemoryStoreKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of memory store implementation to use.""" + + DEFAULT = "default" + """The default memory store implementation.""" + + +class MemoryStoreUpdateStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Status of a memory store update operation.""" + + QUEUED = "queued" + IN_PROGRESS = "in_progress" + COMPLETED = "completed" + FAILED = "failed" + SUPERSEDED = "superseded" + + +class OpenApiAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Authentication type for OpenApi endpoint. Allowed types are: + * Anonymous (no authentication required) + * Project Connection (requires project_connection_id to endpoint, as setup in AI Foundry) + * Managed_Identity (requires audience for identity based auth). + """ + + ANONYMOUS = "anonymous" + PROJECT_CONNECTION = "project_connection" + MANAGED_IDENTITY = "managed_identity" + + +class OperationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum describing allowed operation states.""" + + NOT_STARTED = "NotStarted" + """The operation has not started.""" + RUNNING = "Running" + """The operation is in progress.""" + SUCCEEDED = "Succeeded" + """The operation has completed successfully.""" + FAILED = "Failed" + """The operation has failed.""" + CANCELED = "Canceled" + """The operation has been canceled by the user.""" + + +class PendingUploadType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of pending upload.""" + + NONE = "None" + """No pending upload.""" + BLOB_REFERENCE = "BlobReference" + """Blob Reference is the only supported type.""" + + +class ReasoningEffort(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """**o-series models only** + Constrains effort on reasoning for + `reasoning models `_. + Currently supported values are ``low``, ``medium``, and ``high``. Reducing + reasoning effort can result in faster responses and fewer tokens used + on reasoning in a response. 
+ """ + + LOW = "low" + MEDIUM = "medium" + HIGH = "high" + + +class ReasoningItemSummaryPartType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ReasoningItemSummaryPartType.""" + + SUMMARY_TEXT = "summary_text" + + +class RecurrenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Recurrence type.""" + + HOURLY = "Hourly" + """Hourly recurrence pattern.""" + DAILY = "Daily" + """Daily recurrence pattern.""" + WEEKLY = "Weekly" + """Weekly recurrence pattern.""" + MONTHLY = "Monthly" + """Monthly recurrence pattern.""" + + +class ResponseErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The error code for the response.""" + + SERVER_ERROR = "server_error" + RATE_LIMIT_EXCEEDED = "rate_limit_exceeded" + INVALID_PROMPT = "invalid_prompt" + VECTOR_STORE_TIMEOUT = "vector_store_timeout" + INVALID_IMAGE = "invalid_image" + INVALID_IMAGE_FORMAT = "invalid_image_format" + INVALID_BASE64_IMAGE = "invalid_base64_image" + INVALID_IMAGE_URL = "invalid_image_url" + IMAGE_TOO_LARGE = "image_too_large" + IMAGE_TOO_SMALL = "image_too_small" + IMAGE_PARSE_ERROR = "image_parse_error" + IMAGE_CONTENT_POLICY_VIOLATION = "image_content_policy_violation" + INVALID_IMAGE_MODE = "invalid_image_mode" + IMAGE_FILE_TOO_LARGE = "image_file_too_large" + UNSUPPORTED_IMAGE_MEDIA_TYPE = "unsupported_image_media_type" + EMPTY_IMAGE_FILE = "empty_image_file" + FAILED_TO_DOWNLOAD_IMAGE = "failed_to_download_image" + IMAGE_FILE_NOT_FOUND = "image_file_not_found" + + +class ResponsesMessageRole(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The collection of valid roles for responses message items.""" + + SYSTEM = "system" + DEVELOPER = "developer" + USER = "user" + ASSISTANT = "assistant" + + +class ResponseStreamEventType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of ResponseStreamEventType.""" + + RESPONSE_AUDIO_DELTA = "response.audio.delta" + RESPONSE_AUDIO_DONE = "response.audio.done" + RESPONSE_AUDIO_TRANSCRIPT_DELTA = "response.audio_transcript.delta" + RESPONSE_AUDIO_TRANSCRIPT_DONE = "response.audio_transcript.done" + RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA = "response.code_interpreter_call_code.delta" + RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE = "response.code_interpreter_call_code.done" + RESPONSE_CODE_INTERPRETER_CALL_COMPLETED = "response.code_interpreter_call.completed" + RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS = "response.code_interpreter_call.in_progress" + RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING = "response.code_interpreter_call.interpreting" + RESPONSE_COMPLETED = "response.completed" + RESPONSE_CONTENT_PART_ADDED = "response.content_part.added" + RESPONSE_CONTENT_PART_DONE = "response.content_part.done" + RESPONSE_CREATED = "response.created" + ERROR = "error" + RESPONSE_FILE_SEARCH_CALL_COMPLETED = "response.file_search_call.completed" + RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS = "response.file_search_call.in_progress" + RESPONSE_FILE_SEARCH_CALL_SEARCHING = "response.file_search_call.searching" + RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA = "response.function_call_arguments.delta" + RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE = "response.function_call_arguments.done" + RESPONSE_IN_PROGRESS = "response.in_progress" + RESPONSE_FAILED = "response.failed" + RESPONSE_INCOMPLETE = "response.incomplete" + RESPONSE_OUTPUT_ITEM_ADDED = "response.output_item.added" + RESPONSE_OUTPUT_ITEM_DONE = "response.output_item.done" + RESPONSE_REFUSAL_DELTA = "response.refusal.delta" + RESPONSE_REFUSAL_DONE = "response.refusal.done" + RESPONSE_OUTPUT_TEXT_ANNOTATION_ADDED = 
"response.output_text.annotation.added" + RESPONSE_OUTPUT_TEXT_DELTA = "response.output_text.delta" + RESPONSE_OUTPUT_TEXT_DONE = "response.output_text.done" + RESPONSE_REASONING_SUMMARY_PART_ADDED = "response.reasoning_summary_part.added" + RESPONSE_REASONING_SUMMARY_PART_DONE = "response.reasoning_summary_part.done" + RESPONSE_REASONING_SUMMARY_TEXT_DELTA = "response.reasoning_summary_text.delta" + RESPONSE_REASONING_SUMMARY_TEXT_DONE = "response.reasoning_summary_text.done" + RESPONSE_WEB_SEARCH_CALL_COMPLETED = "response.web_search_call.completed" + RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS = "response.web_search_call.in_progress" + RESPONSE_WEB_SEARCH_CALL_SEARCHING = "response.web_search_call.searching" + RESPONSE_IMAGE_GENERATION_CALL_COMPLETED = "response.image_generation_call.completed" + RESPONSE_IMAGE_GENERATION_CALL_GENERATING = "response.image_generation_call.generating" + RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS = "response.image_generation_call.in_progress" + RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE = "response.image_generation_call.partial_image" + RESPONSE_MCP_CALL_ARGUMENTS_DELTA = "response.mcp_call.arguments_delta" + RESPONSE_MCP_CALL_ARGUMENTS_DONE = "response.mcp_call.arguments_done" + RESPONSE_MCP_CALL_COMPLETED = "response.mcp_call.completed" + RESPONSE_MCP_CALL_FAILED = "response.mcp_call.failed" + RESPONSE_MCP_CALL_IN_PROGRESS = "response.mcp_call.in_progress" + RESPONSE_MCP_LIST_TOOLS_COMPLETED = "response.mcp_list_tools.completed" + RESPONSE_MCP_LIST_TOOLS_FAILED = "response.mcp_list_tools.failed" + RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS = "response.mcp_list_tools.in_progress" + RESPONSE_QUEUED = "response.queued" + RESPONSE_REASONING_DELTA = "response.reasoning.delta" + RESPONSE_REASONING_DONE = "response.reasoning.done" + RESPONSE_REASONING_SUMMARY_DELTA = "response.reasoning_summary.delta" + RESPONSE_REASONING_SUMMARY_DONE = "response.reasoning_summary.done" + + +class ResponseTextFormatConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """An object specifying the format that the model must output. + Configuring ``{ "type": "json_schema" }`` enables Structured Outputs, + which ensures the model will match your supplied JSON schema. Learn more in the + `Structured Outputs guide `_. + The default format is ``{ "type": "text" }`` with no additional options. + **Not recommended for gpt-4o and newer models:** + Setting to ``{ "type": "json_object" }`` enables the older JSON mode, which + ensures the message the model generates is valid JSON. Using ``json_schema`` + is preferred for models that support it. 
+ """ + + TEXT = "text" + JSON_SCHEMA = "json_schema" + JSON_OBJECT = "json_object" + + +class RiskCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Risk category for the attack objective.""" + + HATE_UNFAIRNESS = "HateUnfairness" + """Represents content related to hate or unfairness.""" + VIOLENCE = "Violence" + """Represents content related to violence.""" + SEXUAL = "Sexual" + """Represents content of a sexual nature.""" + SELF_HARM = "SelfHarm" + """Represents content related to self-harm.""" + + +class SampleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of sample used in the analysis.""" + + EVALUATION_RESULT_SAMPLE = "EvaluationResultSample" + """A sample from the evaluation result.""" + + +class ScheduleProvisioningStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Schedule provisioning status.""" + + CREATING = "Creating" + """Represents the creation status of the schedule.""" + UPDATING = "Updating" + """Represents the updating status of the schedule.""" + DELETING = "Deleting" + """Represents the deleting status of the schedule.""" + SUCCEEDED = "Succeeded" + """Represents the succeeded status of the schedule.""" + FAILED = "Failed" + """Represents the failed status of the schedule.""" + + +class ScheduleTaskType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the task.""" + + EVALUATION = "Evaluation" + """Evaluation task.""" + INSIGHT = "Insight" + """Insight task.""" + + +class ServiceTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Specifies the processing type used for serving the request. + * If set to 'auto', then the request will be processed with the service tier configured in the + Project settings. Unless otherwise configured, the Project will use 'default'. + * If set to 'default', then the request will be processed with the standard pricing and + performance for the selected model. + * If set to '[flex](/docs/guides/flex-processing)' or 'priority', then the request will be + processed with the corresponding service tier. [Contact + sales](https://openai.com/contact-sales) to learn more about Priority processing. + * When not set, the default behavior is 'auto'. + When the ``service_tier`` parameter is set, the response body will include the + ``service_tier`` value based on the processing mode actually used to serve the request. This + response value may be different from the value set in the parameter. + """ + + AUTO = "auto" + DEFAULT = "default" + FLEX = "flex" + SCALE = "scale" + PRIORITY = "priority" + + +class ToolChoiceObjectType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Indicates that the model should use a built-in tool to generate a response. + `Learn more about built-in tools `_. + """ + + FILE_SEARCH = "file_search" + FUNCTION = "function" + COMPUTER = "computer_use_preview" + WEB_SEARCH = "web_search_preview" + IMAGE_GENERATION = "image_generation" + CODE_INTERPRETER = "code_interpreter" + MCP = "mcp" + + +class ToolChoiceOptions(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Controls which (if any) tool is called by the model. + ``none`` means the model will not call any tool and instead generates a message. + ``auto`` means the model can pick between generating a message or calling one or + more tools. + ``required`` means the model must call one or more tools. 
+ """ + + NONE = "none" + AUTO = "auto" + REQUIRED = "required" + + +class ToolType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """A tool that can be used to generate a response.""" + + FILE_SEARCH = "file_search" + FUNCTION = "function" + COMPUTER_USE_PREVIEW = "computer_use_preview" + WEB_SEARCH_PREVIEW = "web_search_preview" + MCP = "mcp" + CODE_INTERPRETER = "code_interpreter" + IMAGE_GENERATION = "image_generation" + LOCAL_SHELL = "local_shell" + BING_GROUNDING = "bing_grounding" + BROWSER_AUTOMATION_PREVIEW = "browser_automation_preview" + FABRIC_DATAAGENT_PREVIEW = "fabric_dataagent_preview" + SHAREPOINT_GROUNDING_PREVIEW = "sharepoint_grounding_preview" + AZURE_AI_SEARCH = "azure_ai_search" + OPENAPI = "openapi" + BING_CUSTOM_SEARCH_PREVIEW = "bing_custom_search_preview" + CAPTURE_STRUCTURED_OUTPUTS = "capture_structured_outputs" + A2_A_PREVIEW = "a2a_preview" + AZURE_FUNCTION = "azure_function" + MEMORY_SEARCH = "memory_search" + + +class TreatmentEffectType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Treatment Effect Type.""" + + TOO_FEW_SAMPLES = "TooFewSamples" + """Not enough samples to determine treatment effect.""" + INCONCLUSIVE = "Inconclusive" + """No significant difference between treatment and baseline.""" + CHANGED = "Changed" + """Indicates the metric changed with statistical significance, but the direction is neutral.""" + IMPROVED = "Improved" + """Indicates the treatment significantly improved the metric compared to baseline.""" + DEGRADED = "Degraded" + """Indicates the treatment significantly degraded the metric compared to baseline.""" + + +class TriggerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the trigger.""" + + CRON = "Cron" + """Cron based trigger.""" + RECURRENCE = "Recurrence" + """Recurrence based trigger.""" + ONE_TIME = "OneTime" + """One-time trigger.""" + + +class WebSearchActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of WebSearchActionType.""" + + SEARCH = "search" + OPEN_PAGE = "open_page" + FIND = "find" diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_models.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_models.py new file mode 100644 index 000000000000..a810ddc805c3 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_models.py @@ -0,0 +1,15049 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +import datetime +from typing import Any, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload + +from ._utils.model_base import Model as _Model, rest_discriminator, rest_field +from ._enums import ( + AgentKind, + AnnotationType, + CodeInterpreterOutputType, + ComputerActionType, + ComputerToolCallOutputItemOutputType, + CredentialType, + DatasetType, + DeploymentType, + EvaluationRuleActionType, + EvaluationTaxonomyInputType, + EvaluatorDefinitionType, + IndexType, + InsightType, + ItemContentType, + ItemType, + LocationType, + MemoryItemKind, + MemoryStoreKind, + OpenApiAuthType, + PendingUploadType, + ReasoningItemSummaryPartType, + RecurrenceType, + ResponseStreamEventType, + ResponseTextFormatConfigurationType, + ResponsesMessageRole, + SampleType, + ScheduleTaskType, + ToolChoiceObjectType, + ToolType, + TriggerType, + WebSearchActionType, +) + +if TYPE_CHECKING: + from .. import _types, models as _models # type: ignore + + +class Tool(_Model): + """Tool. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + A2ATool, AzureAISearchAgentTool, AzureFunctionAgentTool, BingCustomSearchAgentTool, + BingGroundingAgentTool, BrowserAutomationAgentTool, CaptureStructuredOutputsTool, + CodeInterpreterTool, ComputerUsePreviewTool, MicrosoftFabricAgentTool, FileSearchTool, + FunctionTool, ImageGenTool, LocalShellTool, MCPTool, MemorySearchTool, OpenApiAgentTool, + SharepointAgentTool, WebSearchPreviewTool + + :ivar type: Required. Known values are: "file_search", "function", "computer_use_preview", + "web_search_preview", "mcp", "code_interpreter", "image_generation", "local_shell", + "bing_grounding", "browser_automation_preview", "fabric_dataagent_preview", + "sharepoint_grounding_preview", "azure_ai_search", "openapi", "bing_custom_search_preview", + "capture_structured_outputs", "a2a_preview", "azure_function", and "memory_search". + :vartype type: str or ~azure.ai.projects.models.ToolType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"file_search\", \"function\", \"computer_use_preview\", + \"web_search_preview\", \"mcp\", \"code_interpreter\", \"image_generation\", \"local_shell\", + \"bing_grounding\", \"browser_automation_preview\", \"fabric_dataagent_preview\", + \"sharepoint_grounding_preview\", \"azure_ai_search\", \"openapi\", + \"bing_custom_search_preview\", \"capture_structured_outputs\", \"a2a_preview\", + \"azure_function\", and \"memory_search\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class A2ATool(Tool, discriminator="a2a_preview"): + """An agent implementing the A2A protocol. + + :ivar type: The type of the tool. Always ``a2a``. Required. + :vartype type: str or ~azure.ai.projects.models.A2_A_PREVIEW + :ivar base_url: Base URL of the agent. + :vartype base_url: str + :ivar agent_card_path: The path to the agent card relative to the ``base_url``. + If not provided, defaults to ``/.well-known/agent-card.json``. 
+ :vartype agent_card_path: str + :ivar project_connection_id: The connection ID in the project for the A2A server. + The connection stores authentication and other connection details needed to connect to the A2A + server. + :vartype project_connection_id: str + """ + + type: Literal[ToolType.A2_A_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the tool. Always ``a2a``. Required.""" + base_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Base URL of the agent.""" + agent_card_path: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The path to the agent card relative to the ``base_url``. + If not provided, defaults to ``/.well-known/agent-card.json``.""" + project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The connection ID in the project for the A2A server. + The connection stores authentication and other connection details needed to connect to the A2A + server.""" + + @overload + def __init__( + self, + *, + base_url: Optional[str] = None, + agent_card_path: Optional[str] = None, + project_connection_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.A2_A_PREVIEW # type: ignore + + +class InsightResult(_Model): + """The result of the insights. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AgentClusterInsightResult, EvalCompareReport, EvaluationRunClusterInsightResult + + :ivar type: The type of insights result. Required. Known values are: + "EvaluationRunClusterInsight", "AgentClusterInsight", and "EvaluationComparison". + :vartype type: str or ~azure.ai.projects.models.InsightType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of insights result. Required. Known values are: \"EvaluationRunClusterInsight\", + \"AgentClusterInsight\", and \"EvaluationComparison\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentClusterInsightResult(InsightResult, discriminator="AgentClusterInsight"): + """Insights from the agent cluster analysis. + + :ivar type: The type of insights result. Required. Cluster Insight on an Agent. + :vartype type: str or ~azure.ai.projects.models.AGENT_CLUSTER_INSIGHT + :ivar cluster_insight: Required. + :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult + """ + + type: Literal[InsightType.AGENT_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of insights result. Required. 
Cluster Insight on an Agent.""" + cluster_insight: "_models.ClusterInsightResult" = rest_field( + name="clusterInsight", visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" + + @overload + def __init__( + self, + *, + cluster_insight: "_models.ClusterInsightResult", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.AGENT_CLUSTER_INSIGHT # type: ignore + + +class InsightRequest(_Model): + """The request of the insights report. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AgentClusterInsightsRequest, EvaluationComparisonRequest, EvaluationRunClusterInsightsRequest + + :ivar type: The type of request. Required. Known values are: "EvaluationRunClusterInsight", + "AgentClusterInsight", and "EvaluationComparison". + :vartype type: str or ~azure.ai.projects.models.InsightType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of request. Required. Known values are: \"EvaluationRunClusterInsight\", + \"AgentClusterInsight\", and \"EvaluationComparison\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentClusterInsightsRequest(InsightRequest, discriminator="AgentClusterInsight"): + """Insights on set of Agent Evaluation Results. + + :ivar type: The type of request. Required. Cluster Insight on an Agent. + :vartype type: str or ~azure.ai.projects.models.AGENT_CLUSTER_INSIGHT + :ivar agent_name: Identifier for the agent. Required. + :vartype agent_name: str + :ivar model_configuration: Configuration of the model used in the insight generation. + :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration + """ + + type: Literal[InsightType.AGENT_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of request. Required. Cluster Insight on an Agent.""" + agent_name: str = rest_field(name="agentName", visibility=["read", "create", "update", "delete", "query"]) + """Identifier for the agent. Required.""" + model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( + name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """Configuration of the model used in the insight generation.""" + + @overload + def __init__( + self, + *, + agent_name: str, + model_configuration: Optional["_models.InsightModelConfiguration"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.AGENT_CLUSTER_INSIGHT # type: ignore + + +class AgentContainerObject(_Model): + """The details of the container of a specific version of an agent. 
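+
+    A construction sketch (illustrative; only the replica counts are writable
+    here, the remaining fields are populated by the service)::
+
+        container = AgentContainerObject(max_replicas=2, min_replicas=1)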
+ + :ivar object: The object type, which is always 'agent.container'. Required. Default value is + "agent.container". + :vartype object: str + :ivar status: The status of the container of a specific version of an agent. Required. Known + values are: "Starting", "Running", "Stopping", "Stopped", "Failed", "Deleting", "Deleted", and + "Updating". + :vartype status: str or ~azure.ai.projects.models.AgentContainerStatus + :ivar max_replicas: The maximum number of replicas for the container. Default is 1. + :vartype max_replicas: int + :ivar min_replicas: The minimum number of replicas for the container. Default is 1. + :vartype min_replicas: int + :ivar error_message: The error message if the container failed to operate, if any. + :vartype error_message: str + :ivar created_at: The creation time of the container. Required. + :vartype created_at: ~datetime.datetime + :ivar updated_at: The last update time of the container. Required. + :vartype updated_at: ~datetime.datetime + """ + + object: Literal["agent.container"] = rest_field(visibility=["read"]) + """The object type, which is always 'agent.container'. Required. Default value is + \"agent.container\".""" + status: Union[str, "_models.AgentContainerStatus"] = rest_field(visibility=["read"]) + """The status of the container of a specific version of an agent. Required. Known values are: + \"Starting\", \"Running\", \"Stopping\", \"Stopped\", \"Failed\", \"Deleting\", \"Deleted\", + and \"Updating\".""" + max_replicas: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The maximum number of replicas for the container. Default is 1.""" + min_replicas: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The minimum number of replicas for the container. Default is 1.""" + error_message: Optional[str] = rest_field(visibility=["read"]) + """The error message if the container failed to operate, if any.""" + created_at: datetime.datetime = rest_field(visibility=["read"], format="rfc3339") + """The creation time of the container. Required.""" + updated_at: datetime.datetime = rest_field(visibility=["read"], format="rfc3339") + """The last update time of the container. Required.""" + + @overload + def __init__( + self, + *, + max_replicas: Optional[int] = None, + min_replicas: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["agent.container"] = "agent.container" + + +class AgentContainerOperationError(_Model): + """The error details of the container operation, if any. + + :ivar code: The error code of the container operation, if any. Required. + :vartype code: str + :ivar type: The error type of the container operation, if any. Required. + :vartype type: str + :ivar message: The error message of the container operation, if any. Required. + :vartype message: str + """ + + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error code of the container operation, if any. Required.""" + type: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error type of the container operation, if any. 
Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error message of the container operation, if any. Required.""" + + @overload + def __init__( + self, + *, + code: str, + type: str, + message: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentContainerOperationObject(_Model): + """The container operation for a specific version of an agent. + + :ivar id: The ID of the container operation. This id is unique identifier across the system. + Required. + :vartype id: str + :ivar agent_id: The ID of the agent. Required. + :vartype agent_id: str + :ivar agent_version_id: The ID of the agent version. Required. + :vartype agent_version_id: str + :ivar status: The status of the container operation. Required. Known values are: "NotStarted", + "InProgress", "Succeeded", and "Failed". + :vartype status: str or ~azure.ai.projects.models.AgentContainerOperationStatus + :ivar error: The error of the container operation, if any. + :vartype error: ~azure.ai.projects.models.AgentContainerOperationError + :ivar container: The container of the specific version of an agent. + :vartype container: ~azure.ai.projects.models.AgentContainerObject + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the container operation. This id is unique identifier across the system. Required.""" + agent_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the agent. Required.""" + agent_version_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the agent version. Required.""" + status: Union[str, "_models.AgentContainerOperationStatus"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the container operation. Required. Known values are: \"NotStarted\", + \"InProgress\", \"Succeeded\", and \"Failed\".""" + error: Optional["_models.AgentContainerOperationError"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The error of the container operation, if any.""" + container: Optional["_models.AgentContainerObject"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The container of the specific version of an agent.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + agent_id: str, + agent_version_id: str, + status: Union[str, "_models.AgentContainerOperationStatus"], + error: Optional["_models.AgentContainerOperationError"] = None, + container: Optional["_models.AgentContainerObject"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentDefinition(_Model): + """AgentDefinition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ContainerAppAgentDefinition, HostedAgentDefinition, PromptAgentDefinition, WorkflowDefinition + + :ivar kind: Required. Known values are: "prompt", "hosted", "container_app", and "workflow". 
+ :vartype kind: str or ~azure.ai.projects.models.AgentKind + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + """ + + __mapping__: dict[str, _Model] = {} + kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"prompt\", \"hosted\", \"container_app\", and \"workflow\".""" + rai_config: Optional["_models.RaiConfig"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Configuration for Responsible AI (RAI) content filtering and safety features.""" + + @overload + def __init__( + self, + *, + kind: str, + rai_config: Optional["_models.RaiConfig"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BaseCredentials(_Model): + """A base class for connection credentials. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + EntraIDCredentials, AgenticIdentityCredentials, ApiKeyCredentials, CustomCredential, + NoAuthenticationCredentials, SASCredentials + + :ivar type: The type of credential used by the connection. Required. Known values are: + "ApiKey", "AAD", "SAS", "CustomKeys", "None", and "AgenticIdentityToken". + :vartype type: str or ~azure.ai.projects.models.CredentialType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read"]) + """The type of credential used by the connection. Required. Known values are: \"ApiKey\", \"AAD\", + \"SAS\", \"CustomKeys\", \"None\", and \"AgenticIdentityToken\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgenticIdentityCredentials(BaseCredentials, discriminator="AgenticIdentityToken"): + """Agentic identity credential definition. + + :ivar type: The credential type. Required. Agentic identity credential + :vartype type: str or ~azure.ai.projects.models.AGENTIC_IDENTITY + """ + + type: Literal[CredentialType.AGENTIC_IDENTITY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. Agentic identity credential""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.AGENTIC_IDENTITY # type: ignore + + +class AgentId(_Model): + """AgentId. + + :ivar type: Required. Default value is "agent_id". + :vartype type: str + :ivar name: The name of the agent. Required. + :vartype name: str + :ivar version: The version identifier of the agent. Required. + :vartype version: str + """ + + type: Literal["agent_id"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required. 
Default value is \"agent_id\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. Required.""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version identifier of the agent. Required.""" + + @overload + def __init__( + self, + *, + name: str, + version: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type: Literal["agent_id"] = "agent_id" + + +class AgentObject(_Model): + """AgentObject. + + :ivar object: The object type, which is always 'agent'. Required. Default value is "agent". + :vartype object: str + :ivar id: The unique identifier of the agent. Required. + :vartype id: str + :ivar name: The name of the agent. Required. + :vartype name: str + :ivar versions: The latest version of the agent. Required. + :vartype versions: ~azure.ai.projects.models.AgentObjectVersions + """ + + object: Literal["agent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type, which is always 'agent'. Required. Default value is \"agent\".""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the agent. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. Required.""" + versions: "_models.AgentObjectVersions" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The latest version of the agent. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + name: str, + versions: "_models.AgentObjectVersions", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["agent"] = "agent" + + +class AgentObjectVersions(_Model): + """AgentObjectVersions. + + :ivar latest: Required. + :vartype latest: ~azure.ai.projects.models.AgentVersionObject + """ + + latest: "_models.AgentVersionObject" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + latest: "_models.AgentVersionObject", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentReference(_Model): + """AgentReference. + + :ivar type: Required. Default value is "agent_reference". + :vartype type: str + :ivar name: The name of the agent. Required. + :vartype name: str + :ivar version: The version identifier of the agent. + :vartype version: str + """ + + type: Literal["agent_reference"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required. Default value is \"agent_reference\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. 
Required.""" + version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version identifier of the agent.""" + + @overload + def __init__( + self, + *, + name: str, + version: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type: Literal["agent_reference"] = "agent_reference" + + +class EvaluationTaxonomyInput(_Model): + """Input configuration for the evaluation taxonomy. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AgentTaxonomyInput + + :ivar type: Input type of the evaluation taxonomy. Required. Known values are: "agent" and + "policy". + :vartype type: str or ~azure.ai.projects.models.EvaluationTaxonomyInputType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Input type of the evaluation taxonomy. Required. Known values are: \"agent\" and \"policy\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AgentTaxonomyInput(EvaluationTaxonomyInput, discriminator="agent"): + """Input configuration for the evaluation taxonomy when the input type is agent. + + :ivar type: Input type of the evaluation taxonomy. Required. Agent + :vartype type: str or ~azure.ai.projects.models.AGENT + :ivar target: Target configuration for the agent. Required. + :vartype target: ~azure.ai.projects.models.AzureAIAgentTarget + :ivar risk_categories: List of risk categories to evaluate against. Required. + :vartype risk_categories: list[str or ~azure.ai.projects.models.RiskCategory] + """ + + type: Literal[EvaluationTaxonomyInputType.AGENT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Input type of the evaluation taxonomy. Required. Agent""" + target: "_models.AzureAIAgentTarget" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Target configuration for the agent. Required.""" + risk_categories: list[Union[str, "_models.RiskCategory"]] = rest_field( + name="riskCategories", visibility=["read", "create", "update", "delete", "query"] + ) + """List of risk categories to evaluate against. Required.""" + + @overload + def __init__( + self, + *, + target: "_models.AzureAIAgentTarget", + risk_categories: list[Union[str, "_models.RiskCategory"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = EvaluationTaxonomyInputType.AGENT # type: ignore + + +class AgentVersionObject(_Model): + """AgentVersionObject. + + :ivar metadata: Set of 16 key-value pairs that can be attached to an object. 
This can be + useful for storing additional information about the object in a structured + format, and querying for objects via API or the dashboard. + Keys are strings with a maximum length of 64 characters. Values are strings + with a maximum length of 512 characters. Required. + :vartype metadata: dict[str, str] + :ivar object: The object type, which is always 'agent.version'. Required. Default value is + "agent.version". + :vartype object: str + :ivar id: The unique identifier of the agent version. Required. + :vartype id: str + :ivar name: The name of the agent. Name can be used to retrieve/update/delete the agent. + Required. + :vartype name: str + :ivar version: The version identifier of the agent. Agents are immutable and every update + creates a new version while keeping the name same. Required. + :vartype version: str + :ivar description: A human-readable description of the agent. + :vartype description: str + :ivar created_at: The Unix timestamp (seconds) when the agent was created. Required. + :vartype created_at: ~datetime.datetime + :ivar definition: Required. + :vartype definition: ~azure.ai.projects.models.AgentDefinition + """ + + metadata: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Set of 16 key-value pairs that can be attached to an object. This can be + useful for storing additional information about the object in a structured + format, and querying for objects via API or the dashboard. + Keys are strings with a maximum length of 64 characters. Values are strings + with a maximum length of 512 characters. Required.""" + object: Literal["agent.version"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type, which is always 'agent.version'. Required. Default value is \"agent.version\".""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the agent version. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. Name can be used to retrieve/update/delete the agent. Required.""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version identifier of the agent. Agents are immutable and every update creates a new + version while keeping the name same. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable description of the agent.""" + created_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """The Unix timestamp (seconds) when the agent was created. Required.""" + definition: "_models.AgentDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + metadata: dict[str, str], + id: str, # pylint: disable=redefined-builtin + name: str, + version: str, + created_at: datetime.datetime, + definition: "_models.AgentDefinition", + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["agent.version"] = "agent.version" + + +class AISearchIndexResource(_Model): + """A AI Search Index resource. 
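+
+    An illustrative construction sketch (the connection ID and index name are
+    placeholder values). Like the other models in this module, the resource can
+    be built either from keyword arguments or from a raw JSON mapping::
+
+        index = AISearchIndexResource(
+            project_connection_id="<connection-id>",
+            index_name="my-index",
+            query_type="semantic",
+            top_k=5,
+        )
+        same = AISearchIndexResource({"project_connection_id": "<connection-id>"})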
+ + :ivar project_connection_id: An index connection ID in an IndexResource attached to this agent. + Required. + :vartype project_connection_id: str + :ivar index_name: The name of an index in an IndexResource attached to this agent. + :vartype index_name: str + :ivar query_type: Type of query in an AIIndexResource attached to this agent. Known values are: + "simple", "semantic", "vector", "vector_simple_hybrid", and "vector_semantic_hybrid". + :vartype query_type: str or ~azure.ai.projects.models.AzureAISearchQueryType + :ivar top_k: Number of documents to retrieve from search and present to the model. + :vartype top_k: int + :ivar filter: filter string for search resource. Learn more from here: + `https://learn.microsoft.com/azure/search/search-filters + `_. + :vartype filter: str + :ivar index_asset_id: Index asset id for search resource. + :vartype index_asset_id: str + """ + + project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An index connection ID in an IndexResource attached to this agent. Required.""" + index_name: Optional[str] = rest_field(name="indexName", visibility=["read", "create", "update", "delete", "query"]) + """The name of an index in an IndexResource attached to this agent.""" + query_type: Optional[Union[str, "_models.AzureAISearchQueryType"]] = rest_field( + name="queryType", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of query in an AIIndexResource attached to this agent. Known values are: \"simple\", + \"semantic\", \"vector\", \"vector_simple_hybrid\", and \"vector_semantic_hybrid\".""" + top_k: Optional[int] = rest_field(name="topK", visibility=["read", "create", "update", "delete", "query"]) + """Number of documents to retrieve from search and present to the model.""" + filter: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """filter string for search resource. Learn more from here: + `https://learn.microsoft.com/azure/search/search-filters + `_.""" + index_asset_id: Optional[str] = rest_field( + name="indexAssetId", visibility=["read", "create", "update", "delete", "query"] + ) + """Index asset id for search resource.""" + + @overload + def __init__( + self, + *, + project_connection_id: str, + index_name: Optional[str] = None, + query_type: Optional[Union[str, "_models.AzureAISearchQueryType"]] = None, + top_k: Optional[int] = None, + filter: Optional[str] = None, # pylint: disable=redefined-builtin + index_asset_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Annotation(_Model): + """Annotation. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AnnotationFileCitation, AnnotationFilePath, AnnotationUrlCitation + + :ivar type: Required. Known values are: "file_citation", "url_citation", "file_path", and + "container_file_citation". + :vartype type: str or ~azure.ai.projects.models.AnnotationType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. 
Known values are: \"file_citation\", \"url_citation\", \"file_path\", and + \"container_file_citation\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AnnotationFileCitation(Annotation, discriminator="file_citation"): + """A citation to a file. + + :ivar type: The type of the file citation. Always ``file_citation``. Required. + :vartype type: str or ~azure.ai.projects.models.FILE_CITATION + :ivar file_id: The ID of the file. Required. + :vartype file_id: str + :ivar index: The index of the file in the list of files. Required. + :vartype index: int + :ivar filename: The filename of the file cited. Required. + :vartype filename: str + """ + + type: Literal[AnnotationType.FILE_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the file citation. Always ``file_citation``. Required.""" + file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the file. Required.""" + index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the file in the list of files. Required.""" + filename: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The filename of the file cited. Required.""" + + @overload + def __init__( + self, + *, + file_id: str, + index: int, + filename: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = AnnotationType.FILE_CITATION # type: ignore + + +class AnnotationFilePath(Annotation, discriminator="file_path"): + """A path to a file. + + :ivar type: The type of the file path. Always ``file_path``. Required. + :vartype type: str or ~azure.ai.projects.models.FILE_PATH + :ivar file_id: The ID of the file. Required. + :vartype file_id: str + :ivar index: The index of the file in the list of files. Required. + :vartype index: int + """ + + type: Literal[AnnotationType.FILE_PATH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the file path. Always ``file_path``. Required.""" + file_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the file. Required.""" + index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the file in the list of files. Required.""" + + @overload + def __init__( + self, + *, + file_id: str, + index: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = AnnotationType.FILE_PATH # type: ignore + + +class AnnotationUrlCitation(Annotation, discriminator="url_citation"): + """A citation for a web resource used to generate a model response. + + :ivar type: The type of the URL citation. Always ``url_citation``. Required. 
+ :vartype type: str or ~azure.ai.projects.models.URL_CITATION + :ivar url: The URL of the web resource. Required. + :vartype url: str + :ivar start_index: The index of the first character of the URL citation in the message. + Required. + :vartype start_index: int + :ivar end_index: The index of the last character of the URL citation in the message. Required. + :vartype end_index: int + :ivar title: The title of the web resource. Required. + :vartype title: str + """ + + type: Literal[AnnotationType.URL_CITATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the URL citation. Always ``url_citation``. Required.""" + url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the web resource. Required.""" + start_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the first character of the URL citation in the message. Required.""" + end_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the last character of the URL citation in the message. Required.""" + title: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The title of the web resource. Required.""" + + @overload + def __init__( + self, + *, + url: str, + start_index: int, + end_index: int, + title: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = AnnotationType.URL_CITATION # type: ignore + + +class ApiError(_Model): + """ApiError. + + :ivar code: The error code. Required. + :vartype code: str + :ivar message: A human-readable description of the error. Required. + :vartype message: str + :ivar target: The target of the error, if applicable. + :vartype target: str + :ivar details: Additional details about the error. Required. + :vartype details: list[~azure.ai.projects.models.ApiError] + :ivar innererror: The inner error, if any. + :vartype innererror: ~azure.ai.projects.models.ApiInnerError + """ + + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error code. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable description of the error. Required.""" + target: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The target of the error, if applicable.""" + details: list["_models.ApiError"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional details about the error. Required.""" + innererror: Optional["_models.ApiInnerError"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The inner error, if any.""" + + @overload + def __init__( + self, + *, + code: str, + message: str, + details: list["_models.ApiError"], + target: Optional[str] = None, + innererror: Optional["_models.ApiInnerError"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ApiErrorResponse(_Model): + """Error response for API failures. + + :ivar error: Required. + :vartype error: ~azure.ai.projects.models.ApiError + """ + + error: "_models.ApiError" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + error: "_models.ApiError", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ApiInnerError(_Model): + """ApiInnerError. + + :ivar code: The error code. Required. + :vartype code: str + :ivar innererror: The inner error, if any. + :vartype innererror: ~azure.ai.projects.models.ApiInnerError + """ + + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error code. Required.""" + innererror: Optional["_models.ApiInnerError"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The inner error, if any.""" + + @overload + def __init__( + self, + *, + code: str, + innererror: Optional["_models.ApiInnerError"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ApiKeyCredentials(BaseCredentials, discriminator="ApiKey"): + """API Key Credential definition. + + :ivar type: The credential type. Required. API Key credential + :vartype type: str or ~azure.ai.projects.models.API_KEY + :ivar api_key: API Key. + :vartype api_key: str + """ + + type: Literal[CredentialType.API_KEY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. API Key credential""" + api_key: Optional[str] = rest_field(name="key", visibility=["read"]) + """API Key.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.API_KEY # type: ignore + + +class Location(_Model): + """Location. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ApproximateLocation + + :ivar type: Required. "approximate" + :vartype type: str or ~azure.ai.projects.models.LocationType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. \"approximate\"""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ApproximateLocation(Location, discriminator="approximate"): + """ApproximateLocation. + + :ivar type: Required. 
+    :vartype type: str or ~azure.ai.projects.models.APPROXIMATE
+    :ivar country:
+    :vartype country: str
+    :ivar region:
+    :vartype region: str
+    :ivar city:
+    :vartype city: str
+    :ivar timezone:
+    :vartype timezone: str
+    """
+
+    type: Literal[LocationType.APPROXIMATE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """Required."""
+    country: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    region: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    city: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    timezone: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+    @overload
+    def __init__(
+        self,
+        *,
+        country: Optional[str] = None,
+        region: Optional[str] = None,
+        city: Optional[str] = None,
+        timezone: Optional[str] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = LocationType.APPROXIMATE  # type: ignore
+
+
+class Target(_Model):
+    """Base class for targets with discriminator support.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    AzureAIAgentTarget, AzureAIAssistantTarget, AzureAIModelTarget
+
+    :ivar type: The type of target. Required. Default value is None.
+    :vartype type: str
+    """
+
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """The type of target. Required. Default value is None."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        type: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class AzureAIAgentTarget(Target, discriminator="azure_ai_agent"):
+    """Represents a target specifying an Azure AI agent.
+
+    :ivar type: The type of target, always ``azure_ai_agent``. Required. Default value is
+        "azure_ai_agent".
+    :vartype type: str
+    :ivar name: The unique identifier of the Azure AI agent. Required.
+    :vartype name: str
+    :ivar version: The version of the Azure AI agent.
+    :vartype version: str
+    :ivar tool_descriptions: The descriptions of the tools available to the Azure AI agent.
+    :vartype tool_descriptions: list[~azure.ai.projects.models.ToolDescription]
+    """
+
+    type: Literal["azure_ai_agent"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of target, always ``azure_ai_agent``. Required. Default value is \"azure_ai_agent\"."""
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The unique identifier of the Azure AI agent. Required."""
+    version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The version of the Azure AI agent."""
+    tool_descriptions: Optional[list["_models.ToolDescription"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The descriptions of the tools available to the Azure AI agent."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        name: str,
+        version: Optional[str] = None,
+        tool_descriptions: Optional[list["_models.ToolDescription"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = "azure_ai_agent"  # type: ignore
+
+
+class AzureAISearchAgentTool(Tool, discriminator="azure_ai_search"):
+    """The input definition information for an Azure AI search tool as used to configure an agent.
+
+    :ivar type: The object type, which is always 'azure_ai_search'. Required.
+    :vartype type: str or ~azure.ai.projects.models.AZURE_AI_SEARCH
+    :ivar azure_ai_search: The azure ai search index resource. Required.
+    :vartype azure_ai_search: ~azure.ai.projects.models.AzureAISearchToolResource
+    """
+
+    type: Literal[ToolType.AZURE_AI_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The object type, which is always 'azure_ai_search'. Required."""
+    azure_ai_search: "_models.AzureAISearchToolResource" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The azure ai search index resource. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        azure_ai_search: "_models.AzureAISearchToolResource",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ToolType.AZURE_AI_SEARCH  # type: ignore
+
+
+class Index(_Model):
+    """Index resource Definition.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    AzureAISearchIndex, CosmosDBIndex, ManagedAzureAISearchIndex
+
+    :ivar type: Type of index. Required. Known values are: "AzureSearch",
+        "CosmosDBNoSqlVectorStore", and "ManagedAzureSearch".
+    :vartype type: str or ~azure.ai.projects.models.IndexType
+    :ivar id: Asset ID, a unique identifier for the asset.
+    :vartype id: str
+    :ivar name: The name of the resource. Required.
+    :vartype name: str
+    :ivar version: The version of the resource. Required.
+    :vartype version: str
+    :ivar description: The asset description text.
+    :vartype description: str
+    :ivar tags: Tag dictionary. Tags can be added, removed, and updated.
+    :vartype tags: dict[str, str]
+    """
+
+    __mapping__: dict[str, _Model] = {}
+    type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])
+    """Type of index. Required. Known values are: \"AzureSearch\", \"CosmosDBNoSqlVectorStore\", and
+    \"ManagedAzureSearch\"."""
+    id: Optional[str] = rest_field(visibility=["read"])
+    """Asset ID, a unique identifier for the asset."""
+    name: str = rest_field(visibility=["read"])
+    """The name of the resource. 
Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["create", "update"]) + """The asset description text.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) + """Tag dictionary. Tags can be added, removed, and updated.""" + + @overload + def __init__( + self, + *, + type: str, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AzureAISearchIndex(Index, discriminator="AzureSearch"): + """Azure AI Search Index Definition. + + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Type of index. Required. Azure search + :vartype type: str or ~azure.ai.projects.models.AZURE_SEARCH + :ivar connection_name: Name of connection to Azure AI Search. Required. + :vartype connection_name: str + :ivar index_name: Name of index in Azure AI Search resource to attach. Required. + :vartype index_name: str + :ivar field_mapping: Field mapping configuration. + :vartype field_mapping: ~azure.ai.projects.models.FieldMapping + """ + + type: Literal[IndexType.AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of index. Required. Azure search""" + connection_name: str = rest_field(name="connectionName", visibility=["create"]) + """Name of connection to Azure AI Search. Required.""" + index_name: str = rest_field(name="indexName", visibility=["create"]) + """Name of index in Azure AI Search resource to attach. Required.""" + field_mapping: Optional["_models.FieldMapping"] = rest_field(name="fieldMapping", visibility=["create"]) + """Field mapping configuration.""" + + @overload + def __init__( + self, + *, + connection_name: str, + index_name: str, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + field_mapping: Optional["_models.FieldMapping"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = IndexType.AZURE_SEARCH # type: ignore + + +class AzureAISearchToolResource(_Model): + """A set of index resources used by the ``azure_ai_search`` tool. + + :ivar index_list: The indices attached to this agent. There can be a maximum of 1 index + resource attached to the agent. + :vartype index_list: list[~azure.ai.projects.models.AISearchIndexResource] + """ + + index_list: Optional[list["_models.AISearchIndexResource"]] = rest_field( + name="indexList", visibility=["read", "create", "update", "delete", "query"] + ) + """The indices attached to this agent. 
There can be a maximum of 1 index
+    resource attached to the agent."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        index_list: Optional[list["_models.AISearchIndexResource"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class AzureFunctionAgentTool(Tool, discriminator="azure_function"):
+    """The input definition information for an Azure Function Tool, as used to configure an Agent.
+
+    :ivar type: The object type, which is always 'azure_function'. Required.
+    :vartype type: str or ~azure.ai.projects.models.AZURE_FUNCTION
+    :ivar azure_function: The Azure Function Tool definition. Required.
+    :vartype azure_function: ~azure.ai.projects.models.AzureFunctionDefinition
+    """
+
+    type: Literal[ToolType.AZURE_FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The object type, which is always 'azure_function'. Required."""
+    azure_function: "_models.AzureFunctionDefinition" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The Azure Function Tool definition. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        azure_function: "_models.AzureFunctionDefinition",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ToolType.AZURE_FUNCTION  # type: ignore
+
+
+class AzureFunctionBinding(_Model):
+    """The structure for keeping storage queue name and URI.
+
+    :ivar type: The type of binding, which is always 'storage_queue'. Required. Default value is
+        "storage_queue".
+    :vartype type: str
+    :ivar storage_queue: Storage queue. Required.
+    :vartype storage_queue: ~azure.ai.projects.models.AzureFunctionStorageQueue
+    """
+
+    type: Literal["storage_queue"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The type of binding, which is always 'storage_queue'. Required. Default value is
+    \"storage_queue\"."""
+    storage_queue: "_models.AzureFunctionStorageQueue" = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Storage queue. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        storage_queue: "_models.AzureFunctionStorageQueue",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type: Literal["storage_queue"] = "storage_queue"
+
+
+class AzureFunctionDefinition(_Model):
+    """The definition of Azure function.
+
+    :ivar function: The definition of azure function and its parameters. Required.
+    :vartype function: ~azure.ai.projects.models.AzureFunctionDefinitionFunction
+    :ivar input_binding: Input storage queue. The queue storage trigger runs a function as messages
+        are added to it. Required.
+    :vartype input_binding: ~azure.ai.projects.models.AzureFunctionBinding
+    :ivar output_binding: Output storage queue. The function writes output to this queue when the
+        input items are processed. Required. 
+ :vartype output_binding: ~azure.ai.projects.models.AzureFunctionBinding + """ + + function: "_models.AzureFunctionDefinitionFunction" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The definition of azure function and its parameters. Required.""" + input_binding: "_models.AzureFunctionBinding" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Input storage queue. The queue storage trigger runs a function as messages are added to it. + Required.""" + output_binding: "_models.AzureFunctionBinding" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Output storage queue. The function writes output to this queue when the input items are + processed. Required.""" + + @overload + def __init__( + self, + *, + function: "_models.AzureFunctionDefinitionFunction", + input_binding: "_models.AzureFunctionBinding", + output_binding: "_models.AzureFunctionBinding", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AzureFunctionDefinitionFunction(_Model): + """AzureFunctionDefinitionFunction. + + :ivar name: The name of the function to be called. Required. + :vartype name: str + :ivar description: A description of what the function does, used by the model to choose when + and how to call the function. + :vartype description: str + :ivar parameters: The parameters the functions accepts, described as a JSON Schema object. + Required. + :vartype parameters: any + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to be called. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the function does, used by the model to choose when and how to call the + function.""" + parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The parameters the functions accepts, described as a JSON Schema object. Required.""" + + @overload + def __init__( + self, + *, + name: str, + parameters: Any, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AzureFunctionStorageQueue(_Model): + """The structure for keeping storage queue name and URI. + + :ivar queue_service_endpoint: URI to the Azure Storage Queue service allowing you to manipulate + a queue. Required. + :vartype queue_service_endpoint: str + :ivar queue_name: The name of an Azure function storage queue. Required. + :vartype queue_name: str + """ + + queue_service_endpoint: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """URI to the Azure Storage Queue service allowing you to manipulate a queue. Required.""" + queue_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of an Azure function storage queue. Required.""" + + @overload + def __init__( + self, + *, + queue_service_endpoint: str, + queue_name: str, + ) -> None: ... 
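+    # Editor's note: illustrative sketch only (not generated code) of how the Azure
+    # Function tool models defined above compose. The endpoint, queue names, function
+    # name, and JSON schema below are placeholders, not values from this package.
+    #
+    #   input_queue = AzureFunctionStorageQueue(
+    #       queue_service_endpoint="https://<storage-account>.queue.core.windows.net",
+    #       queue_name="agent-input",
+    #   )
+    #   output_queue = AzureFunctionStorageQueue(
+    #       queue_service_endpoint="https://<storage-account>.queue.core.windows.net",
+    #       queue_name="agent-output",
+    #   )
+    #   tool = AzureFunctionAgentTool(
+    #       azure_function=AzureFunctionDefinition(
+    #           function=AzureFunctionDefinitionFunction(
+    #               name="get_weather",
+    #               parameters={"type": "object", "properties": {"location": {"type": "string"}}},
+    #           ),
+    #           input_binding=AzureFunctionBinding(storage_queue=input_queue),
+    #           output_binding=AzureFunctionBinding(storage_queue=output_queue),
+    #       ),
+    #   )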
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TargetConfig(_Model): + """Abstract class for target configuration. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureOpenAIModelConfiguration + + :ivar type: Type of the model configuration. Required. Default value is None. + :vartype type: str + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the model configuration. Required. Default value is None.""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AzureOpenAIModelConfiguration(TargetConfig, discriminator="AzureOpenAIModel"): + """Azure OpenAI model configuration. The API version would be selected by the service for querying + the model. + + :ivar type: Required. Default value is "AzureOpenAIModel". + :vartype type: str + :ivar model_deployment_name: Deployment name for AOAI model. Example: gpt-4o if in AIServices + or connection based ``connection_name/deployment_name`` (e.g. ``my-aoai-connection/gpt-4o``). + Required. + :vartype model_deployment_name: str + """ + + type: Literal["AzureOpenAIModel"] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Default value is \"AzureOpenAIModel\".""" + model_deployment_name: str = rest_field( + name="modelDeploymentName", visibility=["read", "create", "update", "delete", "query"] + ) + """Deployment name for AOAI model. Example: gpt-4o if in AIServices or connection based + ``connection_name/deployment_name`` (e.g. ``my-aoai-connection/gpt-4o``). Required.""" + + @overload + def __init__( + self, + *, + model_deployment_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = "AzureOpenAIModel" # type: ignore + + +class BingCustomSearchAgentTool(Tool, discriminator="bing_custom_search_preview"): + """The input definition information for a Bing custom search tool as used to configure an agent. + + :ivar type: The object type, which is always 'bing_custom_search'. Required. + :vartype type: str or ~azure.ai.projects.models.BING_CUSTOM_SEARCH_PREVIEW + :ivar bing_custom_search_preview: The bing custom search tool parameters. Required. + :vartype bing_custom_search_preview: ~azure.ai.projects.models.BingCustomSearchToolParameters + """ + + type: Literal[ToolType.BING_CUSTOM_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_custom_search'. 
Required.""" + bing_custom_search_preview: "_models.BingCustomSearchToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The bing custom search tool parameters. Required.""" + + @overload + def __init__( + self, + *, + bing_custom_search_preview: "_models.BingCustomSearchToolParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.BING_CUSTOM_SEARCH_PREVIEW # type: ignore + + +class BingCustomSearchConfiguration(_Model): + """A bing custom search configuration. + + :ivar project_connection_id: Project connection id for grounding with bing search. Required. + :vartype project_connection_id: str + :ivar instance_name: Name of the custom configuration instance given to config. Required. + :vartype instance_name: str + :ivar market: The market where the results come from. + :vartype market: str + :ivar set_lang: The language to use for user interface strings when calling Bing API. + :vartype set_lang: str + :ivar count: The number of search results to return in the bing api response. + :vartype count: int + :ivar freshness: Filter search results by a specific time range. Accepted values: + `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters + `_. + :vartype freshness: str + """ + + project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Project connection id for grounding with bing search. Required.""" + instance_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the custom configuration instance given to config. Required.""" + market: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The market where the results come from.""" + set_lang: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The language to use for user interface strings when calling Bing API.""" + count: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of search results to return in the bing api response.""" + freshness: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Filter search results by a specific time range. Accepted values: + `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters + `_.""" + + @overload + def __init__( + self, + *, + project_connection_id: str, + instance_name: str, + market: Optional[str] = None, + set_lang: Optional[str] = None, + count: Optional[int] = None, + freshness: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BingCustomSearchToolParameters(_Model): + """The bing custom search tool parameters. + + :ivar search_configurations: The project connections attached to this tool. There can be a + maximum of 1 connection + resource attached to the tool. Required. 
+ :vartype search_configurations: list[~azure.ai.projects.models.BingCustomSearchConfiguration] + """ + + search_configurations: list["_models.BingCustomSearchConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool. Required.""" + + @overload + def __init__( + self, + *, + search_configurations: list["_models.BingCustomSearchConfiguration"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BingGroundingAgentTool(Tool, discriminator="bing_grounding"): + """The input definition information for a bing grounding search tool as used to configure an + agent. + + :ivar type: The object type, which is always 'bing_grounding'. Required. + :vartype type: str or ~azure.ai.projects.models.BING_GROUNDING + :ivar bing_grounding: The bing grounding search tool parameters. Required. + :vartype bing_grounding: ~azure.ai.projects.models.BingGroundingSearchToolParameters + """ + + type: Literal[ToolType.BING_GROUNDING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'bing_grounding'. Required.""" + bing_grounding: "_models.BingGroundingSearchToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The bing grounding search tool parameters. Required.""" + + @overload + def __init__( + self, + *, + bing_grounding: "_models.BingGroundingSearchToolParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.BING_GROUNDING # type: ignore + + +class BingGroundingSearchConfiguration(_Model): + """Search configuration for Bing Grounding. + + :ivar project_connection_id: Project connection id for grounding with bing search. Required. + :vartype project_connection_id: str + :ivar market: The market where the results come from. + :vartype market: str + :ivar set_lang: The language to use for user interface strings when calling Bing API. + :vartype set_lang: str + :ivar count: The number of search results to return in the bing api response. + :vartype count: int + :ivar freshness: Filter search results by a specific time range. Accepted values: + `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters + `_. + :vartype freshness: str + """ + + project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Project connection id for grounding with bing search. 
Required.""" + market: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The market where the results come from.""" + set_lang: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The language to use for user interface strings when calling Bing API.""" + count: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of search results to return in the bing api response.""" + freshness: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Filter search results by a specific time range. Accepted values: + `https://learn.microsoft.com/bing/search-apis/bing-web-search/reference/query-parameters + `_.""" + + @overload + def __init__( + self, + *, + project_connection_id: str, + market: Optional[str] = None, + set_lang: Optional[str] = None, + count: Optional[int] = None, + freshness: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BingGroundingSearchToolParameters(_Model): + """The bing grounding search tool parameters. + + :ivar project_connections: The project connections attached to this tool. There can be a + maximum of 1 connection + resource attached to the tool. Required. + :vartype project_connections: ~azure.ai.projects.models.ToolProjectConnectionList + :ivar search_configurations: The search configurations attached to this tool. There can be a + maximum of 1 + search configuration resource attached to the tool. Required. + :vartype search_configurations: + list[~azure.ai.projects.models.BingGroundingSearchConfiguration] + """ + + project_connections: "_models.ToolProjectConnectionList" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool. Required.""" + search_configurations: list["_models.BingGroundingSearchConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The search configurations attached to this tool. There can be a maximum of 1 + search configuration resource attached to the tool. Required.""" + + @overload + def __init__( + self, + *, + project_connections: "_models.ToolProjectConnectionList", + search_configurations: list["_models.BingGroundingSearchConfiguration"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BlobReference(_Model): + """Blob reference details. + + :ivar blob_uri: Blob URI path for client to upload data. Example: + `https://blob.windows.core.net/Container/Path `_. + Required. + :vartype blob_uri: str + :ivar storage_account_arm_id: ARM ID of the storage account to use. Required. + :vartype storage_account_arm_id: str + :ivar credential: Credential info to access the storage account. Required. 
+ :vartype credential: ~azure.ai.projects.models.BlobReferenceSasCredential + """ + + blob_uri: str = rest_field(name="blobUri", visibility=["read", "create", "update", "delete", "query"]) + """Blob URI path for client to upload data. Example: `https://blob.windows.core.net/Container/Path + `_. Required.""" + storage_account_arm_id: str = rest_field( + name="storageAccountArmId", visibility=["read", "create", "update", "delete", "query"] + ) + """ARM ID of the storage account to use. Required.""" + credential: "_models.BlobReferenceSasCredential" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Credential info to access the storage account. Required.""" + + @overload + def __init__( + self, + *, + blob_uri: str, + storage_account_arm_id: str, + credential: "_models.BlobReferenceSasCredential", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BlobReferenceSasCredential(_Model): + """SAS Credential definition. + + :ivar sas_uri: SAS uri. Required. + :vartype sas_uri: str + :ivar type: Type of credential. Required. Default value is "SAS". + :vartype type: str + """ + + sas_uri: str = rest_field(name="sasUri", visibility=["read"]) + """SAS uri. Required.""" + type: Literal["SAS"] = rest_field(visibility=["read"]) + """Type of credential. Required. Default value is \"SAS\".""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type: Literal["SAS"] = "SAS" + + +class BrowserAutomationAgentTool(Tool, discriminator="browser_automation_preview"): + """The input definition information for a Browser Automation Tool, as used to configure an Agent. + + :ivar type: The object type, which is always 'browser_automation'. Required. + :vartype type: str or ~azure.ai.projects.models.BROWSER_AUTOMATION_PREVIEW + :ivar browser_automation_preview: The Browser Automation Tool parameters. Required. + :vartype browser_automation_preview: ~azure.ai.projects.models.BrowserAutomationToolParameters + """ + + type: Literal[ToolType.BROWSER_AUTOMATION_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'browser_automation'. Required.""" + browser_automation_preview: "_models.BrowserAutomationToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The Browser Automation Tool parameters. Required.""" + + @overload + def __init__( + self, + *, + browser_automation_preview: "_models.BrowserAutomationToolParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.BROWSER_AUTOMATION_PREVIEW # type: ignore + + +class BrowserAutomationToolConnectionParameters(_Model): # pylint: disable=name-too-long + """Definition of input parameters for the connection used by the Browser Automation Tool. + + :ivar id: The ID of the project connection to your Azure Playwright resource. Required. 
+ :vartype id: str + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the project connection to your Azure Playwright resource. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrowserAutomationToolParameters(_Model): + """Definition of input parameters for the Browser Automation Tool. + + :ivar project_connection: The project connection parameters associated with the Browser + Automation Tool. Required. + :vartype project_connection: + ~azure.ai.projects.models.BrowserAutomationToolConnectionParameters + """ + + project_connection: "_models.BrowserAutomationToolConnectionParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connection parameters associated with the Browser Automation Tool. Required.""" + + @overload + def __init__( + self, + *, + project_connection: "_models.BrowserAutomationToolConnectionParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CaptureStructuredOutputsTool(Tool, discriminator="capture_structured_outputs"): + """A tool for capturing structured outputs. + + :ivar type: The type of the tool. Always ``capture_structured_outputs``. Required. + :vartype type: str or ~azure.ai.projects.models.CAPTURE_STRUCTURED_OUTPUTS + :ivar outputs: The structured outputs to capture from the model. Required. + :vartype outputs: ~azure.ai.projects.models.StructuredOutputDefinition + """ + + type: Literal[ToolType.CAPTURE_STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the tool. Always ``capture_structured_outputs``. Required.""" + outputs: "_models.StructuredOutputDefinition" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The structured outputs to capture from the model. Required.""" + + @overload + def __init__( + self, + *, + outputs: "_models.StructuredOutputDefinition", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.CAPTURE_STRUCTURED_OUTPUTS # type: ignore + + +class ChartCoordinate(_Model): + """Coordinates for the analysis chart. + + :ivar x: X-axis coordinate. Required. + :vartype x: int + :ivar y: Y-axis coordinate. Required. + :vartype y: int + :ivar size: Size of the chart element. Required. + :vartype size: int + """ + + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """X-axis coordinate. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Y-axis coordinate. Required.""" + size: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Size of the chart element. 
Required.""" + + @overload + def __init__( + self, + *, + x: int, + y: int, + size: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryItem(_Model): + """A single memory item stored in the memory store, containing content and metadata. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ChatSummaryMemoryItem, UserProfileMemoryItem + + :ivar memory_id: The unique ID of the memory item. Required. + :vartype memory_id: str + :ivar updated_at: The last update time of the memory item. Required. + :vartype updated_at: ~datetime.datetime + :ivar scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :vartype scope: str + :ivar content: The content of the memory. Required. + :vartype content: str + :ivar kind: The kind of the memory item. Required. Known values are: "user_profile" and + "chat_summary". + :vartype kind: str or ~azure.ai.projects.models.MemoryItemKind + """ + + __mapping__: dict[str, _Model] = {} + memory_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the memory item. Required.""" + updated_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """The last update time of the memory item. Required.""" + scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The namespace that logically groups and isolates memories, such as a user ID. Required.""" + content: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content of the memory. Required.""" + kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) + """The kind of the memory item. Required. Known values are: \"user_profile\" and \"chat_summary\".""" + + @overload + def __init__( + self, + *, + memory_id: str, + updated_at: datetime.datetime, + scope: str, + content: str, + kind: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ChatSummaryMemoryItem(MemoryItem, discriminator="chat_summary"): + """A memory item containing a summary extracted from conversations. + + :ivar memory_id: The unique ID of the memory item. Required. + :vartype memory_id: str + :ivar updated_at: The last update time of the memory item. Required. + :vartype updated_at: ~datetime.datetime + :ivar scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :vartype scope: str + :ivar content: The content of the memory. Required. + :vartype content: str + :ivar kind: The kind of the memory item. Required. Summary of chat conversations. + :vartype kind: str or ~azure.ai.projects.models.CHAT_SUMMARY + """ + + kind: Literal[MemoryItemKind.CHAT_SUMMARY] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The kind of the memory item. Required. 
Summary of chat conversations.""" + + @overload + def __init__( + self, + *, + memory_id: str, + updated_at: datetime.datetime, + scope: str, + content: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = MemoryItemKind.CHAT_SUMMARY # type: ignore + + +class ClusterInsightResult(_Model): + """Insights from the cluster analysis. + + :ivar summary: Summary of the insights report. Required. + :vartype summary: ~azure.ai.projects.models.InsightSummary + :ivar clusters: List of clusters identified in the insights. Required. + :vartype clusters: list[~azure.ai.projects.models.InsightCluster] + :ivar coordinates: Optional mapping of IDs to 2D coordinates used by the UX for + visualization. + The map keys are string identifiers (for example, a cluster id or a sample id) + and the values are the coordinates and visual size for rendering on a 2D chart. + This property is omitted unless the client requests coordinates (for example, + by passing ``includeCoordinates=true`` as a query parameter). + Example: + { + "cluster-1": { "x": 12, "y": 34, "size": 8 }, + "sample-123": { "x": 18, "y": 22, "size": 4 } + } + Coordinates are intended only for client-side visualization and do not + modify the canonical insights results. + :vartype coordinates: dict[str, ~azure.ai.projects.models.ChartCoordinate] + """ + + summary: "_models.InsightSummary" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Summary of the insights report. Required.""" + clusters: list["_models.InsightCluster"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of clusters identified in the insights. Required.""" + coordinates: Optional[dict[str, "_models.ChartCoordinate"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """ Optional mapping of IDs to 2D coordinates used by the UX for visualization. + The map keys are string identifiers (for example, a cluster id or a sample id) + and the values are the coordinates and visual size for rendering on a 2D chart. + This property is omitted unless the client requests coordinates (for example, + by passing ``includeCoordinates=true`` as a query parameter). + Example: + { + \"cluster-1\": { \"x\": 12, \"y\": 34, \"size\": 8 }, + \"sample-123\": { \"x\": 18, \"y\": 22, \"size\": 4 } + } + Coordinates are intended only for client-side visualization and do not + modify the canonical insights results.""" + + @overload + def __init__( + self, + *, + summary: "_models.InsightSummary", + clusters: list["_models.InsightCluster"], + coordinates: Optional[dict[str, "_models.ChartCoordinate"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ClusterTokenUsage(_Model): + """Token usage for cluster analysis. + + :ivar input_token_usage: input token usage. Required. + :vartype input_token_usage: int + :ivar output_token_usage: output token usage. Required. + :vartype output_token_usage: int + :ivar total_token_usage: total token usage. Required. 
+ :vartype total_token_usage: int + """ + + input_token_usage: int = rest_field( + name="inputTokenUsage", visibility=["read", "create", "update", "delete", "query"] + ) + """input token usage. Required.""" + output_token_usage: int = rest_field( + name="outputTokenUsage", visibility=["read", "create", "update", "delete", "query"] + ) + """output token usage. Required.""" + total_token_usage: int = rest_field( + name="totalTokenUsage", visibility=["read", "create", "update", "delete", "query"] + ) + """total token usage. Required.""" + + @overload + def __init__( + self, + *, + input_token_usage: int, + output_token_usage: int, + total_token_usage: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluatorDefinition(_Model): + """Base evaluator configuration with discriminator. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CodeBasedEvaluatorDefinition, PromptBasedEvaluatorDefinition + + :ivar type: The type of evaluator definition. Required. Known values are: "prompt", "code", + "prompt_and_code", "service", and "openai_graders". + :vartype type: str or ~azure.ai.projects.models.EvaluatorDefinitionType + :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. + This includes parameters like type, properties, required. + :vartype init_parameters: any + :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This + includes parameters like type, properties, required. + :vartype data_schema: any + :ivar metrics: List of output metrics produced by this evaluator. + :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of evaluator definition. Required. Known values are: \"prompt\", \"code\", + \"prompt_and_code\", \"service\", and \"openai_graders\".""" + init_parameters: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The JSON schema (Draft 2020-12) for the evaluator's input parameters. This includes parameters + like type, properties, required.""" + data_schema: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The JSON schema (Draft 2020-12) for the evaluator's input data. This includes parameters like + type, properties, required.""" + metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of output metrics produced by this evaluator.""" + + @overload + def __init__( + self, + *, + type: str, + init_parameters: Optional[Any] = None, + data_schema: Optional[Any] = None, + metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CodeBasedEvaluatorDefinition(EvaluatorDefinition, discriminator="code"): + """Code-based evaluator definition using python code. 
+ + :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. + This includes parameters like type, properties, required. + :vartype init_parameters: any + :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This + includes parameters like type, properties, required. + :vartype data_schema: any + :ivar metrics: List of output metrics produced by this evaluator. + :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] + :ivar type: Required. Code-based definition + :vartype type: str or ~azure.ai.projects.models.CODE + :ivar code_text: Inline code text for the evaluator. Required. + :vartype code_text: str + """ + + type: Literal[EvaluatorDefinitionType.CODE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Code-based definition""" + code_text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Inline code text for the evaluator. Required.""" + + @overload + def __init__( + self, + *, + code_text: str, + init_parameters: Optional[Any] = None, + data_schema: Optional[Any] = None, + metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = EvaluatorDefinitionType.CODE # type: ignore + + +class CodeInterpreterOutput(_Model): + """CodeInterpreterOutput. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CodeInterpreterOutputImage, CodeInterpreterOutputLogs + + :ivar type: Required. Known values are: "logs" and "image". + :vartype type: str or ~azure.ai.projects.models.CodeInterpreterOutputType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"logs\" and \"image\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CodeInterpreterOutputImage(CodeInterpreterOutput, discriminator="image"): + """The image output from the code interpreter. + + :ivar type: The type of the output. Always 'image'. Required. + :vartype type: str or ~azure.ai.projects.models.IMAGE + :ivar url: The URL of the image output from the code interpreter. Required. + :vartype url: str + """ + + type: Literal[CodeInterpreterOutputType.IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the output. Always 'image'. Required.""" + url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the image output from the code interpreter. Required.""" + + @overload + def __init__( + self, + *, + url: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CodeInterpreterOutputType.IMAGE # type: ignore + + +class CodeInterpreterOutputLogs(CodeInterpreterOutput, discriminator="logs"): + """The logs output from the code interpreter. + + :ivar type: The type of the output. Always 'logs'. Required. + :vartype type: str or ~azure.ai.projects.models.LOGS + :ivar logs: The logs output from the code interpreter. Required. + :vartype logs: str + """ + + type: Literal[CodeInterpreterOutputType.LOGS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the output. Always 'logs'. Required.""" + logs: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The logs output from the code interpreter. Required.""" + + @overload + def __init__( + self, + *, + logs: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CodeInterpreterOutputType.LOGS # type: ignore + + +class CodeInterpreterTool(Tool, discriminator="code_interpreter"): + """A tool that runs Python code to help generate a response to a prompt. + + :ivar type: The type of the code interpreter tool. Always ``code_interpreter``. Required. + :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER + :ivar container: The code interpreter container. Can be a container ID or an object that + specifies uploaded file IDs to make available to your code. Required. Is either a str type or a + CodeInterpreterToolAuto type. + :vartype container: str or ~azure.ai.projects.models.CodeInterpreterToolAuto + """ + + type: Literal[ToolType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the code interpreter tool. Always ``code_interpreter``. Required.""" + container: Union[str, "_models.CodeInterpreterToolAuto"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The code interpreter container. Can be a container ID or an object that + specifies uploaded file IDs to make available to your code. Required. Is either a str type or a + CodeInterpreterToolAuto type.""" + + @overload + def __init__( + self, + *, + container: Union[str, "_models.CodeInterpreterToolAuto"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.CODE_INTERPRETER # type: ignore + + +class CodeInterpreterToolAuto(_Model): + """Configuration for a code interpreter container. Optionally specify the IDs + of the files to run the code on. + + :ivar type: Always ``auto``. Required. Default value is "auto". + :vartype type: str + :ivar file_ids: An optional list of uploaded files to make available to your code. + :vartype file_ids: list[str] + """ + + type: Literal["auto"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Always ``auto``. Required. 
Default value is \"auto\".""" + file_ids: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An optional list of uploaded files to make available to your code.""" + + @overload + def __init__( + self, + *, + file_ids: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type: Literal["auto"] = "auto" + + +class ItemParam(_Model): + """Content item used to generate a response. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CodeInterpreterToolCallItemParam, ComputerToolCallItemParam, ComputerToolCallOutputItemParam, + FileSearchToolCallItemParam, FunctionToolCallItemParam, FunctionToolCallOutputItemParam, + ImageGenToolCallItemParam, ItemReferenceItemParam, LocalShellToolCallItemParam, + LocalShellToolCallOutputItemParam, MCPApprovalRequestItemParam, MCPApprovalResponseItemParam, + MCPCallItemParam, MCPListToolsItemParam, MemorySearchToolCallItemParam, + ResponsesMessageItemParam, ReasoningItemParam, WebSearchToolCallItemParam + + :ivar type: Required. Known values are: "message", "file_search_call", "function_call", + "function_call_output", "computer_call", "computer_call_output", "web_search_call", + "reasoning", "item_reference", "image_generation_call", "code_interpreter_call", + "local_shell_call", "local_shell_call_output", "mcp_list_tools", "mcp_approval_request", + "mcp_approval_response", "mcp_call", "structured_outputs", "workflow_action", + "memory_search_call", and "oauth_consent_request". + :vartype type: str or ~azure.ai.projects.models.ItemType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"message\", \"file_search_call\", \"function_call\", + \"function_call_output\", \"computer_call\", \"computer_call_output\", \"web_search_call\", + \"reasoning\", \"item_reference\", \"image_generation_call\", \"code_interpreter_call\", + \"local_shell_call\", \"local_shell_call_output\", \"mcp_list_tools\", + \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"structured_outputs\", + \"workflow_action\", \"memory_search_call\", and \"oauth_consent_request\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CodeInterpreterToolCallItemParam(ItemParam, discriminator="code_interpreter_call"): + """A tool call to run code. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL + :ivar container_id: The ID of the container used to run the code. Required. + :vartype container_id: str + :ivar code: The code to run, or null if not available. Required. + :vartype code: str + :ivar outputs: The outputs generated by the code interpreter, such as logs or images. + Can be null if no outputs are available. Required. 
+ :vartype outputs: list[~azure.ai.projects.models.CodeInterpreterOutput] + """ + + type: Literal[ItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the container used to run the code. Required.""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The code to run, or null if not available. Required.""" + outputs: list["_models.CodeInterpreterOutput"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The outputs generated by the code interpreter, such as logs or images. + Can be null if no outputs are available. Required.""" + + @overload + def __init__( + self, + *, + container_id: str, + code: str, + outputs: list["_models.CodeInterpreterOutput"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.CODE_INTERPRETER_CALL # type: ignore + + +class ItemResource(_Model): + """Content item used to generate a response. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CodeInterpreterToolCallItemResource, ComputerToolCallItemResource, + ComputerToolCallOutputItemResource, FileSearchToolCallItemResource, + FunctionToolCallItemResource, FunctionToolCallOutputItemResource, ImageGenToolCallItemResource, + LocalShellToolCallItemResource, LocalShellToolCallOutputItemResource, + MCPApprovalRequestItemResource, MCPApprovalResponseItemResource, MCPCallItemResource, + MCPListToolsItemResource, MemorySearchToolCallItemResource, ResponsesMessageItemResource, + OAuthConsentRequestItemResource, ReasoningItemResource, StructuredOutputsItemResource, + WebSearchToolCallItemResource, WorkflowActionOutputItemResource + + :ivar type: Required. Known values are: "message", "file_search_call", "function_call", + "function_call_output", "computer_call", "computer_call_output", "web_search_call", + "reasoning", "item_reference", "image_generation_call", "code_interpreter_call", + "local_shell_call", "local_shell_call_output", "mcp_list_tools", "mcp_approval_request", + "mcp_approval_response", "mcp_call", "structured_outputs", "workflow_action", + "memory_search_call", and "oauth_consent_request". + :vartype type: str or ~azure.ai.projects.models.ItemType + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. 
Known values are: \"message\", \"file_search_call\", \"function_call\", + \"function_call_output\", \"computer_call\", \"computer_call_output\", \"web_search_call\", + \"reasoning\", \"item_reference\", \"image_generation_call\", \"code_interpreter_call\", + \"local_shell_call\", \"local_shell_call_output\", \"mcp_list_tools\", + \"mcp_approval_request\", \"mcp_approval_response\", \"mcp_call\", \"structured_outputs\", + \"workflow_action\", \"memory_search_call\", and \"oauth_consent_request\".""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + created_by: Optional["_models.CreatedBy"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The information about the creator of the item.""" + + @overload + def __init__( + self, + *, + type: str, + id: str, # pylint: disable=redefined-builtin + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CodeInterpreterToolCallItemResource(ItemResource, discriminator="code_interpreter_call"): + """A tool call to run code. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER_CALL + :ivar status: Required. Is one of the following types: Literal["in_progress"], + Literal["completed"], Literal["incomplete"], Literal["interpreting"], Literal["failed"] + :vartype status: str or str or str or str or str + :ivar container_id: The ID of the container used to run the code. Required. + :vartype container_id: str + :ivar code: The code to run, or null if not available. Required. + :vartype code: str + :ivar outputs: The outputs generated by the code interpreter, such as logs or images. + Can be null if no outputs are available. Required. + :vartype outputs: list[~azure.ai.projects.models.CodeInterpreterOutput] + """ + + type: Literal[ItemType.CODE_INTERPRETER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required. Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], + Literal[\"incomplete\"], Literal[\"interpreting\"], Literal[\"failed\"]""" + container_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the container used to run the code. Required.""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The code to run, or null if not available. Required.""" + outputs: list["_models.CodeInterpreterOutput"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The outputs generated by the code interpreter, such as logs or images. + Can be null if no outputs are available. 
Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete", "interpreting", "failed"], + container_id: str, + code: str, + outputs: list["_models.CodeInterpreterOutput"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.CODE_INTERPRETER_CALL # type: ignore + + +class ComparisonFilter(_Model): + """A filter used to compare a specified attribute key to a given value using a defined comparison + operation. + + :ivar type: Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``, + ``lte``. + * `eq`: equals + * `ne`: not equal + * `gt`: greater than + * `gte`: greater than or equal + * `lt`: less than + * `lte`: less than or equal. Required. Is one of the following types: Literal["eq"], + Literal["ne"], Literal["gt"], Literal["gte"], Literal["lt"], Literal["lte"] + :vartype type: str or str or str or str or str or str + :ivar key: The key to compare against the value. Required. + :vartype key: str + :ivar value: The value to compare against the attribute key; supports string, number, or + boolean types. Required. Is one of the following types: str, float, bool + :vartype value: str or float or bool + """ + + type: Literal["eq", "ne", "gt", "gte", "lt", "lte"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Specifies the comparison operator: ``eq``, ``ne``, ``gt``, ``gte``, ``lt``, ``lte``. + * `eq`: equals + * `ne`: not equal + * `gt`: greater than + * `gte`: greater than or equal + * `lt`: less than + * `lte`: less than or equal. Required. Is one of the following types: Literal[\"eq\"], + Literal[\"ne\"], Literal[\"gt\"], Literal[\"gte\"], Literal[\"lt\"], Literal[\"lte\"]""" + key: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The key to compare against the value. Required.""" + value: Union[str, float, bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The value to compare against the attribute key; supports string, number, or boolean types. + Required. Is one of the following types: str, float, bool""" + + @overload + def __init__( + self, + *, + type: Literal["eq", "ne", "gt", "gte", "lt", "lte"], + key: str, + value: Union[str, float, bool], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CompoundFilter(_Model): + """Combine multiple filters using ``and`` or ``or``. + + :ivar type: Type of operation: ``and`` or ``or``. Required. Is either a Literal["and"] type or + a Literal["or"] type. + :vartype type: str or str + :ivar filters: Array of filters to combine. Items can be ``ComparisonFilter`` or + ``CompoundFilter``. Required. + :vartype filters: list[~azure.ai.projects.models.ComparisonFilter or + ~azure.ai.projects.models.CompoundFilter] + """ + + type: Literal["and", "or"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Type of operation: ``and`` or ``or``. Required. 
Is either a Literal[\"and\"] type or a + Literal[\"or\"] type.""" + filters: list[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Array of filters to combine. Items can be ``ComparisonFilter`` or ``CompoundFilter``. Required.""" + + @overload + def __init__( + self, + *, + type: Literal["and", "or"], + filters: list[Union["_models.ComparisonFilter", "_models.CompoundFilter"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ComputerAction(_Model): + """ComputerAction. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ComputerActionClick, ComputerActionDoubleClick, ComputerActionDrag, ComputerActionKeyPress, + ComputerActionMove, ComputerActionScreenshot, ComputerActionScroll, ComputerActionTypeKeys, + ComputerActionWait + + :ivar type: Required. Known values are: "screenshot", "click", "double_click", "scroll", + "type", "wait", "keypress", "drag", and "move". + :vartype type: str or ~azure.ai.projects.models.ComputerActionType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"screenshot\", \"click\", \"double_click\", \"scroll\", \"type\", + \"wait\", \"keypress\", \"drag\", and \"move\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ComputerActionClick(ComputerAction, discriminator="click"): + """A click action. + + :ivar type: Specifies the event type. For a click action, this property is + always set to ``click``. Required. + :vartype type: str or ~azure.ai.projects.models.CLICK + :ivar button: Indicates which mouse button was pressed during the click. One of ``left``, + ``right``, ``wheel``, ``back``, or ``forward``. Required. Is one of the following types: + Literal["left"], Literal["right"], Literal["wheel"], Literal["back"], Literal["forward"] + :vartype button: str or str or str or str or str + :ivar x: The x-coordinate where the click occurred. Required. + :vartype x: int + :ivar y: The y-coordinate where the click occurred. Required. + :vartype y: int + """ + + type: Literal[ComputerActionType.CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a click action, this property is + always set to ``click``. Required.""" + button: Literal["left", "right", "wheel", "back", "forward"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates which mouse button was pressed during the click. One of ``left``, ``right``, + ``wheel``, ``back``, or ``forward``. Required. Is one of the following types: + Literal[\"left\"], Literal[\"right\"], Literal[\"wheel\"], Literal[\"back\"], + Literal[\"forward\"]""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate where the click occurred. 
Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate where the click occurred. Required.""" + + @overload + def __init__( + self, + *, + button: Literal["left", "right", "wheel", "back", "forward"], + x: int, + y: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.CLICK # type: ignore + + +class ComputerActionDoubleClick(ComputerAction, discriminator="double_click"): + """A double click action. + + :ivar type: Specifies the event type. For a double click action, this property is + always set to ``double_click``. Required. + :vartype type: str or ~azure.ai.projects.models.DOUBLE_CLICK + :ivar x: The x-coordinate where the double click occurred. Required. + :vartype x: int + :ivar y: The y-coordinate where the double click occurred. Required. + :vartype y: int + """ + + type: Literal[ComputerActionType.DOUBLE_CLICK] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a double click action, this property is + always set to ``double_click``. Required.""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate where the double click occurred. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate where the double click occurred. Required.""" + + @overload + def __init__( + self, + *, + x: int, + y: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.DOUBLE_CLICK # type: ignore + + +class ComputerActionDrag(ComputerAction, discriminator="drag"): + """A drag action. + + :ivar type: Specifies the event type. For a drag action, this property is + always set to ``drag``. Required. + :vartype type: str or ~azure.ai.projects.models.DRAG + :ivar path: An array of coordinates representing the path of the drag action. Coordinates will + appear as an array + of objects, eg + .. code-block:: + [ + { x: 100, y: 200 }, + { x: 200, y: 300 } + ]. Required. + :vartype path: list[~azure.ai.projects.models.Coordinate] + """ + + type: Literal[ComputerActionType.DRAG] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a drag action, this property is + always set to ``drag``. Required.""" + path: list["_models.Coordinate"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of coordinates representing the path of the drag action. Coordinates will appear as an + array + of objects, eg + .. code-block:: + [ + { x: 100, y: 200 }, + { x: 200, y: 300 } + ]. Required.""" + + @overload + def __init__( + self, + *, + path: list["_models.Coordinate"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.DRAG # type: ignore + + +class ComputerActionKeyPress(ComputerAction, discriminator="keypress"): + """A collection of keypresses the model would like to perform. + + :ivar type: Specifies the event type. For a keypress action, this property is + always set to ``keypress``. Required. + :vartype type: str or ~azure.ai.projects.models.KEYPRESS + :ivar keys_property: The combination of keys the model is requesting to be pressed. This is an + array of strings, each representing a key. Required. + :vartype keys_property: list[str] + """ + + type: Literal[ComputerActionType.KEYPRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a keypress action, this property is + always set to ``keypress``. Required.""" + keys_property: list[str] = rest_field(name="keys", visibility=["read", "create", "update", "delete", "query"]) + """The combination of keys the model is requesting to be pressed. This is an + array of strings, each representing a key. Required.""" + + @overload + def __init__( + self, + *, + keys_property: list[str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.KEYPRESS # type: ignore + + +class ComputerActionMove(ComputerAction, discriminator="move"): + """A mouse move action. + + :ivar type: Specifies the event type. For a move action, this property is + always set to ``move``. Required. + :vartype type: str or ~azure.ai.projects.models.MOVE + :ivar x: The x-coordinate to move to. Required. + :vartype x: int + :ivar y: The y-coordinate to move to. Required. + :vartype y: int + """ + + type: Literal[ComputerActionType.MOVE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a move action, this property is + always set to ``move``. Required.""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate to move to. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate to move to. Required.""" + + @overload + def __init__( + self, + *, + x: int, + y: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.MOVE # type: ignore + + +class ComputerActionScreenshot(ComputerAction, discriminator="screenshot"): + """A screenshot action. + + :ivar type: Specifies the event type. For a screenshot action, this property is + always set to ``screenshot``. Required. + :vartype type: str or ~azure.ai.projects.models.SCREENSHOT + """ + + type: Literal[ComputerActionType.SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a screenshot action, this property is + always set to ``screenshot``. 
Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.SCREENSHOT # type: ignore + + +class ComputerActionScroll(ComputerAction, discriminator="scroll"): + """A scroll action. + + :ivar type: Specifies the event type. For a scroll action, this property is + always set to ``scroll``. Required. + :vartype type: str or ~azure.ai.projects.models.SCROLL + :ivar x: The x-coordinate where the scroll occurred. Required. + :vartype x: int + :ivar y: The y-coordinate where the scroll occurred. Required. + :vartype y: int + :ivar scroll_x: The horizontal scroll distance. Required. + :vartype scroll_x: int + :ivar scroll_y: The vertical scroll distance. Required. + :vartype scroll_y: int + """ + + type: Literal[ComputerActionType.SCROLL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a scroll action, this property is + always set to ``scroll``. Required.""" + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate where the scroll occurred. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate where the scroll occurred. Required.""" + scroll_x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The horizontal scroll distance. Required.""" + scroll_y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The vertical scroll distance. Required.""" + + @overload + def __init__( + self, + *, + x: int, + y: int, + scroll_x: int, + scroll_y: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.SCROLL # type: ignore + + +class ComputerActionTypeKeys(ComputerAction, discriminator="type"): + """An action to type in text. + + :ivar type: Specifies the event type. For a type action, this property is + always set to ``type``. Required. + :vartype type: str or ~azure.ai.projects.models.TYPE + :ivar text: The text to type. Required. + :vartype text: str + """ + + type: Literal[ComputerActionType.TYPE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a type action, this property is + always set to ``type``. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text to type. Required.""" + + @overload + def __init__( + self, + *, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.TYPE # type: ignore + + +class ComputerActionWait(ComputerAction, discriminator="wait"): + """A wait action. + + :ivar type: Specifies the event type. 
For a wait action, this property is + always set to ``wait``. Required. + :vartype type: str or ~azure.ai.projects.models.WAIT + """ + + type: Literal[ComputerActionType.WAIT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Specifies the event type. For a wait action, this property is + always set to ``wait``. Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerActionType.WAIT # type: ignore + + +class ComputerToolCallItemParam(ItemParam, discriminator="computer_call"): + """A tool call to a computer use tool. See the + `computer use guide `_ for more information. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL + :ivar call_id: An identifier used when responding to the tool call with output. Required. + :vartype call_id: str + :ivar action: Required. + :vartype action: ~azure.ai.projects.models.ComputerAction + :ivar pending_safety_checks: The pending safety checks for the computer call. Required. + :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] + """ + + type: Literal[ItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An identifier used when responding to the tool call with output. Required.""" + action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The pending safety checks for the computer call. Required.""" + + @overload + def __init__( + self, + *, + call_id: str, + action: "_models.ComputerAction", + pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.COMPUTER_CALL # type: ignore + + +class ComputerToolCallItemResource(ItemResource, discriminator="computer_call"): + """A tool call to a computer use tool. See the + `computer use guide `_ for more information. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar call_id: An identifier used when responding to the tool call with output. Required. + :vartype call_id: str + :ivar action: Required. 
+ :vartype action: ~azure.ai.projects.models.ComputerAction + :ivar pending_safety_checks: The pending safety checks for the computer call. Required. + :vartype pending_safety_checks: list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] + """ + + type: Literal[ItemType.COMPUTER_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An identifier used when responding to the tool call with output. Required.""" + action: "_models.ComputerAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The pending safety checks for the computer call. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + call_id: str, + action: "_models.ComputerAction", + pending_safety_checks: list["_models.ComputerToolCallSafetyCheck"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.COMPUTER_CALL # type: ignore + + +class ComputerToolCallOutputItemOutput(_Model): + """ComputerToolCallOutputItemOutput. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ComputerToolCallOutputItemOutputComputerScreenshot + + :ivar type: Required. "computer_screenshot" + :vartype type: str or ~azure.ai.projects.models.ComputerToolCallOutputItemOutputType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. \"computer_screenshot\"""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ComputerToolCallOutputItemOutputComputerScreenshot( + ComputerToolCallOutputItemOutput, discriminator="computer_screenshot" +): # pylint: disable=name-too-long + """ComputerToolCallOutputItemOutputComputerScreenshot. + + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.SCREENSHOT + :ivar image_url: + :vartype image_url: str + :ivar file_id: + :vartype file_id: str + """ + + type: Literal[ComputerToolCallOutputItemOutputType.SCREENSHOT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + + @overload + def __init__( + self, + *, + image_url: Optional[str] = None, + file_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ComputerToolCallOutputItemOutputType.SCREENSHOT # type: ignore + + +class ComputerToolCallOutputItemParam(ItemParam, discriminator="computer_call_output"): + """The output of a computer tool call. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT + :ivar call_id: The ID of the computer tool call that produced the output. Required. + :vartype call_id: str + :ivar acknowledged_safety_checks: The safety checks reported by the API that have been + acknowledged by the + developer. + :vartype acknowledged_safety_checks: + list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] + :ivar output: Required. + :vartype output: ~azure.ai.projects.models.ComputerToolCallOutputItemOutput + """ + + type: Literal[ItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the computer tool call that produced the output. Required.""" + acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The safety checks reported by the API that have been acknowledged by the + developer.""" + output: "_models.ComputerToolCallOutputItemOutput" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" + + @overload + def __init__( + self, + *, + call_id: str, + output: "_models.ComputerToolCallOutputItemOutput", + acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.COMPUTER_CALL_OUTPUT # type: ignore + + +class ComputerToolCallOutputItemResource(ItemResource, discriminator="computer_call_output"): + """The output of a computer tool call. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_CALL_OUTPUT + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. 
Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar call_id: The ID of the computer tool call that produced the output. Required. + :vartype call_id: str + :ivar acknowledged_safety_checks: The safety checks reported by the API that have been + acknowledged by the + developer. + :vartype acknowledged_safety_checks: + list[~azure.ai.projects.models.ComputerToolCallSafetyCheck] + :ivar output: Required. + :vartype output: ~azure.ai.projects.models.ComputerToolCallOutputItemOutput + """ + + type: Literal[ItemType.COMPUTER_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the computer tool call that produced the output. Required.""" + acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The safety checks reported by the API that have been acknowledged by the + developer.""" + output: "_models.ComputerToolCallOutputItemOutput" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + call_id: str, + output: "_models.ComputerToolCallOutputItemOutput", + created_by: Optional["_models.CreatedBy"] = None, + acknowledged_safety_checks: Optional[list["_models.ComputerToolCallSafetyCheck"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.COMPUTER_CALL_OUTPUT # type: ignore + + +class ComputerToolCallSafetyCheck(_Model): + """A pending safety check for the computer call. + + :ivar id: The ID of the pending safety check. Required. + :vartype id: str + :ivar code: The type of the pending safety check. Required. + :vartype code: str + :ivar message: Details about the pending safety check. Required. + :vartype message: str + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the pending safety check. Required.""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the pending safety check. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Details about the pending safety check. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + code: str, + message: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ComputerUsePreviewTool(Tool, discriminator="computer_use_preview"): + """A tool that controls a virtual computer. Learn more about the `computer tool + `_. + + :ivar type: The type of the computer use tool. Always ``computer_use_preview``. Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER_USE_PREVIEW + :ivar environment: The type of computer environment to control. Required. Is one of the + following types: Literal["windows"], Literal["mac"], Literal["linux"], Literal["ubuntu"], + Literal["browser"] + :vartype environment: str or str or str or str or str + :ivar display_width: The width of the computer display. Required. + :vartype display_width: int + :ivar display_height: The height of the computer display. Required. + :vartype display_height: int + """ + + type: Literal[ToolType.COMPUTER_USE_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the computer use tool. Always ``computer_use_preview``. Required.""" + environment: Literal["windows", "mac", "linux", "ubuntu", "browser"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The type of computer environment to control. Required. Is one of the following types: + Literal[\"windows\"], Literal[\"mac\"], Literal[\"linux\"], Literal[\"ubuntu\"], + Literal[\"browser\"]""" + display_width: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The width of the computer display. Required.""" + display_height: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The height of the computer display. Required.""" + + @overload + def __init__( + self, + *, + environment: Literal["windows", "mac", "linux", "ubuntu", "browser"], + display_width: int, + display_height: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.COMPUTER_USE_PREVIEW # type: ignore + + +class Connection(_Model): + """Response from the list and get connections operations. + + :ivar name: The friendly name of the connection, provided by the user. Required. + :vartype name: str + :ivar id: A unique identifier for the connection, generated by the service. Required. + :vartype id: str + :ivar type: Category of the connection. Required. Known values are: "AzureOpenAI", "AzureBlob", + "AzureStorageAccount", "CognitiveSearch", "CosmosDB", "ApiKey", "AppConfig", "AppInsights", + "CustomKeys", and "RemoteTool". + :vartype type: str or ~azure.ai.projects.models.ConnectionType + :ivar target: The connection URL to be used for this service. Required. + :vartype target: str + :ivar is_default: Whether the connection is tagged as the default connection of its type. + Required. + :vartype is_default: bool + :ivar credentials: The credentials used by the connection. Required. + :vartype credentials: ~azure.ai.projects.models.BaseCredentials + :ivar metadata: Metadata of the connection. Required. + :vartype metadata: dict[str, str] + """ + + name: str = rest_field(visibility=["read"]) + """The friendly name of the connection, provided by the user. 
Required.""" + id: str = rest_field(visibility=["read"]) + """A unique identifier for the connection, generated by the service. Required.""" + type: Union[str, "_models.ConnectionType"] = rest_field(visibility=["read"]) + """Category of the connection. Required. Known values are: \"AzureOpenAI\", \"AzureBlob\", + \"AzureStorageAccount\", \"CognitiveSearch\", \"CosmosDB\", \"ApiKey\", \"AppConfig\", + \"AppInsights\", \"CustomKeys\", and \"RemoteTool\".""" + target: str = rest_field(visibility=["read"]) + """The connection URL to be used for this service. Required.""" + is_default: bool = rest_field(name="isDefault", visibility=["read"]) + """Whether the connection is tagged as the default connection of its type. Required.""" + credentials: "_models.BaseCredentials" = rest_field(visibility=["read"]) + """The credentials used by the connection. Required.""" + metadata: dict[str, str] = rest_field(visibility=["read"]) + """Metadata of the connection. Required.""" + + +class ContainerAppAgentDefinition(AgentDefinition, discriminator="container_app"): + """The container app agent definition. + + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.CONTAINER_APP + :ivar container_protocol_versions: The protocols that the agent supports for ingress + communication of the containers. Required. + :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] + :ivar container_app_resource_id: The resource ID of the Azure Container App that hosts this + agent. Not mutable across versions. Required. + :vartype container_app_resource_id: str + :ivar ingress_subdomain_suffix: The suffix to apply to the app subdomain when sending ingress + to the agent. This can be a label (e.g., '---current'), a specific revision (e.g., + '--0000001'), or empty to use the default endpoint for the container app. Required. + :vartype ingress_subdomain_suffix: str + """ + + kind: Literal[AgentKind.CONTAINER_APP] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The protocols that the agent supports for ingress communication of the containers. Required.""" + container_app_resource_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The resource ID of the Azure Container App that hosts this agent. Not mutable across versions. + Required.""" + ingress_subdomain_suffix: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The suffix to apply to the app subdomain when sending ingress to the agent. This can be a label + (e.g., '---current'), a specific revision (e.g., '--0000001'), or empty to use the default + endpoint for the container app. Required.""" + + @overload + def __init__( + self, + *, + container_protocol_versions: list["_models.ProtocolVersionRecord"], + container_app_resource_id: str, + ingress_subdomain_suffix: str, + rai_config: Optional["_models.RaiConfig"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = AgentKind.CONTAINER_APP # type: ignore + + +class EvaluationRuleAction(_Model): + """Evaluation action model. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ContinuousEvaluationRuleAction, HumanEvaluationRuleAction + + :ivar type: Type of the evaluation action. Required. Known values are: "continuousEvaluation" + and "humanEvaluation". + :vartype type: str or ~azure.ai.projects.models.EvaluationRuleActionType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the evaluation action. Required. Known values are: \"continuousEvaluation\" and + \"humanEvaluation\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ContinuousEvaluationRuleAction(EvaluationRuleAction, discriminator="continuousEvaluation"): + """Evaluation rule action for continuous evaluation. + + :ivar type: Required. Continuous evaluation. + :vartype type: str or ~azure.ai.projects.models.CONTINUOUS_EVALUATION + :ivar eval_id: Eval Id to add continuous evaluation runs to. Required. + :vartype eval_id: str + :ivar max_hourly_runs: Maximum number of evaluation runs allowed per hour. + :vartype max_hourly_runs: int + """ + + type: Literal[EvaluationRuleActionType.CONTINUOUS_EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Continuous evaluation.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Eval Id to add continuous evaluation runs to. Required.""" + max_hourly_runs: Optional[int] = rest_field( + name="maxHourlyRuns", visibility=["read", "create", "update", "delete", "query"] + ) + """Maximum number of evaluation runs allowed per hour.""" + + @overload + def __init__( + self, + *, + eval_id: str, + max_hourly_runs: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = EvaluationRuleActionType.CONTINUOUS_EVALUATION # type: ignore + + +class Coordinate(_Model): + """An x/y coordinate pair, e.g. ``{ x: 100, y: 200 }``. + + :ivar x: The x-coordinate. Required. + :vartype x: int + :ivar y: The y-coordinate. Required. + :vartype y: int + """ + + x: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The x-coordinate. Required.""" + y: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The y-coordinate. Required.""" + + @overload + def __init__( + self, + *, + x: int, + y: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CosmosDBIndex(Index, discriminator="CosmosDBNoSqlVectorStore"): + """CosmosDB Vector Store Index Definition. + + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Type of index. Required. CosmosDB + :vartype type: str or ~azure.ai.projects.models.COSMOS_DB + :ivar connection_name: Name of connection to CosmosDB. Required. + :vartype connection_name: str + :ivar database_name: Name of the CosmosDB Database. Required. + :vartype database_name: str + :ivar container_name: Name of CosmosDB Container. Required. + :vartype container_name: str + :ivar embedding_configuration: Embedding model configuration. Required. + :vartype embedding_configuration: ~azure.ai.projects.models.EmbeddingConfiguration + :ivar field_mapping: Field mapping configuration. Required. + :vartype field_mapping: ~azure.ai.projects.models.FieldMapping + """ + + type: Literal[IndexType.COSMOS_DB] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of index. Required. CosmosDB""" + connection_name: str = rest_field(name="connectionName", visibility=["create"]) + """Name of connection to CosmosDB. Required.""" + database_name: str = rest_field(name="databaseName", visibility=["create"]) + """Name of the CosmosDB Database. Required.""" + container_name: str = rest_field(name="containerName", visibility=["create"]) + """Name of CosmosDB Container. Required.""" + embedding_configuration: "_models.EmbeddingConfiguration" = rest_field( + name="embeddingConfiguration", visibility=["create"] + ) + """Embedding model configuration. Required.""" + field_mapping: "_models.FieldMapping" = rest_field(name="fieldMapping", visibility=["create"]) + """Field mapping configuration. Required.""" + + @overload + def __init__( + self, + *, + connection_name: str, + database_name: str, + container_name: str, + embedding_configuration: "_models.EmbeddingConfiguration", + field_mapping: "_models.FieldMapping", + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = IndexType.COSMOS_DB # type: ignore + + +class CreatedBy(_Model): + """CreatedBy. + + :ivar agent: The agent that created the item. + :vartype agent: ~azure.ai.projects.models.AgentId + :ivar response_id: The response on which the item is created. 
+ :vartype response_id: str + """ + + agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The agent that created the item.""" + response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response on which the item is created.""" + + @overload + def __init__( + self, + *, + agent: Optional["_models.AgentId"] = None, + response_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Trigger(_Model): + """Base model for Trigger of the schedule. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CronTrigger, OneTimeTrigger, RecurrenceTrigger + + :ivar type: Type of the trigger. Required. Known values are: "Cron", "Recurrence", and + "OneTime". + :vartype type: str or ~azure.ai.projects.models.TriggerType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the trigger. Required. Known values are: \"Cron\", \"Recurrence\", and \"OneTime\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CronTrigger(Trigger, discriminator="Cron"): + """Cron based trigger. + + :ivar type: Required. Cron based trigger. + :vartype type: str or ~azure.ai.projects.models.CRON + :ivar expression: Cron expression that defines the schedule frequency. Required. + :vartype expression: str + :ivar time_zone: Time zone for the cron schedule. + :vartype time_zone: str + :ivar start_time: Start time for the cron schedule in ISO 8601 format. + :vartype start_time: str + :ivar end_time: End time for the cron schedule in ISO 8601 format. + :vartype end_time: str + """ + + type: Literal[TriggerType.CRON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Cron based trigger.""" + expression: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Cron expression that defines the schedule frequency. Required.""" + time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) + """Time zone for the cron schedule.""" + start_time: Optional[str] = rest_field(name="startTime", visibility=["read", "create", "update", "delete", "query"]) + """Start time for the cron schedule in ISO 8601 format.""" + end_time: Optional[str] = rest_field(name="endTime", visibility=["read", "create", "update", "delete", "query"]) + """End time for the cron schedule in ISO 8601 format.""" + + @overload + def __init__( + self, + *, + expression: str, + time_zone: Optional[str] = None, + start_time: Optional[str] = None, + end_time: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = TriggerType.CRON # type: ignore + + +class CustomCredential(BaseCredentials, discriminator="CustomKeys"): + """Custom credential definition. + + :ivar type: The credential type. Required. Custom credential + :vartype type: str or ~azure.ai.projects.models.CUSTOM + """ + + type: Literal[CredentialType.CUSTOM] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. Custom credential""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.CUSTOM # type: ignore + + +class RecurrenceSchedule(_Model): + """Recurrence schedule model. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + DailyRecurrenceSchedule, HourlyRecurrenceSchedule, MonthlyRecurrenceSchedule, + WeeklyRecurrenceSchedule + + :ivar type: Recurrence type for the recurrence schedule. Required. Known values are: "Hourly", + "Daily", "Weekly", and "Monthly". + :vartype type: str or ~azure.ai.projects.models.RecurrenceType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Recurrence type for the recurrence schedule. Required. Known values are: \"Hourly\", \"Daily\", + \"Weekly\", and \"Monthly\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DailyRecurrenceSchedule(RecurrenceSchedule, discriminator="Daily"): + """Daily recurrence schedule. + + :ivar type: Daily recurrence type. Required. Daily recurrence pattern. + :vartype type: str or ~azure.ai.projects.models.DAILY + :ivar hours: Hours for the recurrence schedule. Required. + :vartype hours: list[int] + """ + + type: Literal[RecurrenceType.DAILY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Daily recurrence type. Required. Daily recurrence pattern.""" + hours: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Hours for the recurrence schedule. Required.""" + + @overload + def __init__( + self, + *, + hours: list[int], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = RecurrenceType.DAILY # type: ignore + + +class DatasetCredential(_Model): + """Represents a reference to a blob for consumption. + + :ivar blob_reference: Credential info to access the storage account. Required. 
+ :vartype blob_reference: ~azure.ai.projects.models.BlobReference + """ + + blob_reference: "_models.BlobReference" = rest_field( + name="blobReference", visibility=["read", "create", "update", "delete", "query"] + ) + """Credential info to access the storage account. Required.""" + + @overload + def __init__( + self, + *, + blob_reference: "_models.BlobReference", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DatasetVersion(_Model): + """DatasetVersion Definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FileDatasetVersion, FolderDatasetVersion + + :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required. + :vartype data_uri: str + :ivar type: Dataset type. Required. Known values are: "uri_file" and "uri_folder". + :vartype type: str or ~azure.ai.projects.models.DatasetType + :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset + manages storage itself. If true, the underlying data will not be deleted when the dataset + version is deleted. + :vartype is_reference: bool + :ivar connection_name: The Azure Storage Account connection name. Required if + startPendingUploadVersion was not called before creating the Dataset. + :vartype connection_name: str + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + """ + + __mapping__: dict[str, _Model] = {} + data_uri: str = rest_field(name="dataUri", visibility=["read", "create"]) + """URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required.""" + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Dataset type. Required. Known values are: \"uri_file\" and \"uri_folder\".""" + is_reference: Optional[bool] = rest_field(name="isReference", visibility=["read"]) + """Indicates if the dataset holds a reference to the storage, or the dataset manages storage + itself. If true, the underlying data will not be deleted when the dataset version is deleted.""" + connection_name: Optional[str] = rest_field(name="connectionName", visibility=["read", "create"]) + """The Azure Storage Account connection name. Required if startPendingUploadVersion was not called + before creating the Dataset.""" + id: Optional[str] = rest_field(visibility=["read"]) + """Asset ID, a unique identifier for the asset.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["create", "update"]) + """The asset description text.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) + """Tag dictionary. 
Tags can be added, removed, and updated.""" + + @overload + def __init__( + self, + *, + data_uri: str, + type: str, + connection_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DeleteAgentResponse(_Model): + """A deleted agent Object. + + :ivar object: The object type. Always 'agent.deleted'. Required. Default value is + "agent.deleted". + :vartype object: str + :ivar name: The name of the agent. Required. + :vartype name: str + :ivar deleted: Whether the agent was successfully deleted. Required. + :vartype deleted: bool + """ + + object: Literal["agent.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type. Always 'agent.deleted'. Required. Default value is \"agent.deleted\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. Required.""" + deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the agent was successfully deleted. Required.""" + + @overload + def __init__( + self, + *, + name: str, + deleted: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["agent.deleted"] = "agent.deleted" + + +class DeleteAgentVersionResponse(_Model): + """A deleted agent version Object. + + :ivar object: The object type. Always 'agent.deleted'. Required. Default value is + "agent.version.deleted". + :vartype object: str + :ivar name: The name of the agent. Required. + :vartype name: str + :ivar version: The version identifier of the agent. Required. + :vartype version: str + :ivar deleted: Whether the agent was successfully deleted. Required. + :vartype deleted: bool + """ + + object: Literal["agent.version.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type. Always 'agent.deleted'. Required. Default value is \"agent.version.deleted\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the agent. Required.""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version identifier of the agent. Required.""" + deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the agent was successfully deleted. Required.""" + + @overload + def __init__( + self, + *, + name: str, + version: str, + deleted: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["agent.version.deleted"] = "agent.version.deleted" + + +class DeleteMemoryStoreResponse(_Model): + """DeleteMemoryStoreResponse. + + :ivar object: The object type. Always 'memory_store.deleted'. Required. Default value is + "memory_store.deleted". 
+ :vartype object: str + :ivar name: The name of the memory store. Required. + :vartype name: str + :ivar deleted: Whether the memory store was successfully deleted. Required. + :vartype deleted: bool + """ + + object: Literal["memory_store.deleted"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type. Always 'memory_store.deleted'. Required. Default value is + \"memory_store.deleted\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store. Required.""" + deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the memory store was successfully deleted. Required.""" + + @overload + def __init__( + self, + *, + name: str, + deleted: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["memory_store.deleted"] = "memory_store.deleted" + + +class Deployment(_Model): + """Model Deployment Definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ModelDeployment + + :ivar type: The type of the deployment. Required. "ModelDeployment" + :vartype type: str or ~azure.ai.projects.models.DeploymentType + :ivar name: Name of the deployment. Required. + :vartype name: str + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of the deployment. Required. \"ModelDeployment\"""" + name: str = rest_field(visibility=["read"]) + """Name of the deployment. Required.""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EmbeddingConfiguration(_Model): + """Embedding configuration class. + + :ivar model_deployment_name: Deployment name of embedding model. It can point to a model + deployment either in the parent AIServices or a connection. Required. + :vartype model_deployment_name: str + :ivar embedding_field: Embedding field. Required. + :vartype embedding_field: str + """ + + model_deployment_name: str = rest_field(name="modelDeploymentName", visibility=["create"]) + """Deployment name of embedding model. It can point to a model deployment either in the parent + AIServices or a connection. Required.""" + embedding_field: str = rest_field(name="embeddingField", visibility=["create"]) + """Embedding field. Required.""" + + @overload + def __init__( + self, + *, + model_deployment_name: str, + embedding_field: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EntraIDCredentials(BaseCredentials, discriminator="AAD"): + """Entra ID credential definition. + + :ivar type: The credential type. Required. 
Entra ID credential (formerly known as AAD) + :vartype type: str or ~azure.ai.projects.models.ENTRA_ID + """ + + type: Literal[CredentialType.ENTRA_ID] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. Entra ID credential (formerly known as AAD)""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.ENTRA_ID # type: ignore + + +class EvalCompareReport(InsightResult, discriminator="EvaluationComparison"): + """Insights from the evaluation comparison. + + :ivar type: The type of insights result. Required. Evaluation Comparison. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON + :ivar comparisons: Comparison results for each treatment run against the baseline. Required. + :vartype comparisons: list[~azure.ai.projects.models.EvalRunResultComparison] + :ivar method: The statistical method used for comparison. Required. + :vartype method: str + """ + + type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of insights result. Required. Evaluation Comparison.""" + comparisons: list["_models.EvalRunResultComparison"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Comparison results for each treatment run against the baseline. Required.""" + method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The statistical method used for comparison. Required.""" + + @overload + def __init__( + self, + *, + comparisons: list["_models.EvalRunResultComparison"], + method: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.EVALUATION_COMPARISON # type: ignore + + +class EvalResult(_Model): + """Result of the evaluation. + + :ivar name: name of the check. Required. + :vartype name: str + :ivar type: type of the check. Required. + :vartype type: str + :ivar score: score. Required. + :vartype score: float + :ivar passed: indicates if the check passed or failed. Required. + :vartype passed: bool + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """name of the check. Required.""" + type: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """type of the check. Required.""" + score: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """score. Required.""" + passed: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """indicates if the check passed or failed. Required.""" + + @overload + def __init__( + self, + *, + name: str, + type: str, + score: float, + passed: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvalRunResultCompareItem(_Model): + """Metric comparison for a treatment against the baseline. + + :ivar treatment_run_id: The treatment run ID. Required. + :vartype treatment_run_id: str + :ivar treatment_run_summary: Summary statistics of the treatment run. Required. + :vartype treatment_run_summary: ~azure.ai.projects.models.EvalRunResultSummary + :ivar delta_estimate: Estimated difference between treatment and baseline. Required. + :vartype delta_estimate: float + :ivar p_value: P-value for the treatment effect. Required. + :vartype p_value: float + :ivar treatment_effect: Type of treatment effect. Required. Known values are: "TooFewSamples", + "Inconclusive", "Changed", "Improved", and "Degraded". + :vartype treatment_effect: str or ~azure.ai.projects.models.TreatmentEffectType + """ + + treatment_run_id: str = rest_field( + name="treatmentRunId", visibility=["read", "create", "update", "delete", "query"] + ) + """The treatment run ID. Required.""" + treatment_run_summary: "_models.EvalRunResultSummary" = rest_field( + name="treatmentRunSummary", visibility=["read", "create", "update", "delete", "query"] + ) + """Summary statistics of the treatment run. Required.""" + delta_estimate: float = rest_field(name="deltaEstimate", visibility=["read", "create", "update", "delete", "query"]) + """Estimated difference between treatment and baseline. Required.""" + p_value: float = rest_field(name="pValue", visibility=["read", "create", "update", "delete", "query"]) + """P-value for the treatment effect. Required.""" + treatment_effect: Union[str, "_models.TreatmentEffectType"] = rest_field( + name="treatmentEffect", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of treatment effect. Required. Known values are: \"TooFewSamples\", \"Inconclusive\", + \"Changed\", \"Improved\", and \"Degraded\".""" + + @overload + def __init__( + self, + *, + treatment_run_id: str, + treatment_run_summary: "_models.EvalRunResultSummary", + delta_estimate: float, + p_value: float, + treatment_effect: Union[str, "_models.TreatmentEffectType"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvalRunResultComparison(_Model): + """Comparison results for treatment runs against the baseline. + + :ivar testing_criteria: Name of the testing criteria. Required. + :vartype testing_criteria: str + :ivar metric: Metric being evaluated. Required. + :vartype metric: str + :ivar evaluator: Name of the evaluator for this testing criteria. Required. + :vartype evaluator: str + :ivar baseline_run_summary: Summary statistics of the baseline run. Required. + :vartype baseline_run_summary: ~azure.ai.projects.models.EvalRunResultSummary + :ivar compare_items: List of comparison results for each treatment run. Required. + :vartype compare_items: list[~azure.ai.projects.models.EvalRunResultCompareItem] + """ + + testing_criteria: str = rest_field( + name="testingCriteria", visibility=["read", "create", "update", "delete", "query"] + ) + """Name of the testing criteria. Required.""" + metric: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Metric being evaluated. 
Required.""" + evaluator: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the evaluator for this testing criteria. Required.""" + baseline_run_summary: "_models.EvalRunResultSummary" = rest_field( + name="baselineRunSummary", visibility=["read", "create", "update", "delete", "query"] + ) + """Summary statistics of the baseline run. Required.""" + compare_items: list["_models.EvalRunResultCompareItem"] = rest_field( + name="compareItems", visibility=["read", "create", "update", "delete", "query"] + ) + """List of comparison results for each treatment run. Required.""" + + @overload + def __init__( + self, + *, + testing_criteria: str, + metric: str, + evaluator: str, + baseline_run_summary: "_models.EvalRunResultSummary", + compare_items: list["_models.EvalRunResultCompareItem"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvalRunResultSummary(_Model): + """Summary statistics of a metric in an evaluation run. + + :ivar run_id: The evaluation run ID. Required. + :vartype run_id: str + :ivar sample_count: Number of samples in the evaluation run. Required. + :vartype sample_count: int + :ivar average: Average value of the metric in the evaluation run. Required. + :vartype average: float + :ivar standard_deviation: Standard deviation of the metric in the evaluation run. Required. + :vartype standard_deviation: float + """ + + run_id: str = rest_field(name="runId", visibility=["read", "create", "update", "delete", "query"]) + """The evaluation run ID. Required.""" + sample_count: int = rest_field(name="sampleCount", visibility=["read", "create", "update", "delete", "query"]) + """Number of samples in the evaluation run. Required.""" + average: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Average value of the metric in the evaluation run. Required.""" + standard_deviation: float = rest_field( + name="standardDeviation", visibility=["read", "create", "update", "delete", "query"] + ) + """Standard deviation of the metric in the evaluation run. Required.""" + + @overload + def __init__( + self, + *, + run_id: str, + sample_count: int, + average: float, + standard_deviation: float, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluationComparisonRequest(InsightRequest, discriminator="EvaluationComparison"): + """Evaluation Comparison Request. + + :ivar type: The type of request. Required. Evaluation Comparison. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_COMPARISON + :ivar eval_id: Identifier for the evaluation. Required. + :vartype eval_id: str + :ivar baseline_run_id: The baseline run ID for comparison. Required. + :vartype baseline_run_id: str + :ivar treatment_run_ids: List of treatment run IDs for comparison. Required. + :vartype treatment_run_ids: list[str] + """ + + type: Literal[InsightType.EVALUATION_COMPARISON] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of request. Required. 
Evaluation Comparison.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Identifier for the evaluation. Required.""" + baseline_run_id: str = rest_field(name="baselineRunId", visibility=["read", "create", "update", "delete", "query"]) + """The baseline run ID for comparison. Required.""" + treatment_run_ids: list[str] = rest_field( + name="treatmentRunIds", visibility=["read", "create", "update", "delete", "query"] + ) + """List of treatment run IDs for comparison. Required.""" + + @overload + def __init__( + self, + *, + eval_id: str, + baseline_run_id: str, + treatment_run_ids: list[str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.EVALUATION_COMPARISON # type: ignore + + +class InsightSample(_Model): + """A sample from the analysis. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + EvaluationResultSample + + :ivar id: The unique identifier for the analysis sample. Required. + :vartype id: str + :ivar type: Sample type. Required. "EvaluationResultSample" + :vartype type: str or ~azure.ai.projects.models.SampleType + :ivar features: Features to help with additional filtering of data in UX. Required. + :vartype features: dict[str, any] + :ivar correlation_info: Info about the correlation for the analysis sample. Required. + :vartype correlation_info: dict[str, any] + """ + + __mapping__: dict[str, _Model] = {} + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier for the analysis sample. Required.""" + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Sample type. Required. \"EvaluationResultSample\"""" + features: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Features to help with additional filtering of data in UX. Required.""" + correlation_info: dict[str, Any] = rest_field( + name="correlationInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """Info about the correlation for the analysis sample. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + type: str, + features: dict[str, Any], + correlation_info: dict[str, Any], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluationResultSample(InsightSample, discriminator="EvaluationResultSample"): + """A sample from the evaluation result. + + :ivar id: The unique identifier for the analysis sample. Required. + :vartype id: str + :ivar features: Features to help with additional filtering of data in UX. Required. + :vartype features: dict[str, any] + :ivar correlation_info: Info about the correlation for the analysis sample. Required. + :vartype correlation_info: dict[str, any] + :ivar type: Evaluation Result Sample Type. Required. A sample from the evaluation result. 
+ :vartype type: str or ~azure.ai.projects.models.EVALUATION_RESULT_SAMPLE + :ivar evaluation_result: Evaluation result for the analysis sample. Required. + :vartype evaluation_result: ~azure.ai.projects.models.EvalResult + """ + + type: Literal[SampleType.EVALUATION_RESULT_SAMPLE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Evaluation Result Sample Type. Required. A sample from the evaluation result.""" + evaluation_result: "_models.EvalResult" = rest_field( + name="evaluationResult", visibility=["read", "create", "update", "delete", "query"] + ) + """Evaluation result for the analysis sample. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + features: dict[str, Any], + correlation_info: dict[str, Any], + evaluation_result: "_models.EvalResult", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = SampleType.EVALUATION_RESULT_SAMPLE # type: ignore + + +class EvaluationRule(_Model): + """Evaluation rule model. + + :ivar id: Unique identifier for the evaluation rule. Required. + :vartype id: str + :ivar display_name: Display Name for the evaluation rule. + :vartype display_name: str + :ivar description: Description for the evaluation rule. + :vartype description: str + :ivar action: Definition of the evaluation rule action. Required. + :vartype action: ~azure.ai.projects.models.EvaluationRuleAction + :ivar filter: Filter condition of the evaluation rule. + :vartype filter: ~azure.ai.projects.models.EvaluationRuleFilter + :ivar event_type: Event type that the evaluation rule applies to. Required. Known values are: + "response.completed" and "manual". + :vartype event_type: str or ~azure.ai.projects.models.EvaluationRuleEventType + :ivar enabled: Indicates whether the evaluation rule is enabled. Default is true. Required. + :vartype enabled: bool + :ivar system_data: System metadata for the evaluation rule. Required. + :vartype system_data: dict[str, str] + """ + + id: str = rest_field(visibility=["read"]) + """Unique identifier for the evaluation rule. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Display Name for the evaluation rule.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description for the evaluation rule.""" + action: "_models.EvaluationRuleAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Definition of the evaluation rule action. Required.""" + filter: Optional["_models.EvaluationRuleFilter"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Filter condition of the evaluation rule.""" + event_type: Union[str, "_models.EvaluationRuleEventType"] = rest_field( + name="eventType", visibility=["read", "create", "update", "delete", "query"] + ) + """Event type that the evaluation rule applies to. Required. Known values are: + \"response.completed\" and \"manual\".""" + enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates whether the evaluation rule is enabled. Default is true. 
Required.""" + system_data: dict[str, str] = rest_field(name="systemData", visibility=["read"]) + """System metadata for the evaluation rule. Required.""" + + @overload + def __init__( + self, + *, + action: "_models.EvaluationRuleAction", + event_type: Union[str, "_models.EvaluationRuleEventType"], + enabled: bool, + display_name: Optional[str] = None, + description: Optional[str] = None, + filter: Optional["_models.EvaluationRuleFilter"] = None, # pylint: disable=redefined-builtin + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluationRuleFilter(_Model): + """Evaluation filter model. + + :ivar agent_name: Filter by agent name. Required. + :vartype agent_name: str + """ + + agent_name: str = rest_field(name="agentName", visibility=["read", "create", "update", "delete", "query"]) + """Filter by agent name. Required.""" + + @overload + def __init__( + self, + *, + agent_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluationRunClusterInsightResult(InsightResult, discriminator="EvaluationRunClusterInsight"): + """Insights from the evaluation run cluster analysis. + + :ivar type: The type of insights result. Required. Insights on an Evaluation run result. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT + :ivar cluster_insight: Required. + :vartype cluster_insight: ~azure.ai.projects.models.ClusterInsightResult + """ + + type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of insights result. Required. Insights on an Evaluation run result.""" + cluster_insight: "_models.ClusterInsightResult" = rest_field( + name="clusterInsight", visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" + + @overload + def __init__( + self, + *, + cluster_insight: "_models.ClusterInsightResult", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.EVALUATION_RUN_CLUSTER_INSIGHT # type: ignore + + +class EvaluationRunClusterInsightsRequest(InsightRequest, discriminator="EvaluationRunClusterInsight"): + """Insights on set of Evaluation Results. + + :ivar type: The type of insights request. Required. Insights on an Evaluation run result. + :vartype type: str or ~azure.ai.projects.models.EVALUATION_RUN_CLUSTER_INSIGHT + :ivar eval_id: Evaluation Id for the insights. Required. + :vartype eval_id: str + :ivar run_ids: List of evaluation run IDs for the insights. Required. + :vartype run_ids: list[str] + :ivar model_configuration: Configuration of the model used in the insight generation. 
+ :vartype model_configuration: ~azure.ai.projects.models.InsightModelConfiguration + """ + + type: Literal[InsightType.EVALUATION_RUN_CLUSTER_INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of insights request. Required. Insights on an Evaluation run result.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Evaluation Id for the insights. Required.""" + run_ids: list[str] = rest_field(name="runIds", visibility=["read", "create", "update", "delete", "query"]) + """List of evaluation run IDs for the insights. Required.""" + model_configuration: Optional["_models.InsightModelConfiguration"] = rest_field( + name="modelConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """Configuration of the model used in the insight generation.""" + + @overload + def __init__( + self, + *, + eval_id: str, + run_ids: list[str], + model_configuration: Optional["_models.InsightModelConfiguration"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = InsightType.EVALUATION_RUN_CLUSTER_INSIGHT # type: ignore + + +class ScheduleTask(_Model): + """Schedule task model. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + EvaluationScheduleTask, InsightScheduleTask + + :ivar type: Type of the task. Required. Known values are: "Evaluation" and "Insight". + :vartype type: str or ~azure.ai.projects.models.ScheduleTaskType + :ivar configuration: Configuration for the task. + :vartype configuration: dict[str, str] + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the task. Required. Known values are: \"Evaluation\" and \"Insight\".""" + configuration: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Configuration for the task.""" + + @overload + def __init__( + self, + *, + type: str, + configuration: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluationScheduleTask(ScheduleTask, discriminator="Evaluation"): + """Evaluation task for the schedule. + + :ivar configuration: Configuration for the task. + :vartype configuration: dict[str, str] + :ivar type: Required. Evaluation task. + :vartype type: str or ~azure.ai.projects.models.EVALUATION + :ivar eval_id: Identifier of the evaluation group. Required. + :vartype eval_id: str + :ivar eval_run: The evaluation run payload. Required. + :vartype eval_run: any + """ + + type: Literal[ScheduleTaskType.EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Evaluation task.""" + eval_id: str = rest_field(name="evalId", visibility=["read", "create", "update", "delete", "query"]) + """Identifier of the evaluation group. 
Required.""" + eval_run: Any = rest_field(name="evalRun", visibility=["read", "create", "update", "delete", "query"]) + """The evaluation run payload. Required.""" + + @overload + def __init__( + self, + *, + eval_id: str, + eval_run: Any, + configuration: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ScheduleTaskType.EVALUATION # type: ignore + + +class EvaluationTaxonomy(_Model): + """Evaluation Taxonomy Definition. + + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar taxonomy_input: Input configuration for the evaluation taxonomy. Required. + :vartype taxonomy_input: ~azure.ai.projects.models.EvaluationTaxonomyInput + :ivar taxonomy_categories: List of taxonomy categories. + :vartype taxonomy_categories: list[~azure.ai.projects.models.TaxonomyCategory] + :ivar properties: Additional properties for the evaluation taxonomy. + :vartype properties: dict[str, str] + """ + + id: Optional[str] = rest_field(visibility=["read"]) + """Asset ID, a unique identifier for the asset.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["create", "update"]) + """The asset description text.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) + """Tag dictionary. Tags can be added, removed, and updated.""" + taxonomy_input: "_models.EvaluationTaxonomyInput" = rest_field( + name="taxonomyInput", visibility=["read", "create", "update", "delete", "query"] + ) + """Input configuration for the evaluation taxonomy. Required.""" + taxonomy_categories: Optional[list["_models.TaxonomyCategory"]] = rest_field( + name="taxonomyCategories", visibility=["read", "create", "update", "delete", "query"] + ) + """List of taxonomy categories.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional properties for the evaluation taxonomy.""" + + @overload + def __init__( + self, + *, + taxonomy_input: "_models.EvaluationTaxonomyInput", + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + taxonomy_categories: Optional[list["_models.TaxonomyCategory"]] = None, + properties: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluatorMetric(_Model): + """Evaluator Metric. + + :ivar type: Type of the metric. Known values are: "ordinal", "continuous", and "boolean". 
+ :vartype type: str or ~azure.ai.projects.models.EvaluatorMetricType + :ivar desirable_direction: It indicates whether a higher value is better or a lower value is + better for this metric. Known values are: "increase", "decrease", and "neutral". + :vartype desirable_direction: str or ~azure.ai.projects.models.EvaluatorMetricDirection + :ivar min_value: Minimum value for the metric. + :vartype min_value: float + :ivar max_value: Maximum value for the metric. If not specified, it is assumed to be unbounded. + :vartype max_value: float + :ivar is_primary: Indicates if this metric is primary when there are multiple metrics. + :vartype is_primary: bool + """ + + type: Optional[Union[str, "_models.EvaluatorMetricType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Type of the metric. Known values are: \"ordinal\", \"continuous\", and \"boolean\".""" + desirable_direction: Optional[Union[str, "_models.EvaluatorMetricDirection"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """It indicates whether a higher value is better or a lower value is better for this metric. Known + values are: \"increase\", \"decrease\", and \"neutral\".""" + min_value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Minimum value for the metric.""" + max_value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Maximum value for the metric. If not specified, it is assumed to be unbounded.""" + is_primary: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates if this metric is primary when there are multiple metrics.""" + + @overload + def __init__( + self, + *, + type: Optional[Union[str, "_models.EvaluatorMetricType"]] = None, + desirable_direction: Optional[Union[str, "_models.EvaluatorMetricDirection"]] = None, + min_value: Optional[float] = None, + max_value: Optional[float] = None, + is_primary: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class EvaluatorVersion(_Model): + """Evaluator Definition. + + :ivar display_name: Display Name for evaluator. It helps to find the evaluator easily in AI + Foundry. It does not need to be unique. + :vartype display_name: str + :ivar metadata: Metadata about the evaluator. + :vartype metadata: dict[str, str] + :ivar evaluator_type: The type of the evaluator. Required. Known values are: "builtin" and + "custom". + :vartype evaluator_type: str or ~azure.ai.projects.models.EvaluatorType + :ivar categories: The categories of the evaluator. Required. + :vartype categories: list[str or ~azure.ai.projects.models.EvaluatorCategory] + :ivar definition: Definition of the evaluator. Required. + :vartype definition: ~azure.ai.projects.models.EvaluatorDefinition + :ivar created_by: Creator of the evaluator. Required. + :vartype created_by: str + :ivar created_at: Creation date/time of the evaluator. Required. + :vartype created_at: int + :ivar modified_at: Last modified date/time of the evaluator. Required. + :vartype modified_at: int + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. 
+ :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + """ + + display_name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Display Name for evaluator. It helps to find the evaluator easily in AI Foundry. It does not + need to be unique.""" + metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Metadata about the evaluator.""" + evaluator_type: Union[str, "_models.EvaluatorType"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The type of the evaluator. Required. Known values are: \"builtin\" and \"custom\".""" + categories: list[Union[str, "_models.EvaluatorCategory"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The categories of the evaluator. Required.""" + definition: "_models.EvaluatorDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Definition of the evaluator. Required.""" + created_by: str = rest_field(visibility=["read"]) + """Creator of the evaluator. Required.""" + created_at: int = rest_field(visibility=["read"]) + """Creation date/time of the evaluator. Required.""" + modified_at: int = rest_field(visibility=["read"]) + """Last modified date/time of the evaluator. Required.""" + id: Optional[str] = rest_field(visibility=["read"]) + """Asset ID, a unique identifier for the asset.""" + name: str = rest_field(visibility=["read"]) + """The name of the resource. Required.""" + version: str = rest_field(visibility=["read"]) + """The version of the resource. Required.""" + description: Optional[str] = rest_field(visibility=["create", "update"]) + """The asset description text.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["create", "update"]) + """Tag dictionary. Tags can be added, removed, and updated.""" + + @overload + def __init__( + self, + *, + evaluator_type: Union[str, "_models.EvaluatorType"], + categories: list[Union[str, "_models.EvaluatorCategory"]], + definition: "_models.EvaluatorDefinition", + display_name: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FabricDataAgentToolParameters(_Model): + """The fabric data agent tool parameters. + + :ivar project_connections: The project connections attached to this tool. There can be a + maximum of 1 connection + resource attached to the tool. + :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] + """ + + project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool.""" + + @overload + def __init__( + self, + *, + project_connections: Optional[list["_models.ToolProjectConnection"]] = None, + ) -> None: ... 
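+ # --- Editor's note (illustrative comment, not part of the generated model code) ---
+ # Every model in this module exposes the same two construction paths: keyword
+ # arguments that mirror the typed fields, or a single raw JSON mapping handed
+ # straight to the underlying _Model machinery. A minimal sketch, assuming a
+ # ToolProjectConnection instance named `conn` already exists (hypothetical value):
+ #
+ #     params = FabricDataAgentToolParameters(project_connections=[conn])
+ #     same_params = FabricDataAgentToolParameters(
+ #         {"project_connections": [conn.as_dict()]}
+ #     )
+ #
+ # Both forms funnel into the *args/**kwargs __init__ below and should serialize
+ # to the same REST payload, since the wire name of the field is "project_connections".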
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FieldMapping(_Model): + """Field mapping configuration class. + + :ivar content_fields: List of fields with text content. Required. + :vartype content_fields: list[str] + :ivar filepath_field: Path of file to be used as a source of text content. + :vartype filepath_field: str + :ivar title_field: Field containing the title of the document. + :vartype title_field: str + :ivar url_field: Field containing the url of the document. + :vartype url_field: str + :ivar vector_fields: List of fields with vector content. + :vartype vector_fields: list[str] + :ivar metadata_fields: List of fields with metadata content. + :vartype metadata_fields: list[str] + """ + + content_fields: list[str] = rest_field(name="contentFields", visibility=["create"]) + """List of fields with text content. Required.""" + filepath_field: Optional[str] = rest_field(name="filepathField", visibility=["create"]) + """Path of file to be used as a source of text content.""" + title_field: Optional[str] = rest_field(name="titleField", visibility=["create"]) + """Field containing the title of the document.""" + url_field: Optional[str] = rest_field(name="urlField", visibility=["create"]) + """Field containing the url of the document.""" + vector_fields: Optional[list[str]] = rest_field(name="vectorFields", visibility=["create"]) + """List of fields with vector content.""" + metadata_fields: Optional[list[str]] = rest_field(name="metadataFields", visibility=["create"]) + """List of fields with metadata content.""" + + @overload + def __init__( + self, + *, + content_fields: list[str], + filepath_field: Optional[str] = None, + title_field: Optional[str] = None, + url_field: Optional[str] = None, + vector_fields: Optional[list[str]] = None, + metadata_fields: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileDatasetVersion(DatasetVersion, discriminator="uri_file"): + """FileDatasetVersion Definition. + + :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required. + :vartype data_uri: str + :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset + manages storage itself. If true, the underlying data will not be deleted when the dataset + version is deleted. + :vartype is_reference: bool + :ivar connection_name: The Azure Storage Account connection name. Required if + startPendingUploadVersion was not called before creating the Dataset. + :vartype connection_name: str + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Dataset type. Required. URI file. 
+ :vartype type: str or ~azure.ai.projects.models.URI_FILE + """ + + type: Literal[DatasetType.URI_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Dataset type. Required. URI file.""" + + @overload + def __init__( + self, + *, + data_uri: str, + connection_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = DatasetType.URI_FILE # type: ignore + + +class FileSearchTool(Tool, discriminator="file_search"): + """A tool that searches for relevant content from uploaded files. Learn more about the `file + search tool `_. + + :ivar type: The type of the file search tool. Always ``file_search``. Required. + :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH + :ivar vector_store_ids: The IDs of the vector stores to search. Required. + :vartype vector_store_ids: list[str] + :ivar max_num_results: The maximum number of results to return. This number should be between 1 + and 50 inclusive. + :vartype max_num_results: int + :ivar ranking_options: Ranking options for search. + :vartype ranking_options: ~azure.ai.projects.models.RankingOptions + :ivar filters: A filter to apply. Is either a ComparisonFilter type or a CompoundFilter type. + :vartype filters: ~azure.ai.projects.models.ComparisonFilter or + ~azure.ai.projects.models.CompoundFilter + """ + + type: Literal[ToolType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the file search tool. Always ``file_search``. Required.""" + vector_store_ids: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The IDs of the vector stores to search. Required.""" + max_num_results: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The maximum number of results to return. This number should be between 1 and 50 inclusive.""" + ranking_options: Optional["_models.RankingOptions"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Ranking options for search.""" + filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A filter to apply. Is either a ComparisonFilter type or a CompoundFilter type.""" + + @overload + def __init__( + self, + *, + vector_store_ids: list[str], + max_num_results: Optional[int] = None, + ranking_options: Optional["_models.RankingOptions"] = None, + filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.FILE_SEARCH # type: ignore + + +class FileSearchToolCallItemParam(ItemParam, discriminator="file_search_call"): + """The results of a file search tool call. See the + `file search guide `_ for more information. + + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL + :ivar queries: The queries used to search for files. Required. + :vartype queries: list[str] + :ivar results: The results of the file search tool call. + :vartype results: list[~azure.ai.projects.models.FileSearchToolCallItemParamResult] + """ + + type: Literal[ItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The queries used to search for files. Required.""" + results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The results of the file search tool call.""" + + @overload + def __init__( + self, + *, + queries: list[str], + results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FILE_SEARCH_CALL # type: ignore + + +class FileSearchToolCallItemParamResult(_Model): + """FileSearchToolCallItemParamResult. + + :ivar file_id: The unique ID of the file. + :vartype file_id: str + :ivar text: The text that was retrieved from the file. + :vartype text: str + :ivar filename: The name of the file. + :vartype filename: str + :ivar attributes: + :vartype attributes: ~azure.ai.projects.models.VectorStoreFileAttributes + :ivar score: The relevance score of the file - a value between 0 and 1. + :vartype score: float + """ + + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the file.""" + text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text that was retrieved from the file.""" + filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the file.""" + attributes: Optional["_models.VectorStoreFileAttributes"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + score: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The relevance score of the file - a value between 0 and 1.""" + + @overload + def __init__( + self, + *, + file_id: Optional[str] = None, + text: Optional[str] = None, + filename: Optional[str] = None, + attributes: Optional["_models.VectorStoreFileAttributes"] = None, + score: Optional[float] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileSearchToolCallItemResource(ItemResource, discriminator="file_search_call"): + """The results of a file search tool call. See the + `file search guide `_ for more information. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH_CALL + :ivar status: The status of the file search tool call. One of ``in_progress``, + ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: + Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["incomplete"], + Literal["failed"] + :vartype status: str or str or str or str or str + :ivar queries: The queries used to search for files. Required. + :vartype queries: list[str] + :ivar results: The results of the file search tool call. + :vartype results: list[~azure.ai.projects.models.FileSearchToolCallItemParamResult] + """ + + type: Literal[ItemType.FILE_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the file search tool call. One of ``in_progress``, + ``searching``, ``incomplete`` or ``failed``,. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], + Literal[\"incomplete\"], Literal[\"failed\"]""" + queries: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The queries used to search for files. Required.""" + results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The results of the file search tool call.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], + queries: list[str], + created_by: Optional["_models.CreatedBy"] = None, + results: Optional[list["_models.FileSearchToolCallItemParamResult"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FILE_SEARCH_CALL # type: ignore + + +class FolderDatasetVersion(DatasetVersion, discriminator="uri_folder"): + """FileDatasetVersion Definition. + + :ivar data_uri: URI of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 + `_. Required. + :vartype data_uri: str + :ivar is_reference: Indicates if the dataset holds a reference to the storage, or the dataset + manages storage itself. If true, the underlying data will not be deleted when the dataset + version is deleted. + :vartype is_reference: bool + :ivar connection_name: The Azure Storage Account connection name. Required if + startPendingUploadVersion was not called before creating the Dataset. + :vartype connection_name: str + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Dataset type. Required. URI folder. 
+ :vartype type: str or ~azure.ai.projects.models.URI_FOLDER + """ + + type: Literal[DatasetType.URI_FOLDER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Dataset type. Required. URI folder.""" + + @overload + def __init__( + self, + *, + data_uri: str, + connection_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = DatasetType.URI_FOLDER # type: ignore + + +class FunctionTool(Tool, discriminator="function"): + """Defines a function in your own code the model can choose to call. Learn more about `function + calling `_. + + :ivar type: The type of the function tool. Always ``function``. Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION + :ivar name: The name of the function to call. Required. + :vartype name: str + :ivar description: A description of the function. Used by the model to determine whether or not + to call the function. + :vartype description: str + :ivar parameters: A JSON schema object describing the parameters of the function. Required. + :vartype parameters: any + :ivar strict: Whether to enforce strict parameter validation. Default ``true``. Required. + :vartype strict: bool + """ + + type: Literal[ToolType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the function tool. Always ``function``. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to call. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of the function. Used by the model to determine whether or not to call the + function.""" + parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON schema object describing the parameters of the function. Required.""" + strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to enforce strict parameter validation. Default ``true``. Required.""" + + @overload + def __init__( + self, + *, + name: str, + parameters: Any, + strict: bool, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.FUNCTION # type: ignore + + +class FunctionToolCallItemParam(ItemParam, discriminator="function_call"): + """A tool call to run a function. See the + `function calling guide `_ for more information. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar name: The name of the function to run. Required. + :vartype name: str + :ivar arguments: A JSON string of the arguments to pass to the function. Required. 
+ :vartype arguments: str + """ + + type: Literal[ItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments to pass to the function. Required.""" + + @overload + def __init__( + self, + *, + call_id: str, + name: str, + arguments: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FUNCTION_CALL # type: ignore + + +class FunctionToolCallItemResource(ItemResource, discriminator="function_call"): + """A tool call to run a function. See the + `function calling guide `_ for more information. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar name: The name of the function to run. Required. + :vartype name: str + :ivar arguments: A JSON string of the arguments to pass to the function. Required. + :vartype arguments: str + """ + + type: Literal[ItemType.FUNCTION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments to pass to the function. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + call_id: str, + name: str, + arguments: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... 
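+
+    # Illustrative sketch (not part of the generated code): a custom agent loop would
+    # typically answer a function_call item by running the named local function and
+    # returning a FunctionToolCallOutputItemParam that echoes the call_id. Here
+    # ``call`` (a parsed FunctionToolCallItemResource) and ``get_weather`` are assumed,
+    # hypothetical names.
+    #
+    #     import json
+    #     args = json.loads(call.arguments)          # arguments arrive as a JSON string
+    #     result = get_weather(**args)               # hypothetical local function
+    #     reply = FunctionToolCallOutputItemParam(
+    #         call_id=call.call_id,                  # must match the model's call_id
+    #         output=json.dumps(result),             # output must also be a JSON string
+    #     )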
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FUNCTION_CALL # type: ignore + + +class FunctionToolCallOutputItemParam(ItemParam, discriminator="function_call_output"): + """The output of a function tool call. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar output: A JSON string of the output of the function tool call. Required. + :vartype output: str + """ + + type: Literal[ItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. Required.""" + output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the output of the function tool call. Required.""" + + @overload + def __init__( + self, + *, + call_id: str, + output: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FUNCTION_CALL_OUTPUT # type: ignore + + +class FunctionToolCallOutputItemResource(ItemResource, discriminator="function_call_output"): + """The output of a function tool call. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION_CALL_OUTPUT + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar call_id: The unique ID of the function tool call generated by the model. Required. + :vartype call_id: str + :ivar output: A JSON string of the output of the function tool call. Required. + :vartype output: str + """ + + type: Literal[ItemType.FUNCTION_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the function tool call generated by the model. Required.""" + output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the output of the function tool call. 
Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + call_id: str, + output: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.FUNCTION_CALL_OUTPUT # type: ignore + + +class HostedAgentDefinition(AgentDefinition, discriminator="hosted"): + """The hosted agent definition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ImageBasedHostedAgentDefinition + + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.HOSTED + :ivar tools: An array of tools the hosted agent's model may call while generating a response. + You + can specify which tool to use by setting the ``tool_choice`` parameter. + :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar container_protocol_versions: The protocols that the agent supports for ingress + communication of the containers. Required. + :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] + :ivar cpu: The CPU configuration for the hosted agent. Required. + :vartype cpu: str + :ivar memory: The memory configuration for the hosted agent. Required. + :vartype memory: str + :ivar environment_variables: Environment variables to set in the hosted agent container. + :vartype environment_variables: dict[str, str] + """ + + __mapping__: dict[str, _Model] = {} + kind: Literal[AgentKind.HOSTED] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of tools the hosted agent's model may call while generating a response. You + can specify which tool to use by setting the ``tool_choice`` parameter.""" + container_protocol_versions: list["_models.ProtocolVersionRecord"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The protocols that the agent supports for ingress communication of the containers. Required.""" + cpu: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The CPU configuration for the hosted agent. Required.""" + memory: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The memory configuration for the hosted agent. Required.""" + environment_variables: Optional[dict[str, str]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Environment variables to set in the hosted agent container.""" + + @overload + def __init__( + self, + *, + container_protocol_versions: list["_models.ProtocolVersionRecord"], + cpu: str, + memory: str, + rai_config: Optional["_models.RaiConfig"] = None, + tools: Optional[list["_models.Tool"]] = None, + environment_variables: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = AgentKind.HOSTED # type: ignore + + +class HourlyRecurrenceSchedule(RecurrenceSchedule, discriminator="Hourly"): + """Hourly recurrence schedule. + + :ivar type: Required. Hourly recurrence pattern. + :vartype type: str or ~azure.ai.projects.models.HOURLY + """ + + type: Literal[RecurrenceType.HOURLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Hourly recurrence pattern.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = RecurrenceType.HOURLY # type: ignore + + +class HumanEvaluationRuleAction(EvaluationRuleAction, discriminator="humanEvaluation"): + """Evaluation rule action for human evaluation. + + :ivar type: Required. Human evaluation. + :vartype type: str or ~azure.ai.projects.models.HUMAN_EVALUATION + :ivar template_id: Human evaluation template Id. Required. + :vartype template_id: str + """ + + type: Literal[EvaluationRuleActionType.HUMAN_EVALUATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Human evaluation.""" + template_id: str = rest_field(name="templateId", visibility=["read", "create", "update", "delete", "query"]) + """Human evaluation template Id. Required.""" + + @overload + def __init__( + self, + *, + template_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = EvaluationRuleActionType.HUMAN_EVALUATION # type: ignore + + +class ImageBasedHostedAgentDefinition(HostedAgentDefinition, discriminator="hosted"): + """The image-based deployment definition for a hosted agent. + + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar tools: An array of tools the hosted agent's model may call while generating a response. + You + can specify which tool to use by setting the ``tool_choice`` parameter. + :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar container_protocol_versions: The protocols that the agent supports for ingress + communication of the containers. Required. + :vartype container_protocol_versions: list[~azure.ai.projects.models.ProtocolVersionRecord] + :ivar cpu: The CPU configuration for the hosted agent. Required. + :vartype cpu: str + :ivar memory: The memory configuration for the hosted agent. Required. + :vartype memory: str + :ivar environment_variables: Environment variables to set in the hosted agent container. + :vartype environment_variables: dict[str, str] + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.HOSTED + :ivar image: The image for the hosted agent. Required. + :vartype image: str + """ + + image: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The image for the hosted agent. 
Required.""" + + @overload + def __init__( + self, + *, + container_protocol_versions: list["_models.ProtocolVersionRecord"], + cpu: str, + memory: str, + image: str, + rai_config: Optional["_models.RaiConfig"] = None, + tools: Optional[list["_models.Tool"]] = None, + environment_variables: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ImageGenTool(Tool, discriminator="image_generation"): + """A tool that generates images using a model like ``gpt-image-1``. + + :ivar type: The type of the image generation tool. Always ``image_generation``. Required. + :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION + :ivar model: The image generation model to use. Default: ``gpt-image-1``. Default value is + "gpt-image-1". + :vartype model: str + :ivar quality: The quality of the generated image. One of ``low``, ``medium``, ``high``, + or ``auto``. Default: ``auto``. Is one of the following types: Literal["low"], + Literal["medium"], Literal["high"], Literal["auto"] + :vartype quality: str or str or str or str + :ivar size: The size of the generated image. One of ``1024x1024``, ``1024x1536``, + ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: + Literal["1024x1024"], Literal["1024x1536"], Literal["1536x1024"], Literal["auto"] + :vartype size: str or str or str or str + :ivar output_format: The output format of the generated image. One of ``png``, ``webp``, or + ``jpeg``. Default: ``png``. Is one of the following types: Literal["png"], Literal["webp"], + Literal["jpeg"] + :vartype output_format: str or str or str + :ivar output_compression: Compression level for the output image. Default: 100. + :vartype output_compression: int + :ivar moderation: Moderation level for the generated image. Default: ``auto``. Is either a + Literal["auto"] type or a Literal["low"] type. + :vartype moderation: str or str + :ivar background: Background type for the generated image. One of ``transparent``, + ``opaque``, or ``auto``. Default: ``auto``. Is one of the following types: + Literal["transparent"], Literal["opaque"], Literal["auto"] + :vartype background: str or str or str + :ivar input_image_mask: Optional mask for inpainting. Contains ``image_url`` + (string, optional) and ``file_id`` (string, optional). + :vartype input_image_mask: ~azure.ai.projects.models.ImageGenToolInputImageMask + :ivar partial_images: Number of partial images to generate in streaming mode, from 0 (default + value) to 3. + :vartype partial_images: int + """ + + type: Literal[ToolType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the image generation tool. Always ``image_generation``. Required.""" + model: Optional[Literal["gpt-image-1"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The image generation model to use. Default: ``gpt-image-1``. Default value is \"gpt-image-1\".""" + quality: Optional[Literal["low", "medium", "high", "auto"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The quality of the generated image. One of ``low``, ``medium``, ``high``, + or ``auto``. Default: ``auto``. 
Is one of the following types: Literal[\"low\"], + Literal[\"medium\"], Literal[\"high\"], Literal[\"auto\"]""" + size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The size of the generated image. One of ``1024x1024``, ``1024x1536``, + ``1536x1024``, or ``auto``. Default: ``auto``. Is one of the following types: + Literal[\"1024x1024\"], Literal[\"1024x1536\"], Literal[\"1536x1024\"], Literal[\"auto\"]""" + output_format: Optional[Literal["png", "webp", "jpeg"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The output format of the generated image. One of ``png``, ``webp``, or + ``jpeg``. Default: ``png``. Is one of the following types: Literal[\"png\"], Literal[\"webp\"], + Literal[\"jpeg\"]""" + output_compression: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Compression level for the output image. Default: 100.""" + moderation: Optional[Literal["auto", "low"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Moderation level for the generated image. Default: ``auto``. Is either a Literal[\"auto\"] type + or a Literal[\"low\"] type.""" + background: Optional[Literal["transparent", "opaque", "auto"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Background type for the generated image. One of ``transparent``, + ``opaque``, or ``auto``. Default: ``auto``. Is one of the following types: + Literal[\"transparent\"], Literal[\"opaque\"], Literal[\"auto\"]""" + input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Optional mask for inpainting. Contains ``image_url`` + (string, optional) and ``file_id`` (string, optional).""" + partial_images: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Number of partial images to generate in streaming mode, from 0 (default value) to 3.""" + + @overload + def __init__( + self, + *, + model: Optional[Literal["gpt-image-1"]] = None, + quality: Optional[Literal["low", "medium", "high", "auto"]] = None, + size: Optional[Literal["1024x1024", "1024x1536", "1536x1024", "auto"]] = None, + output_format: Optional[Literal["png", "webp", "jpeg"]] = None, + output_compression: Optional[int] = None, + moderation: Optional[Literal["auto", "low"]] = None, + background: Optional[Literal["transparent", "opaque", "auto"]] = None, + input_image_mask: Optional["_models.ImageGenToolInputImageMask"] = None, + partial_images: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.IMAGE_GENERATION # type: ignore + + +class ImageGenToolCallItemParam(ItemParam, discriminator="image_generation_call"): + """An image generation request made by the model. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL + :ivar result: The generated image encoded in base64. Required. 
+ :vartype result: str + """ + + type: Literal[ItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + result: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The generated image encoded in base64. Required.""" + + @overload + def __init__( + self, + *, + result: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.IMAGE_GENERATION_CALL # type: ignore + + +class ImageGenToolCallItemResource(ItemResource, discriminator="image_generation_call"): + """An image generation request made by the model. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION_CALL + :ivar status: Required. Is one of the following types: Literal["in_progress"], + Literal["completed"], Literal["generating"], Literal["failed"] + :vartype status: str or str or str or str + :ivar result: The generated image encoded in base64. Required. + :vartype result: str + """ + + type: Literal[ItemType.IMAGE_GENERATION_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "generating", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required. Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], + Literal[\"generating\"], Literal[\"failed\"]""" + result: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The generated image encoded in base64. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "generating", "failed"], + result: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.IMAGE_GENERATION_CALL # type: ignore + + +class ImageGenToolInputImageMask(_Model): + """ImageGenToolInputImageMask. + + :ivar image_url: Base64-encoded mask image. + :vartype image_url: str + :ivar file_id: File ID for the mask image. + :vartype file_id: str + """ + + image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Base64-encoded mask image.""" + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """File ID for the mask image.""" + + @overload + def __init__( + self, + *, + image_url: Optional[str] = None, + file_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class Insight(_Model):
+    """The response body for cluster insights.
+
+    :ivar id: The unique identifier for the insights report. Required.
+    :vartype id: str
+    :ivar metadata: Metadata about the insights report. Required.
+    :vartype metadata: ~azure.ai.projects.models.InsightsMetadata
+    :ivar state: The current state of the insights. Required. Known values are: "NotStarted",
+        "Running", "Succeeded", "Failed", and "Canceled".
+    :vartype state: str or ~azure.ai.projects.models.OperationState
+    :ivar display_name: User-friendly display name for the insight. Required.
+    :vartype display_name: str
+    :ivar request: Request for the insights analysis. Required.
+    :vartype request: ~azure.ai.projects.models.InsightRequest
+    :ivar result: The result of the insights report.
+    :vartype result: ~azure.ai.projects.models.InsightResult
+    """
+
+    id: str = rest_field(visibility=["read"])
+    """The unique identifier for the insights report. Required."""
+    metadata: "_models.InsightsMetadata" = rest_field(visibility=["read"])
+    """Metadata about the insights report. Required."""
+    state: Union[str, "_models.OperationState"] = rest_field(visibility=["read"])
+    """The current state of the insights. Required. Known values are: \"NotStarted\", \"Running\",
+        \"Succeeded\", \"Failed\", and \"Canceled\"."""
+    display_name: str = rest_field(name="displayName", visibility=["read", "create", "update", "delete", "query"])
+    """User-friendly display name for the insight. Required."""
+    request: "_models.InsightRequest" = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Request for the insights analysis. Required."""
+    result: Optional["_models.InsightResult"] = rest_field(visibility=["read"])
+    """The result of the insights report."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        display_name: str,
+        request: "_models.InsightRequest",
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class InsightCluster(_Model):
+    """A cluster of analysis samples.
+
+    :ivar id: The id of the analysis cluster. Required.
+    :vartype id: str
+    :ivar label: Label for the cluster. Required.
+    :vartype label: str
+    :ivar suggestion: Suggestion for the cluster. Required.
+    :vartype suggestion: str
+    :ivar description: Description of the analysis cluster. Required.
+    :vartype description: str
+    :ivar weight: The weight of the analysis cluster. This indicates the number of samples in the
+        cluster. Required.
+    :vartype weight: int
+    :ivar sub_clusters: List of subclusters within this cluster. Empty if no subclusters exist.
+    :vartype sub_clusters: list[~azure.ai.projects.models.InsightCluster]
+    :ivar samples: List of samples that belong to this cluster. Empty if samples are part of
+        subclusters.
+    :vartype samples: list[~azure.ai.projects.models.InsightSample]
+    """
+
+    id: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The id of the analysis cluster. Required."""
+    label: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Label for the cluster. Required."""
+    suggestion: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Suggestion for the cluster. Required."""
Required.""" + description: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description of the analysis cluster. Required.""" + weight: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The weight of the analysis cluster. This indicate number of samples in the cluster. Required.""" + sub_clusters: Optional[list["_models.InsightCluster"]] = rest_field( + name="subClusters", visibility=["read", "create", "update", "delete", "query"] + ) + """List of subclusters within this cluster. Empty if no subclusters exist.""" + samples: Optional[list["_models.InsightSample"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of samples that belong to this cluster. Empty if samples are part of subclusters.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + label: str, + suggestion: str, + description: str, + weight: int, + sub_clusters: Optional[list["_models.InsightCluster"]] = None, + samples: Optional[list["_models.InsightSample"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class InsightModelConfiguration(_Model): + """Configuration of the model used in the insight generation. + + :ivar model_deployment_name: The model deployment to be evaluated. Accepts either the + deployment name alone or with the connection name as '{connectionName}/'. + Required. + :vartype model_deployment_name: str + """ + + model_deployment_name: str = rest_field( + name="modelDeploymentName", visibility=["read", "create", "update", "delete", "query"] + ) + """The model deployment to be evaluated. Accepts either the deployment name alone or with the + connection name as '{connectionName}/'. Required.""" + + @overload + def __init__( + self, + *, + model_deployment_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class InsightScheduleTask(ScheduleTask, discriminator="Insight"): + """Insight task for the schedule. + + :ivar configuration: Configuration for the task. + :vartype configuration: dict[str, str] + :ivar type: Required. Insight task. + :vartype type: str or ~azure.ai.projects.models.INSIGHT + :ivar insight: The insight payload. Required. + :vartype insight: ~azure.ai.projects.models.Insight + """ + + type: Literal[ScheduleTaskType.INSIGHT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Insight task.""" + insight: "_models.Insight" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The insight payload. Required.""" + + @overload + def __init__( + self, + *, + insight: "_models.Insight", + configuration: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ScheduleTaskType.INSIGHT # type: ignore + + +class InsightsMetadata(_Model): + """Metadata about the insights. + + :ivar created_at: The timestamp when the insights were created. Required. + :vartype created_at: ~datetime.datetime + :ivar completed_at: The timestamp when the insights were completed. + :vartype completed_at: ~datetime.datetime + """ + + created_at: datetime.datetime = rest_field( + name="createdAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp when the insights were created. Required.""" + completed_at: Optional[datetime.datetime] = rest_field( + name="completedAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp when the insights were completed.""" + + @overload + def __init__( + self, + *, + created_at: datetime.datetime, + completed_at: Optional[datetime.datetime] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class InsightSummary(_Model): + """Summary of the error cluster analysis. + + :ivar sample_count: Total number of samples analyzed. Required. + :vartype sample_count: int + :ivar unique_subcluster_count: Total number of unique subcluster labels. Required. + :vartype unique_subcluster_count: int + :ivar unique_cluster_count: Total number of unique clusters. Required. + :vartype unique_cluster_count: int + :ivar method: Method used for clustering. Required. + :vartype method: str + :ivar usage: Token usage while performing clustering analysis. Required. + :vartype usage: ~azure.ai.projects.models.ClusterTokenUsage + """ + + sample_count: int = rest_field(name="sampleCount", visibility=["read", "create", "update", "delete", "query"]) + """Total number of samples analyzed. Required.""" + unique_subcluster_count: int = rest_field( + name="uniqueSubclusterCount", visibility=["read", "create", "update", "delete", "query"] + ) + """Total number of unique subcluster labels. Required.""" + unique_cluster_count: int = rest_field( + name="uniqueClusterCount", visibility=["read", "create", "update", "delete", "query"] + ) + """Total number of unique clusters. Required.""" + method: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Method used for clustering. Required.""" + usage: "_models.ClusterTokenUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Token usage while performing clustering analysis. Required.""" + + @overload + def __init__( + self, + *, + sample_count: int, + unique_subcluster_count: int, + unique_cluster_count: int, + method: str, + usage: "_models.ClusterTokenUsage", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class WorkflowActionOutputItemResource(ItemResource, discriminator="workflow_action"): + """WorkflowActionOutputItemResource. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + InvokeAzureAgentWorkflowActionOutputItemResource + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.WORKFLOW_ACTION + :ivar kind: The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required. + Default value is None. + :vartype kind: str + :ivar action_id: Unique identifier for the action. Required. + :vartype action_id: str + :ivar parent_action_id: ID of the parent action if this is a nested action. + :vartype parent_action_id: str + :ivar previous_action_id: ID of the previous action if this action follows another. + :vartype previous_action_id: str + :ivar status: Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). + Required. Is one of the following types: Literal["completed"], Literal["failed"], + Literal["in_progress"], Literal["cancelled"] + :vartype status: str or str or str or str + """ + + __mapping__: dict[str, _Model] = {} + type: Literal[ItemType.WORKFLOW_ACTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) + """The kind of CSDL action (e.g., 'SetVariable', 'InvokeAzureAgent'). Required. Default value is + None.""" + action_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique identifier for the action. Required.""" + parent_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """ID of the parent action if this is a nested action.""" + previous_action_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """ID of the previous action if this action follows another.""" + status: Literal["completed", "failed", "in_progress", "cancelled"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). Required. Is + one of the following types: Literal[\"completed\"], Literal[\"failed\"], + Literal[\"in_progress\"], Literal[\"cancelled\"]""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + kind: str, + action_id: str, + status: Literal["completed", "failed", "in_progress", "cancelled"], + created_by: Optional["_models.CreatedBy"] = None, + parent_action_id: Optional[str] = None, + previous_action_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.WORKFLOW_ACTION # type: ignore + + +class InvokeAzureAgentWorkflowActionOutputItemResource( + WorkflowActionOutputItemResource, discriminator="InvokeAzureAgent" +): # pylint: disable=name-too-long + """Details about an agent invocation as part of a workflow action. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.WORKFLOW_ACTION + :ivar action_id: Unique identifier for the action. Required. + :vartype action_id: str + :ivar parent_action_id: ID of the parent action if this is a nested action. + :vartype parent_action_id: str + :ivar previous_action_id: ID of the previous action if this action follows another. + :vartype previous_action_id: str + :ivar status: Status of the action (e.g., 'in_progress', 'completed', 'failed', 'cancelled'). + Required. Is one of the following types: Literal["completed"], Literal["failed"], + Literal["in_progress"], Literal["cancelled"] + :vartype status: str or str or str or str + :ivar kind: Required. Default value is "InvokeAzureAgent". + :vartype kind: str + :ivar agent: Agent id. Required. + :vartype agent: ~azure.ai.projects.models.AgentId + :ivar conversation_id: ID of the conversation for the agent invocation. + :vartype conversation_id: str + :ivar response_id: The response id for the agent invocation. Required. + :vartype response_id: str + """ + + __mapping__: dict[str, _Model] = {} + kind: Literal["InvokeAzureAgent"] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Default value is \"InvokeAzureAgent\".""" + agent: "_models.AgentId" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Agent id. Required.""" + conversation_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """ID of the conversation for the agent invocation.""" + response_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response id for the agent invocation. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + action_id: str, + status: Literal["completed", "failed", "in_progress", "cancelled"], + agent: "_models.AgentId", + response_id: str, + created_by: Optional["_models.CreatedBy"] = None, + parent_action_id: Optional[str] = None, + previous_action_id: Optional[str] = None, + conversation_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = "InvokeAzureAgent" # type: ignore + + +class ItemContent(_Model): + """ItemContent. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ItemContentInputAudio, ItemContentInputFile, ItemContentInputImage, ItemContentInputText, + ItemContentOutputAudio, ItemContentOutputText, ItemContentRefusal + + :ivar type: Required. Known values are: "input_text", "input_audio", "input_image", + "input_file", "output_text", "output_audio", and "refusal". + :vartype type: str or ~azure.ai.projects.models.ItemContentType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"input_text\", \"input_audio\", \"input_image\", \"input_file\", + \"output_text\", \"output_audio\", and \"refusal\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ItemContentInputAudio(ItemContent, discriminator="input_audio"): + """An audio input to the model. + + :ivar type: The type of the input item. Always ``input_audio``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_AUDIO + :ivar data: Base64-encoded audio data. Required. + :vartype data: str + :ivar format: The format of the audio data. Currently supported formats are ``mp3`` and + ``wav``. Required. Is either a Literal["mp3"] type or a Literal["wav"] type. + :vartype format: str or str + """ + + type: Literal[ItemContentType.INPUT_AUDIO] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_audio``. Required.""" + data: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Base64-encoded audio data. Required.""" + format: Literal["mp3", "wav"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The format of the audio data. Currently supported formats are ``mp3`` and + ``wav``. Required. Is either a Literal[\"mp3\"] type or a Literal[\"wav\"] type.""" + + @overload + def __init__( + self, + *, + data: str, + format: Literal["mp3", "wav"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.INPUT_AUDIO # type: ignore + + +class ItemContentInputFile(ItemContent, discriminator="input_file"): + """A file input to the model. + + :ivar type: The type of the input item. Always ``input_file``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_FILE + :ivar file_id: The ID of the file to be sent to the model. + :vartype file_id: str + :ivar filename: The name of the file to be sent to the model. + :vartype filename: str + :ivar file_data: The content of the file to be sent to the model. + :vartype file_data: str + """ + + type: Literal[ItemContentType.INPUT_FILE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_file``. Required.""" + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the file to be sent to the model.""" + filename: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the file to be sent to the model.""" + file_data: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content of the file to be sent to the model.""" + + @overload + def __init__( + self, + *, + file_id: Optional[str] = None, + filename: Optional[str] = None, + file_data: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.INPUT_FILE # type: ignore + + +class ItemContentInputImage(ItemContent, discriminator="input_image"): + """An image input to the model. Learn about `image inputs `_. 
+ + :ivar type: The type of the input item. Always ``input_image``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_IMAGE + :ivar image_url: The URL of the image to be sent to the model. A fully qualified URL or base64 + encoded image in a data URL. + :vartype image_url: str + :ivar file_id: The ID of the file to be sent to the model. + :vartype file_id: str + :ivar detail: The detail level of the image to be sent to the model. One of ``high``, ``low``, + or ``auto``. Defaults to ``auto``. Is one of the following types: Literal["low"], + Literal["high"], Literal["auto"] + :vartype detail: str or str or str + """ + + type: Literal[ItemContentType.INPUT_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_image``. Required.""" + image_url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the image to be sent to the model. A fully qualified URL or base64 encoded image in + a data URL.""" + file_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the file to be sent to the model.""" + detail: Optional[Literal["low", "high", "auto"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The detail level of the image to be sent to the model. One of ``high``, ``low``, or ``auto``. + Defaults to ``auto``. Is one of the following types: Literal[\"low\"], Literal[\"high\"], + Literal[\"auto\"]""" + + @overload + def __init__( + self, + *, + image_url: Optional[str] = None, + file_id: Optional[str] = None, + detail: Optional[Literal["low", "high", "auto"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.INPUT_IMAGE # type: ignore + + +class ItemContentInputText(ItemContent, discriminator="input_text"): + """A text input to the model. + + :ivar type: The type of the input item. Always ``input_text``. Required. + :vartype type: str or ~azure.ai.projects.models.INPUT_TEXT + :ivar text: The text input to the model. Required. + :vartype text: str + """ + + type: Literal[ItemContentType.INPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the input item. Always ``input_text``. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text input to the model. Required.""" + + @overload + def __init__( + self, + *, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.INPUT_TEXT # type: ignore + + +class ItemContentOutputAudio(ItemContent, discriminator="output_audio"): + """An audio output from the model. + + :ivar type: The type of the output audio. Always ``output_audio``. Required. + :vartype type: str or ~azure.ai.projects.models.OUTPUT_AUDIO + :ivar data: Base64-encoded audio data from the model. Required. 
+    :vartype data: str
+    :ivar transcript: The transcript of the audio data from the model. Required.
+    :vartype transcript: str
+    """
+
+    type: Literal[ItemContentType.OUTPUT_AUDIO] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the output audio. Always ``output_audio``. Required."""
+    data: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Base64-encoded audio data from the model. Required."""
+    transcript: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The transcript of the audio data from the model. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        data: str,
+        transcript: str,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ItemContentType.OUTPUT_AUDIO  # type: ignore
+
+
+class ItemContentOutputText(ItemContent, discriminator="output_text"):
+    """A text output from the model.
+
+    :ivar type: The type of the output text. Always ``output_text``. Required.
+    :vartype type: str or ~azure.ai.projects.models.OUTPUT_TEXT
+    :ivar text: The text output from the model. Required.
+    :vartype text: str
+    :ivar annotations: The annotations of the text output. Required.
+    :vartype annotations: list[~azure.ai.projects.models.Annotation]
+    :ivar logprobs:
+    :vartype logprobs: list[~azure.ai.projects.models.LogProb]
+    """
+
+    type: Literal[ItemContentType.OUTPUT_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the output text. Always ``output_text``. Required."""
+    text: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The text output from the model. Required."""
+    annotations: list["_models.Annotation"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The annotations of the text output. Required."""
+    logprobs: Optional[list["_models.LogProb"]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+
+    @overload
+    def __init__(
+        self,
+        *,
+        text: str,
+        annotations: list["_models.Annotation"],
+        logprobs: Optional[list["_models.LogProb"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.type = ItemContentType.OUTPUT_TEXT  # type: ignore
+
+
+class ItemContentRefusal(ItemContent, discriminator="refusal"):
+    """A refusal from the model.
+
+    :ivar type: The type of the refusal. Always ``refusal``. Required.
+    :vartype type: str or ~azure.ai.projects.models.REFUSAL
+    :ivar refusal: The refusal explanation from the model. Required.
+    :vartype refusal: str
+    """
+
+    type: Literal[ItemContentType.REFUSAL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"])  # type: ignore
+    """The type of the refusal. Always ``refusal``. Required."""
+    refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The refusal explanation from the model. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        refusal: str,
+    ) -> None: ...
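+
+    # Illustrative sketch (assumed usage, not generated code): assistant output content
+    # can mix text and refusal parts. ItemContentOutputText requires ``annotations``,
+    # so pass an empty list when there are none.
+    #
+    #     parts = [
+    #         ItemContentOutputText(text="Here is what I can answer.", annotations=[]),
+    #         ItemContentRefusal(refusal="I can't help with the rest of that request."),
+    #     ]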
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemContentType.REFUSAL # type: ignore + + +class ItemReferenceItemParam(ItemParam, discriminator="item_reference"): + """An internal identifier for an item to reference. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.ITEM_REFERENCE + :ivar id: The service-originated ID of the previously generated response item being referenced. + Required. + :vartype id: str + """ + + type: Literal[ItemType.ITEM_REFERENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The service-originated ID of the previously generated response item being referenced. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.ITEM_REFERENCE # type: ignore + + +class LocalShellExecAction(_Model): + """Execute a shell command on the server. + + :ivar type: The type of the local shell action. Always ``exec``. Required. Default value is + "exec". + :vartype type: str + :ivar command: The command to run. Required. + :vartype command: list[str] + :ivar timeout_ms: Optional timeout in milliseconds for the command. + :vartype timeout_ms: int + :ivar working_directory: Optional working directory to run the command in. + :vartype working_directory: str + :ivar env: Environment variables to set for the command. Required. + :vartype env: dict[str, str] + :ivar user: Optional user to run the command as. + :vartype user: str + """ + + type: Literal["exec"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The type of the local shell action. Always ``exec``. Required. Default value is \"exec\".""" + command: list[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The command to run. Required.""" + timeout_ms: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional timeout in milliseconds for the command.""" + working_directory: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional working directory to run the command in.""" + env: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Environment variables to set for the command. Required.""" + user: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional user to run the command as.""" + + @overload + def __init__( + self, + *, + command: list[str], + env: dict[str, str], + timeout_ms: Optional[int] = None, + working_directory: Optional[str] = None, + user: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type: Literal["exec"] = "exec" + + +class LocalShellTool(Tool, discriminator="local_shell"): + """A tool that allows the model to execute shell commands in a local environment. + + :ivar type: The type of the local shell tool. Always ``local_shell``. Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL + """ + + type: Literal[ToolType.LOCAL_SHELL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the local shell tool. Always ``local_shell``. Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.LOCAL_SHELL # type: ignore + + +class LocalShellToolCallItemParam(ItemParam, discriminator="local_shell_call"): + """A tool call to run a command on the local shell. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL + :ivar call_id: The unique ID of the local shell tool call generated by the model. Required. + :vartype call_id: str + :ivar action: Required. + :vartype action: ~azure.ai.projects.models.LocalShellExecAction + """ + + type: Literal[ItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the local shell tool call generated by the model. Required.""" + action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + call_id: str, + action: "_models.LocalShellExecAction", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.LOCAL_SHELL_CALL # type: ignore + + +class LocalShellToolCallItemResource(ItemResource, discriminator="local_shell_call"): + """A tool call to run a command on the local shell. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL + :ivar status: Required. Is one of the following types: Literal["in_progress"], + Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar call_id: The unique ID of the local shell tool call generated by the model. Required. + :vartype call_id: str + :ivar action: Required. + :vartype action: ~azure.ai.projects.models.LocalShellExecAction + """ + + type: Literal[ItemType.LOCAL_SHELL_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required. 
Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], + Literal[\"incomplete\"]""" + call_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the local shell tool call generated by the model. Required.""" + action: "_models.LocalShellExecAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + call_id: str, + action: "_models.LocalShellExecAction", + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.LOCAL_SHELL_CALL # type: ignore + + +class LocalShellToolCallOutputItemParam(ItemParam, discriminator="local_shell_call_output"): + """The output of a local shell tool call. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT + :ivar output: A JSON string of the output of the local shell tool call. Required. + :vartype output: str + """ + + type: Literal[ItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the output of the local shell tool call. Required.""" + + @overload + def __init__( + self, + *, + output: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.LOCAL_SHELL_CALL_OUTPUT # type: ignore + + +class LocalShellToolCallOutputItemResource(ItemResource, discriminator="local_shell_call_output"): + """The output of a local shell tool call. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.LOCAL_SHELL_CALL_OUTPUT + :ivar status: Required. Is one of the following types: Literal["in_progress"], + Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar output: A JSON string of the output of the local shell tool call. Required. + :vartype output: str + """ + + type: Literal[ItemType.LOCAL_SHELL_CALL_OUTPUT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required. Is one of the following types: Literal[\"in_progress\"], Literal[\"completed\"], + Literal[\"incomplete\"]""" + output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the output of the local shell tool call. 
Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + output: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.LOCAL_SHELL_CALL_OUTPUT # type: ignore + + +class LogProb(_Model): + """The log probability of a token. + + :ivar token: Required. + :vartype token: str + :ivar logprob: Required. + :vartype logprob: float + :ivar bytes: Required. + :vartype bytes: list[int] + :ivar top_logprobs: Required. + :vartype top_logprobs: list[~azure.ai.projects.models.TopLogProb] + """ + + token: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + logprob: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + top_logprobs: list["_models.TopLogProb"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + token: str, + logprob: float, + bytes: list[int], + top_logprobs: list["_models.TopLogProb"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagedAzureAISearchIndex(Index, discriminator="ManagedAzureSearch"): + """Managed Azure AI Search Index Definition. + + :ivar id: Asset ID, a unique identifier for the asset. + :vartype id: str + :ivar name: The name of the resource. Required. + :vartype name: str + :ivar version: The version of the resource. Required. + :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar type: Type of index. Required. Managed Azure Search + :vartype type: str or ~azure.ai.projects.models.MANAGED_AZURE_SEARCH + :ivar vector_store_id: Vector store id of managed index. Required. + :vartype vector_store_id: str + """ + + type: Literal[IndexType.MANAGED_AZURE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of index. Required. Managed Azure Search""" + vector_store_id: str = rest_field(name="vectorStoreId", visibility=["create"]) + """Vector store id of managed index. Required.""" + + @overload + def __init__( + self, + *, + vector_store_id: str, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = IndexType.MANAGED_AZURE_SEARCH # type: ignore + + +class MCPApprovalRequestItemParam(ItemParam, discriminator="mcp_approval_request"): + """A request for human approval of a tool invocation. 
+ + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST + :ivar server_label: The label of the MCP server making the request. Required. + :vartype server_label: str + :ivar name: The name of the tool to run. Required. + :vartype name: str + :ivar arguments: A JSON string of arguments for the tool. Required. + :vartype arguments: str + """ + + type: Literal[ItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server making the request. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of arguments for the tool. Required.""" + + @overload + def __init__( + self, + *, + server_label: str, + name: str, + arguments: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_APPROVAL_REQUEST # type: ignore + + +class MCPApprovalRequestItemResource(ItemResource, discriminator="mcp_approval_request"): + """A request for human approval of a tool invocation. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_REQUEST + :ivar server_label: The label of the MCP server making the request. Required. + :vartype server_label: str + :ivar name: The name of the tool to run. Required. + :vartype name: str + :ivar arguments: A JSON string of arguments for the tool. Required. + :vartype arguments: str + """ + + type: Literal[ItemType.MCP_APPROVAL_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server making the request. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool to run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of arguments for the tool. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + server_label: str, + name: str, + arguments: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_APPROVAL_REQUEST # type: ignore + + +class MCPApprovalResponseItemParam(ItemParam, discriminator="mcp_approval_response"): + """A response to an MCP approval request. + + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE + :ivar approval_request_id: The ID of the approval request being answered. Required. + :vartype approval_request_id: str + :ivar approve: Whether the request was approved. Required. + :vartype approve: bool + :ivar reason: Optional reason for the decision. + :vartype reason: str + """ + + type: Literal[ItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the approval request being answered. Required.""" + approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the request was approved. Required.""" + reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional reason for the decision.""" + + @overload + def __init__( + self, + *, + approval_request_id: str, + approve: bool, + reason: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_APPROVAL_RESPONSE # type: ignore + + +class MCPApprovalResponseItemResource(ItemResource, discriminator="mcp_approval_response"): + """A response to an MCP approval request. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_APPROVAL_RESPONSE + :ivar approval_request_id: The ID of the approval request being answered. Required. + :vartype approval_request_id: str + :ivar approve: Whether the request was approved. Required. + :vartype approve: bool + :ivar reason: Optional reason for the decision. + :vartype reason: str + """ + + type: Literal[ItemType.MCP_APPROVAL_RESPONSE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + approval_request_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the approval request being answered. Required.""" + approve: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the request was approved. Required.""" + reason: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional reason for the decision.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + approval_request_id: str, + approve: bool, + created_by: Optional["_models.CreatedBy"] = None, + reason: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_APPROVAL_RESPONSE # type: ignore + + +class MCPCallItemParam(ItemParam, discriminator="mcp_call"): + """An invocation of a tool on an MCP server. + + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.MCP_CALL + :ivar server_label: The label of the MCP server running the tool. Required. + :vartype server_label: str + :ivar name: The name of the tool that was run. Required. + :vartype name: str + :ivar arguments: A JSON string of the arguments passed to the tool. Required. + :vartype arguments: str + :ivar output: The output from the tool call. + :vartype output: str + :ivar error: The error from the tool call, if any. + :vartype error: str + """ + + type: Literal[ItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server running the tool. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool that was run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments passed to the tool. Required.""" + output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The output from the tool call.""" + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error from the tool call, if any.""" + + @overload + def __init__( + self, + *, + server_label: str, + name: str, + arguments: str, + output: Optional[str] = None, + error: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_CALL # type: ignore + + +class MCPCallItemResource(ItemResource, discriminator="mcp_call"): + """An invocation of a tool on an MCP server. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_CALL + :ivar server_label: The label of the MCP server running the tool. Required. + :vartype server_label: str + :ivar name: The name of the tool that was run. Required. + :vartype name: str + :ivar arguments: A JSON string of the arguments passed to the tool. Required. + :vartype arguments: str + :ivar output: The output from the tool call. + :vartype output: str + :ivar error: The error from the tool call, if any. + :vartype error: str + """ + + type: Literal[ItemType.MCP_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server running the tool. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool that was run. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A JSON string of the arguments passed to the tool. 
Required.""" + output: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The output from the tool call.""" + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error from the tool call, if any.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + server_label: str, + name: str, + arguments: str, + created_by: Optional["_models.CreatedBy"] = None, + output: Optional[str] = None, + error: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_CALL # type: ignore + + +class MCPListToolsItemParam(ItemParam, discriminator="mcp_list_tools"): + """A list of tools available on an MCP server. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS + :ivar server_label: The label of the MCP server. Required. + :vartype server_label: str + :ivar tools: The tools available on the server. Required. + :vartype tools: list[~azure.ai.projects.models.MCPListToolsTool] + :ivar error: Error message if the server could not list tools. + :vartype error: str + """ + + type: Literal[ItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server. Required.""" + tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The tools available on the server. Required.""" + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Error message if the server could not list tools.""" + + @overload + def __init__( + self, + *, + server_label: str, + tools: list["_models.MCPListToolsTool"], + error: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_LIST_TOOLS # type: ignore + + +class MCPListToolsItemResource(ItemResource, discriminator="mcp_list_tools"): + """A list of tools available on an MCP server. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.MCP_LIST_TOOLS + :ivar server_label: The label of the MCP server. Required. + :vartype server_label: str + :ivar tools: The tools available on the server. Required. + :vartype tools: list[~azure.ai.projects.models.MCPListToolsTool] + :ivar error: Error message if the server could not list tools. + :vartype error: str + """ + + type: Literal[ItemType.MCP_LIST_TOOLS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server. 
Required.""" + tools: list["_models.MCPListToolsTool"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The tools available on the server. Required.""" + error: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Error message if the server could not list tools.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + server_label: str, + tools: list["_models.MCPListToolsTool"], + created_by: Optional["_models.CreatedBy"] = None, + error: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MCP_LIST_TOOLS # type: ignore + + +class MCPListToolsTool(_Model): + """A tool available on an MCP server. + + :ivar name: The name of the tool. Required. + :vartype name: str + :ivar description: The description of the tool. + :vartype description: str + :ivar input_schema: The JSON schema describing the tool's input. Required. + :vartype input_schema: any + :ivar annotations: Additional annotations about the tool. + :vartype annotations: any + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The description of the tool.""" + input_schema: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The JSON schema describing the tool's input. Required.""" + annotations: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional annotations about the tool.""" + + @overload + def __init__( + self, + *, + name: str, + input_schema: Any, + description: Optional[str] = None, + annotations: Optional[Any] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MCPTool(Tool, discriminator="mcp"): + """Give the model access to additional tools via remote Model Context Protocol + (MCP) servers. `Learn more about MCP `_. + + :ivar type: The type of the MCP tool. Always ``mcp``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP + :ivar server_label: A label for this MCP server, used to identify it in tool calls. Required. + :vartype server_label: str + :ivar server_url: The URL for the MCP server. Required. + :vartype server_url: str + :ivar headers: Optional HTTP headers to send to the MCP server. Use for authentication + or other purposes. + :vartype headers: dict[str, str] + :ivar allowed_tools: List of allowed tool names or a filter object. Is either a [str] type or a + MCPToolAllowedTools1 type. + :vartype allowed_tools: list[str] or ~azure.ai.projects.models.MCPToolAllowedTools1 + :ivar require_approval: Specify which of the MCP server's tools require approval. Is one of the + following types: MCPToolRequireApproval1, Literal["always"], Literal["never"] + :vartype require_approval: ~azure.ai.projects.models.MCPToolRequireApproval1 or str or str + :ivar project_connection_id: The connection ID in the project for the MCP server. 
The + connection stores authentication and other connection details needed to connect to the MCP + server. + :vartype project_connection_id: str + """ + + type: Literal[ToolType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the MCP tool. Always ``mcp``. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A label for this MCP server, used to identify it in tool calls. Required.""" + server_url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL for the MCP server. Required.""" + headers: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional HTTP headers to send to the MCP server. Use for authentication + or other purposes.""" + allowed_tools: Optional[Union[list[str], "_models.MCPToolAllowedTools1"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of allowed tool names or a filter object. Is either a [str] type or a MCPToolAllowedTools1 + type.""" + require_approval: Optional[Union["_models.MCPToolRequireApproval1", Literal["always"], Literal["never"]]] = ( + rest_field(visibility=["read", "create", "update", "delete", "query"]) + ) + """Specify which of the MCP server's tools require approval. Is one of the following types: + MCPToolRequireApproval1, Literal[\"always\"], Literal[\"never\"]""" + project_connection_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The connection ID in the project for the MCP server. The connection stores authentication and + other connection details needed to connect to the MCP server.""" + + @overload + def __init__( + self, + *, + server_label: str, + server_url: str, + headers: Optional[dict[str, str]] = None, + allowed_tools: Optional[Union[list[str], "_models.MCPToolAllowedTools1"]] = None, + require_approval: Optional[ + Union["_models.MCPToolRequireApproval1", Literal["always"], Literal["never"]] + ] = None, + project_connection_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.MCP # type: ignore + + +class MCPToolAllowedTools1(_Model): + """MCPToolAllowedTools1. + + :ivar tool_names: List of allowed tool names. + :vartype tool_names: list[str] + """ + + tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of allowed tool names.""" + + @overload + def __init__( + self, + *, + tool_names: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MCPToolRequireApproval1(_Model): + """MCPToolRequireApproval1. + + :ivar always: A list of tools that always require approval. + :vartype always: ~azure.ai.projects.models.MCPToolRequireApprovalAlways + :ivar never: A list of tools that never require approval. 
+ :vartype never: ~azure.ai.projects.models.MCPToolRequireApprovalNever + """ + + always: Optional["_models.MCPToolRequireApprovalAlways"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A list of tools that always require approval.""" + never: Optional["_models.MCPToolRequireApprovalNever"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A list of tools that never require approval.""" + + @overload + def __init__( + self, + *, + always: Optional["_models.MCPToolRequireApprovalAlways"] = None, + never: Optional["_models.MCPToolRequireApprovalNever"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MCPToolRequireApprovalAlways(_Model): + """MCPToolRequireApprovalAlways. + + :ivar tool_names: List of tools that require approval. + :vartype tool_names: list[str] + """ + + tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of tools that require approval.""" + + @overload + def __init__( + self, + *, + tool_names: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MCPToolRequireApprovalNever(_Model): + """MCPToolRequireApprovalNever. + + :ivar tool_names: List of tools that do not require approval. + :vartype tool_names: list[str] + """ + + tool_names: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of tools that do not require approval.""" + + @overload + def __init__( + self, + *, + tool_names: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryOperation(_Model): + """Represents a single memory operation (create, update, or delete) performed on a memory item. + + :ivar kind: The type of memory operation being performed. Required. Known values are: "create", + "update", and "delete". + :vartype kind: str or ~azure.ai.projects.models.MemoryOperationKind + :ivar memory_item: The memory item to create, update, or delete. Required. + :vartype memory_item: ~azure.ai.projects.models.MemoryItem + """ + + kind: Union[str, "_models.MemoryOperationKind"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The type of memory operation being performed. Required. Known values are: \"create\", + \"update\", and \"delete\".""" + memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The memory item to create, update, or delete. Required.""" + + @overload + def __init__( + self, + *, + kind: Union[str, "_models.MemoryOperationKind"], + memory_item: "_models.MemoryItem", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemorySearchItem(_Model): + """A retrieved memory item from memory search. + + :ivar memory_item: Retrieved memory item. Required. + :vartype memory_item: ~azure.ai.projects.models.MemoryItem + """ + + memory_item: "_models.MemoryItem" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Retrieved memory item. Required.""" + + @overload + def __init__( + self, + *, + memory_item: "_models.MemoryItem", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemorySearchOptions(_Model): + """Memory search options. + + :ivar max_memories: Maximum number of memory items to return. + :vartype max_memories: int + """ + + max_memories: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Maximum number of memory items to return.""" + + @overload + def __init__( + self, + *, + max_memories: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemorySearchTool(Tool, discriminator="memory_search"): + """A tool for integrating memories into the agent. + + :ivar type: The type of the tool. Always ``memory_search``. Required. + :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH + :ivar memory_store_name: The name of the memory store to use. Required. + :vartype memory_store_name: str + :ivar scope: The namespace used to group and isolate memories, such as a user ID. + Limits which memories can be retrieved or updated. + Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. Required. + :vartype scope: str + :ivar search_options: Options for searching the memory store. + :vartype search_options: ~azure.ai.projects.models.MemorySearchOptions + :ivar update_delay: The amount of time to wait after inactivity before updating memories with + messages from the call (e.g., '0s', '5m'). Defaults to '60s'. + :vartype update_delay: ~datetime.timedelta + """ + + type: Literal[ToolType.MEMORY_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the tool. Always ``memory_search``. Required.""" + memory_store_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store to use. Required.""" + scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The namespace used to group and isolate memories, such as a user ID. + Limits which memories can be retrieved or updated. + Use special variable ``{{$userId}}`` to scope memories to the current signed-in user. 
Required."""
+ search_options: Optional["_models.MemorySearchOptions"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """Options for searching the memory store."""
+ update_delay: Optional[datetime.timedelta] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+ """The amount of time to wait after inactivity before updating memories with messages from the
+ call (e.g., '0s', '5m'). Defaults to '60s'."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ memory_store_name: str,
+ scope: str,
+ search_options: Optional["_models.MemorySearchOptions"] = None,
+ update_delay: Optional[datetime.timedelta] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ToolType.MEMORY_SEARCH # type: ignore
+
+
+class MemorySearchToolCallItemParam(ItemParam, discriminator="memory_search_call"):
+ """MemorySearchToolCallItemParam.
+
+ :ivar type: Required.
+ :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH_CALL
+ :ivar results: The results returned from the memory search.
+ :vartype results: list[~azure.ai.projects.models.MemorySearchItem]
+ """
+
+ type: Literal[ItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Required."""
+ results: Optional[list["_models.MemorySearchItem"]] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The results returned from the memory search."""
+
+ @overload
+ def __init__(
+ self,
+ *,
+ results: Optional[list["_models.MemorySearchItem"]] = None,
+ ) -> None: ...
+
+ @overload
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
+ """
+ :param mapping: raw JSON to initialize the model.
+ :type mapping: Mapping[str, Any]
+ """
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ self.type = ItemType.MEMORY_SEARCH_CALL # type: ignore
+
+
+class MemorySearchToolCallItemResource(ItemResource, discriminator="memory_search_call"):
+ """MemorySearchToolCallItemResource.
+
+ :ivar id: Required.
+ :vartype id: str
+ :ivar created_by: The information about the creator of the item.
+ :vartype created_by: ~azure.ai.projects.models.CreatedBy
+ :ivar type: Required.
+ :vartype type: str or ~azure.ai.projects.models.MEMORY_SEARCH_CALL
+ :ivar status: The status of the memory search tool call. One of ``in_progress``,
+ ``searching``, ``completed``, ``incomplete`` or ``failed``. Required. Is one of the following
+ types: Literal["in_progress"], Literal["searching"], Literal["completed"],
+ Literal["incomplete"], Literal["failed"]
+ :vartype status: str or str or str or str or str
+ :ivar results: The results returned from the memory search.
+ :vartype results: list[~azure.ai.projects.models.MemorySearchItem]
+ """
+
+ type: Literal[ItemType.MEMORY_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore
+ """Required."""
+ status: Literal["in_progress", "searching", "completed", "incomplete", "failed"] = rest_field(
+ visibility=["read", "create", "update", "delete", "query"]
+ )
+ """The status of the memory search tool call. One of ``in_progress``,
+ ``searching``, ``completed``, ``incomplete`` or ``failed``. Required. 
Is one of the following + types: Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], + Literal[\"incomplete\"], Literal[\"failed\"]""" + results: Optional[list["_models.MemorySearchItem"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The results returned from the memory search.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "searching", "completed", "incomplete", "failed"], + created_by: Optional["_models.CreatedBy"] = None, + results: Optional[list["_models.MemorySearchItem"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MEMORY_SEARCH_CALL # type: ignore + + +class MemoryStoreDefinition(_Model): + """Base definition for memory store configurations. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + MemoryStoreDefaultDefinition + + :ivar kind: The kind of the memory store. Required. "default" + :vartype kind: str or ~azure.ai.projects.models.MemoryStoreKind + """ + + __mapping__: dict[str, _Model] = {} + kind: str = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) + """The kind of the memory store. Required. \"default\"""" + + @overload + def __init__( + self, + *, + kind: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreDefaultDefinition(MemoryStoreDefinition, discriminator="default"): + """Default memory store implementation. + + :ivar kind: The kind of the memory store. Required. The default memory store implementation. + :vartype kind: str or ~azure.ai.projects.models.DEFAULT + :ivar chat_model: The name or identifier of the chat completion model deployment used for + memory processing. Required. + :vartype chat_model: str + :ivar embedding_model: The name or identifier of the embedding model deployment used for memory + processing. Required. + :vartype embedding_model: str + :ivar options: Default memory store options. + :vartype options: ~azure.ai.projects.models.MemoryStoreDefaultOptions + """ + + kind: Literal[MemoryStoreKind.DEFAULT] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The kind of the memory store. Required. The default memory store implementation.""" + chat_model: str = rest_field(visibility=["read", "create"]) + """The name or identifier of the chat completion model deployment used for memory processing. + Required.""" + embedding_model: str = rest_field(visibility=["read", "create"]) + """The name or identifier of the embedding model deployment used for memory processing. Required.""" + options: Optional["_models.MemoryStoreDefaultOptions"] = rest_field(visibility=["read", "create"]) + """Default memory store options.""" + + @overload + def __init__( + self, + *, + chat_model: str, + embedding_model: str, + options: Optional["_models.MemoryStoreDefaultOptions"] = None, + ) -> None: ... 
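+
+ # Illustrative sketch, not part of the generated code: a default memory store
+ # definition is built from the chat and embedding model deployment names, with
+ # optional behavior toggles via MemoryStoreDefaultOptions (the overloads above
+ # and below show the keyword and raw-mapping construction paths). The
+ # deployment names are placeholders, e.g.:
+ #
+ #     definition = MemoryStoreDefaultDefinition(
+ #         chat_model="my-chat-deployment",
+ #         embedding_model="my-embedding-deployment",
+ #         options=MemoryStoreDefaultOptions(
+ #             user_profile_enabled=True,
+ #             chat_summary_enabled=True,
+ #         ),
+ #     )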
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = MemoryStoreKind.DEFAULT # type: ignore + + +class MemoryStoreDefaultOptions(_Model): + """Default memory store configurations. + + :ivar user_profile_enabled: Whether to enable user profile extraction and storage. Default is + true. Required. + :vartype user_profile_enabled: bool + :ivar user_profile_details: Specific categories or types of user profile information to extract + and store. + :vartype user_profile_details: str + :ivar chat_summary_enabled: Whether to enable chat summary extraction and storage. Default is + true. Required. + :vartype chat_summary_enabled: bool + """ + + user_profile_enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to enable user profile extraction and storage. Default is true. Required.""" + user_profile_details: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Specific categories or types of user profile information to extract and store.""" + chat_summary_enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to enable chat summary extraction and storage. Default is true. Required.""" + + @overload + def __init__( + self, + *, + user_profile_enabled: bool, + chat_summary_enabled: bool, + user_profile_details: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreDeleteScopeResponse(_Model): + """Response for deleting memories from a scope. + + :ivar object: The object type. Always 'memory_store.scope.deleted'. Required. Default value is + "memory_store.scope.deleted". + :vartype object: str + :ivar name: The name of the memory store. Required. + :vartype name: str + :ivar scope: The scope from which memories were deleted. Required. + :vartype scope: str + :ivar deleted: Whether the deletion operation was successful. Required. + :vartype deleted: bool + """ + + object: Literal["memory_store.scope.deleted"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The object type. Always 'memory_store.scope.deleted'. Required. Default value is + \"memory_store.scope.deleted\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store. Required.""" + scope: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The scope from which memories were deleted. Required.""" + deleted: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the deletion operation was successful. Required.""" + + @overload + def __init__( + self, + *, + name: str, + scope: str, + deleted: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["memory_store.scope.deleted"] = "memory_store.scope.deleted" + + +class MemoryStoreObject(_Model): + """A memory store that can store and retrieve user memories. + + :ivar object: The object type, which is always 'memory_store'. Required. Default value is + "memory_store". + :vartype object: str + :ivar id: The unique identifier of the memory store. Required. + :vartype id: str + :ivar created_at: The Unix timestamp (seconds) when the memory store was created. Required. + :vartype created_at: ~datetime.datetime + :ivar updated_at: The Unix timestamp (seconds) when the memory store was last updated. + Required. + :vartype updated_at: ~datetime.datetime + :ivar name: The name of the memory store. Required. + :vartype name: str + :ivar description: A human-readable description of the memory store. + :vartype description: str + :ivar metadata: Arbitrary key-value metadata to associate with the memory store. + :vartype metadata: dict[str, str] + :ivar definition: The definition of the memory store. Required. + :vartype definition: ~azure.ai.projects.models.MemoryStoreDefinition + """ + + object: Literal["memory_store"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type, which is always 'memory_store'. Required. Default value is \"memory_store\".""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the memory store. Required.""" + created_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """The Unix timestamp (seconds) when the memory store was created. Required.""" + updated_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """The Unix timestamp (seconds) when the memory store was last updated. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the memory store. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable description of the memory store.""" + metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Arbitrary key-value metadata to associate with the memory store.""" + definition: "_models.MemoryStoreDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The definition of the memory store. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + created_at: datetime.datetime, + updated_at: datetime.datetime, + name: str, + definition: "_models.MemoryStoreDefinition", + description: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["memory_store"] = "memory_store" + + +class MemoryStoreOperationUsage(_Model): + """Usage statistics of a memory store operation. + + :ivar embedding_tokens: The number of embedding tokens. Required. 
+ :vartype embedding_tokens: int + :ivar input_tokens: The number of input tokens. Required. + :vartype input_tokens: int + :ivar input_tokens_details: A detailed breakdown of the input tokens. Required. + :vartype input_tokens_details: + ~azure.ai.projects.models.MemoryStoreOperationUsageInputTokensDetails + :ivar output_tokens: The number of output tokens. Required. + :vartype output_tokens: int + :ivar output_tokens_details: A detailed breakdown of the output tokens. Required. + :vartype output_tokens_details: + ~azure.ai.projects.models.MemoryStoreOperationUsageOutputTokensDetails + :ivar total_tokens: The total number of tokens used. Required. + :vartype total_tokens: int + """ + + embedding_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of embedding tokens. Required.""" + input_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of input tokens. Required.""" + input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A detailed breakdown of the input tokens. Required.""" + output_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of output tokens. Required.""" + output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A detailed breakdown of the output tokens. Required.""" + total_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The total number of tokens used. Required.""" + + @overload + def __init__( + self, + *, + embedding_tokens: int, + input_tokens: int, + input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails", + output_tokens: int, + output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails", + total_tokens: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreOperationUsageInputTokensDetails(_Model): # pylint: disable=name-too-long + """MemoryStoreOperationUsageInputTokensDetails. + + :ivar cached_tokens: The number of tokens that were retrieved from the cache. + `More on prompt caching `_. Required. + :vartype cached_tokens: int + """ + + cached_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of tokens that were retrieved from the cache. + `More on prompt caching `_. Required.""" + + @overload + def __init__( + self, + *, + cached_tokens: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreOperationUsageOutputTokensDetails(_Model): # pylint: disable=name-too-long + """MemoryStoreOperationUsageOutputTokensDetails. + + :ivar reasoning_tokens: The number of reasoning tokens. Required. + :vartype reasoning_tokens: int + """ + + reasoning_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of reasoning tokens. 
Required.""" + + @overload + def __init__( + self, + *, + reasoning_tokens: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreSearchResponse(_Model): + """Memory search response. + + :ivar search_id: The unique ID of this search request. Use this value as previous_search_id in + subsequent requests to perform incremental searches. Required. + :vartype search_id: str + :ivar memories: Related memory items found during the search operation. Required. + :vartype memories: list[~azure.ai.projects.models.MemorySearchItem] + :ivar usage: Usage statistics associated with the memory search operation. Required. + :vartype usage: ~azure.ai.projects.models.MemoryStoreOperationUsage + """ + + search_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of this search request. Use this value as previous_search_id in subsequent + requests to perform incremental searches. Required.""" + memories: list["_models.MemorySearchItem"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Related memory items found during the search operation. Required.""" + usage: "_models.MemoryStoreOperationUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Usage statistics associated with the memory search operation. Required.""" + + @overload + def __init__( + self, + *, + search_id: str, + memories: list["_models.MemorySearchItem"], + usage: "_models.MemoryStoreOperationUsage", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreUpdateResponse(_Model): + """Provides the status of a memory store update operation. + + :ivar update_id: The unique ID of this update request. Use this value as previous_update_id in + subsequent requests to perform incremental updates. Required. + :vartype update_id: str + :ivar status: The status of the memory update operation. One of "queued", "in_progress", + "completed", "failed", or "superseded". Required. Known values are: "queued", "in_progress", + "completed", "failed", and "superseded". + :vartype status: str or ~azure.ai.projects.models.MemoryStoreUpdateStatus + :ivar superseded_by: The update_id the operation was superseded by when status is "superseded". + :vartype superseded_by: str + :ivar result: The result of memory store update operation when status is "completed". + :vartype result: ~azure.ai.projects.models.MemoryStoreUpdateResult + :ivar error: Error object that describes the error when status is "failed". + :vartype error: ~azure.ai.projects.models.ApiError + """ + + update_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of this update request. Use this value as previous_update_id in subsequent + requests to perform incremental updates. Required.""" + status: Union[str, "_models.MemoryStoreUpdateStatus"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the memory update operation. One of \"queued\", \"in_progress\", \"completed\", + \"failed\", or \"superseded\". 
Required. Known values are: \"queued\", \"in_progress\", + \"completed\", \"failed\", and \"superseded\".""" + superseded_by: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The update_id the operation was superseded by when status is \"superseded\".""" + result: Optional["_models.MemoryStoreUpdateResult"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The result of memory store update operation when status is \"completed\".""" + error: Optional["_models.ApiError"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Error object that describes the error when status is \"failed\".""" + + @overload + def __init__( + self, + *, + update_id: str, + status: Union[str, "_models.MemoryStoreUpdateStatus"], + superseded_by: Optional[str] = None, + result: Optional["_models.MemoryStoreUpdateResult"] = None, + error: Optional["_models.ApiError"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MemoryStoreUpdateResult(_Model): + """Memory update result. + + :ivar memory_operations: A list of individual memory operations that were performed during the + update. Required. + :vartype memory_operations: list[~azure.ai.projects.models.MemoryOperation] + :ivar usage: Usage statistics associated with the memory update operation. Required. + :vartype usage: ~azure.ai.projects.models.MemoryStoreOperationUsage + """ + + memory_operations: list["_models.MemoryOperation"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A list of individual memory operations that were performed during the update. Required.""" + usage: "_models.MemoryStoreOperationUsage" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Usage statistics associated with the memory update operation. Required.""" + + @overload + def __init__( + self, + *, + memory_operations: list["_models.MemoryOperation"], + usage: "_models.MemoryStoreOperationUsage", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MicrosoftFabricAgentTool(Tool, discriminator="fabric_dataagent_preview"): + """The input definition information for a Microsoft Fabric tool as used to configure an agent. + + :ivar type: The object type, which is always 'fabric_dataagent'. Required. + :vartype type: str or ~azure.ai.projects.models.FABRIC_DATAAGENT_PREVIEW + :ivar fabric_dataagent_preview: The fabric data agent tool parameters. Required. + :vartype fabric_dataagent_preview: ~azure.ai.projects.models.FabricDataAgentToolParameters + """ + + type: Literal[ToolType.FABRIC_DATAAGENT_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'fabric_dataagent'. Required.""" + fabric_dataagent_preview: "_models.FabricDataAgentToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The fabric data agent tool parameters. 
Required.""" + + @overload + def __init__( + self, + *, + fabric_dataagent_preview: "_models.FabricDataAgentToolParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.FABRIC_DATAAGENT_PREVIEW # type: ignore + + +class ModelDeployment(Deployment, discriminator="ModelDeployment"): + """Model Deployment Definition. + + :ivar name: Name of the deployment. Required. + :vartype name: str + :ivar type: The type of the deployment. Required. Model deployment + :vartype type: str or ~azure.ai.projects.models.MODEL_DEPLOYMENT + :ivar model_name: Publisher-specific name of the deployed model. Required. + :vartype model_name: str + :ivar model_version: Publisher-specific version of the deployed model. Required. + :vartype model_version: str + :ivar model_publisher: Name of the deployed model's publisher. Required. + :vartype model_publisher: str + :ivar capabilities: Capabilities of deployed model. Required. + :vartype capabilities: dict[str, str] + :ivar sku: Sku of the model deployment. Required. + :vartype sku: ~azure.ai.projects.models.ModelDeploymentSku + :ivar connection_name: Name of the connection the deployment comes from. + :vartype connection_name: str + """ + + type: Literal[DeploymentType.MODEL_DEPLOYMENT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the deployment. Required. Model deployment""" + model_name: str = rest_field(name="modelName", visibility=["read"]) + """Publisher-specific name of the deployed model. Required.""" + model_version: str = rest_field(name="modelVersion", visibility=["read"]) + """Publisher-specific version of the deployed model. Required.""" + model_publisher: str = rest_field(name="modelPublisher", visibility=["read"]) + """Name of the deployed model's publisher. Required.""" + capabilities: dict[str, str] = rest_field(visibility=["read"]) + """Capabilities of deployed model. Required.""" + sku: "_models.ModelDeploymentSku" = rest_field(visibility=["read"]) + """Sku of the model deployment. Required.""" + connection_name: Optional[str] = rest_field(name="connectionName", visibility=["read"]) + """Name of the connection the deployment comes from.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = DeploymentType.MODEL_DEPLOYMENT # type: ignore + + +class ModelDeploymentSku(_Model): + """Sku information. + + :ivar capacity: Sku capacity. Required. + :vartype capacity: int + :ivar family: Sku family. Required. + :vartype family: str + :ivar name: Sku name. Required. + :vartype name: str + :ivar size: Sku size. Required. + :vartype size: str + :ivar tier: Sku tier. Required. + :vartype tier: str + """ + + capacity: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku capacity. Required.""" + family: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku family. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku name. 
Required.""" + size: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku size. Required.""" + tier: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Sku tier. Required.""" + + @overload + def __init__( + self, + *, + capacity: int, + family: str, + name: str, + size: str, + tier: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MonthlyRecurrenceSchedule(RecurrenceSchedule, discriminator="Monthly"): + """Monthly recurrence schedule. + + :ivar type: Monthly recurrence type. Required. Monthly recurrence pattern. + :vartype type: str or ~azure.ai.projects.models.MONTHLY + :ivar days_of_month: Days of the month for the recurrence schedule. Required. + :vartype days_of_month: list[int] + """ + + type: Literal[RecurrenceType.MONTHLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Monthly recurrence type. Required. Monthly recurrence pattern.""" + days_of_month: list[int] = rest_field( + name="daysOfMonth", visibility=["read", "create", "update", "delete", "query"] + ) + """Days of the month for the recurrence schedule. Required.""" + + @overload + def __init__( + self, + *, + days_of_month: list[int], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = RecurrenceType.MONTHLY # type: ignore + + +class NoAuthenticationCredentials(BaseCredentials, discriminator="None"): + """Credentials that do not require authentication. + + :ivar type: The credential type. Required. No credential + :vartype type: str or ~azure.ai.projects.models.NONE + """ + + type: Literal[CredentialType.NONE] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. No credential""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.NONE # type: ignore + + +class OAuthConsentRequestItemResource(ItemResource, discriminator="oauth_consent_request"): + """Request from the service for the user to perform OAuth consent. + + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar id: Required. + :vartype id: str + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.OAUTH_CONSENT_REQUEST + :ivar consent_link: The link the user can use to perform OAuth consent. Required. + :vartype consent_link: str + :ivar server_label: The server label for the OAuth consent request. Required. 
+ :vartype server_label: str + """ + + type: Literal[ItemType.OAUTH_CONSENT_REQUEST] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + consent_link: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The link the user can use to perform OAuth consent. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The server label for the OAuth consent request. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + consent_link: str, + server_label: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.OAUTH_CONSENT_REQUEST # type: ignore + + +class OneTimeTrigger(Trigger, discriminator="OneTime"): + """One-time trigger. + + :ivar type: Required. One-time trigger. + :vartype type: str or ~azure.ai.projects.models.ONE_TIME + :ivar trigger_at: Date and time for the one-time trigger in ISO 8601 format. Required. + :vartype trigger_at: str + :ivar time_zone: Time zone for the one-time trigger. + :vartype time_zone: str + """ + + type: Literal[TriggerType.ONE_TIME] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. One-time trigger.""" + trigger_at: str = rest_field(name="triggerAt", visibility=["read", "create", "update", "delete", "query"]) + """Date and time for the one-time trigger in ISO 8601 format. Required.""" + time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) + """Time zone for the one-time trigger.""" + + @overload + def __init__( + self, + *, + trigger_at: str, + time_zone: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = TriggerType.ONE_TIME # type: ignore + + +class OpenApiAgentTool(Tool, discriminator="openapi"): + """The input definition information for an OpenAPI tool as used to configure an agent. + + :ivar type: The object type, which is always 'openapi'. Required. + :vartype type: str or ~azure.ai.projects.models.OPENAPI + :ivar openapi: The openapi function definition. Required. + :vartype openapi: ~azure.ai.projects.models.OpenApiFunctionDefinition + """ + + type: Literal[ToolType.OPENAPI] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'openapi'. Required.""" + openapi: "_models.OpenApiFunctionDefinition" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The openapi function definition. Required.""" + + @overload + def __init__( + self, + *, + openapi: "_models.OpenApiFunctionDefinition", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.OPENAPI # type: ignore + + +class OpenApiAuthDetails(_Model): + """authentication details for OpenApiFunctionDefinition. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + OpenApiAnonymousAuthDetails, OpenApiManagedAuthDetails, OpenApiProjectConnectionAuthDetails + + :ivar type: The type of authentication, must be anonymous/project_connection/managed_identity. + Required. Known values are: "anonymous", "project_connection", and "managed_identity". + :vartype type: str or ~azure.ai.projects.models.OpenApiAuthType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The type of authentication, must be anonymous/project_connection/managed_identity. Required. + Known values are: \"anonymous\", \"project_connection\", and \"managed_identity\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OpenApiAnonymousAuthDetails(OpenApiAuthDetails, discriminator="anonymous"): + """Security details for OpenApi anonymous authentication. + + :ivar type: The object type, which is always 'anonymous'. Required. + :vartype type: str or ~azure.ai.projects.models.ANONYMOUS + """ + + type: Literal[OpenApiAuthType.ANONYMOUS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'anonymous'. Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = OpenApiAuthType.ANONYMOUS # type: ignore + + +class OpenApiFunctionDefinition(_Model): + """The input definition information for an openapi function. + + :ivar name: The name of the function to be called. Required. + :vartype name: str + :ivar description: A description of what the function does, used by the model to choose when + and how to call the function. + :vartype description: str + :ivar spec: The openapi function shape, described as a JSON Schema object. Required. + :vartype spec: any + :ivar auth: Open API authentication details. Required. + :vartype auth: ~azure.ai.projects.models.OpenApiAuthDetails + :ivar default_params: List of OpenAPI spec parameters that will use user-provided defaults. + :vartype default_params: list[str] + :ivar functions: List of function definitions used by OpenApi tool. + :vartype functions: list[~azure.ai.projects.models.OpenApiFunctionDefinitionFunction] + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to be called. 
Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the function does, used by the model to choose when and how to call the + function.""" + spec: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The openapi function shape, described as a JSON Schema object. Required.""" + auth: "_models.OpenApiAuthDetails" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Open API authentication details. Required.""" + default_params: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of OpenAPI spec parameters that will use user-provided defaults.""" + functions: Optional[list["_models.OpenApiFunctionDefinitionFunction"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of function definitions used by OpenApi tool.""" + + @overload + def __init__( + self, + *, + name: str, + spec: Any, + auth: "_models.OpenApiAuthDetails", + description: Optional[str] = None, + default_params: Optional[list[str]] = None, + functions: Optional[list["_models.OpenApiFunctionDefinitionFunction"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OpenApiFunctionDefinitionFunction(_Model): + """OpenApiFunctionDefinitionFunction. + + :ivar name: The name of the function to be called. Required. + :vartype name: str + :ivar description: A description of what the function does, used by the model to choose when + and how to call the function. + :vartype description: str + :ivar parameters: The parameters the functions accepts, described as a JSON Schema object. + Required. + :vartype parameters: any + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to be called. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the function does, used by the model to choose when and how to call the + function.""" + parameters: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The parameters the functions accepts, described as a JSON Schema object. Required.""" + + @overload + def __init__( + self, + *, + name: str, + parameters: Any, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OpenApiManagedAuthDetails(OpenApiAuthDetails, discriminator="managed_identity"): + """Security details for OpenApi managed_identity authentication. + + :ivar type: The object type, which is always 'managed_identity'. Required. + :vartype type: str or ~azure.ai.projects.models.MANAGED_IDENTITY + :ivar security_scheme: Connection auth security details. Required. 
+ :vartype security_scheme: ~azure.ai.projects.models.OpenApiManagedSecurityScheme + """ + + type: Literal[OpenApiAuthType.MANAGED_IDENTITY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'managed_identity'. Required.""" + security_scheme: "_models.OpenApiManagedSecurityScheme" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Connection auth security details. Required.""" + + @overload + def __init__( + self, + *, + security_scheme: "_models.OpenApiManagedSecurityScheme", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = OpenApiAuthType.MANAGED_IDENTITY # type: ignore + + +class OpenApiManagedSecurityScheme(_Model): + """Security scheme for OpenApi managed_identity authentication. + + :ivar audience: Authentication scope for managed_identity auth type. Required. + :vartype audience: str + """ + + audience: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Authentication scope for managed_identity auth type. Required.""" + + @overload + def __init__( + self, + *, + audience: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OpenApiProjectConnectionAuthDetails(OpenApiAuthDetails, discriminator="project_connection"): + """Security details for OpenApi project connection authentication. + + :ivar type: The object type, which is always 'project_connection'. Required. + :vartype type: str or ~azure.ai.projects.models.PROJECT_CONNECTION + :ivar security_scheme: Project connection auth security details. Required. + :vartype security_scheme: ~azure.ai.projects.models.OpenApiProjectConnectionSecurityScheme + """ + + type: Literal[OpenApiAuthType.PROJECT_CONNECTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'project_connection'. Required.""" + security_scheme: "_models.OpenApiProjectConnectionSecurityScheme" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Project connection auth security details. Required.""" + + @overload + def __init__( + self, + *, + security_scheme: "_models.OpenApiProjectConnectionSecurityScheme", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = OpenApiAuthType.PROJECT_CONNECTION # type: ignore + + +class OpenApiProjectConnectionSecurityScheme(_Model): + """Security scheme for OpenApi managed_identity authentication. + + :ivar project_connection_id: Project connection id for Project Connection auth type. Required. + :vartype project_connection_id: str + """ + + project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Project connection id for Project Connection auth type. 
Required.""" + + @overload + def __init__( + self, + *, + project_connection_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PagedScheduleRun(_Model): + """Paged collection of ScheduleRun items. + + :ivar value: The ScheduleRun items on this page. Required. + :vartype value: list[~azure.ai.projects.models.ScheduleRun] + :ivar next_link: The link to the next page of items. + :vartype next_link: str + """ + + value: list["_models.ScheduleRun"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ScheduleRun items on this page. Required.""" + next_link: Optional[str] = rest_field(name="nextLink", visibility=["read", "create", "update", "delete", "query"]) + """The link to the next page of items.""" + + @overload + def __init__( + self, + *, + value: list["_models.ScheduleRun"], + next_link: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PendingUploadRequest(_Model): + """Represents a request for a pending upload. + + :ivar pending_upload_id: If PendingUploadId is not provided, a random GUID will be used. + :vartype pending_upload_id: str + :ivar connection_name: Azure Storage Account connection name to use for generating temporary + SAS token. + :vartype connection_name: str + :ivar pending_upload_type: BlobReference is the only supported type. Required. Blob Reference + is the only supported type. + :vartype pending_upload_type: str or ~azure.ai.projects.models.BLOB_REFERENCE + """ + + pending_upload_id: Optional[str] = rest_field( + name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] + ) + """If PendingUploadId is not provided, a random GUID will be used.""" + connection_name: Optional[str] = rest_field( + name="connectionName", visibility=["read", "create", "update", "delete", "query"] + ) + """Azure Storage Account connection name to use for generating temporary SAS token.""" + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE] = rest_field( + name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] + ) + """BlobReference is the only supported type. Required. Blob Reference is the only supported type.""" + + @overload + def __init__( + self, + *, + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE], + pending_upload_id: Optional[str] = None, + connection_name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PendingUploadResponse(_Model): + """Represents the response for a pending upload request. + + :ivar blob_reference: Container-level read, write, list SAS. Required. + :vartype blob_reference: ~azure.ai.projects.models.BlobReference + :ivar pending_upload_id: ID for this upload request. Required. 
+ :vartype pending_upload_id: str + :ivar version: Version of asset to be created if user did not specify version when initially + creating upload. + :vartype version: str + :ivar pending_upload_type: BlobReference is the only supported type. Required. Blob Reference + is the only supported type. + :vartype pending_upload_type: str or ~azure.ai.projects.models.BLOB_REFERENCE + """ + + blob_reference: "_models.BlobReference" = rest_field( + name="blobReference", visibility=["read", "create", "update", "delete", "query"] + ) + """Container-level read, write, list SAS. Required.""" + pending_upload_id: str = rest_field( + name="pendingUploadId", visibility=["read", "create", "update", "delete", "query"] + ) + """ID for this upload request. Required.""" + version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Version of asset to be created if user did not specify version when initially creating upload.""" + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE] = rest_field( + name="pendingUploadType", visibility=["read", "create", "update", "delete", "query"] + ) + """BlobReference is the only supported type. Required. Blob Reference is the only supported type.""" + + @overload + def __init__( + self, + *, + blob_reference: "_models.BlobReference", + pending_upload_id: str, + pending_upload_type: Literal[PendingUploadType.BLOB_REFERENCE], + version: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Prompt(_Model): + """Reference to a prompt template and its variables. + `Learn more `_. + + :ivar id: The unique identifier of the prompt template to use. Required. + :vartype id: str + :ivar version: Optional version of the prompt template. + :vartype version: str + :ivar variables: + :vartype variables: ~azure.ai.projects.models.ResponsePromptVariables + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the prompt template to use. Required.""" + version: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional version of the prompt template.""" + variables: Optional["_models.ResponsePromptVariables"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + version: Optional[str] = None, + variables: Optional["_models.ResponsePromptVariables"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PromptAgentDefinition(AgentDefinition, discriminator="prompt"): + """The prompt agent definition. + + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.PROMPT + :ivar model: The model deployment to use for this agent. Required. + :vartype model: str + :ivar instructions: A system (or developer) message inserted into the model's context. 
+ :vartype instructions: str + :ivar temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 + will make the output more random, while lower values like 0.2 will make it more focused and + deterministic. + We generally recommend altering this or ``top_p`` but not both. + :vartype temperature: float + :ivar top_p: An alternative to sampling with temperature, called nucleus sampling, + where the model considers the results of the tokens with top_p probability + mass. So 0.1 means only the tokens comprising the top 10% probability mass + are considered. + We generally recommend altering this or ``temperature`` but not both. + :vartype top_p: float + :ivar reasoning: + :vartype reasoning: ~azure.ai.projects.models.Reasoning + :ivar tools: An array of tools the model may call while generating a response. You + can specify which tool to use by setting the ``tool_choice`` parameter. + :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar text: Configuration options for a text response from the model. Can be plain text or + structured JSON data. + :vartype text: ~azure.ai.projects.models.PromptAgentDefinitionText + :ivar structured_inputs: Set of structured inputs that can participate in prompt template + substitution or tool argument bindings. + :vartype structured_inputs: dict[str, ~azure.ai.projects.models.StructuredInputDefinition] + """ + + kind: Literal[AgentKind.PROMPT] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + model: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The model deployment to use for this agent. Required.""" + instructions: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A system (or developer) message inserted into the model's context.""" + temperature: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output + more random, while lower values like 0.2 will make it more focused and deterministic. + We generally recommend altering this or ``top_p`` but not both.""" + top_p: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An alternative to sampling with temperature, called nucleus sampling, + where the model considers the results of the tokens with top_p probability + mass. So 0.1 means only the tokens comprising the top 10% probability mass + are considered. + We generally recommend altering this or ``temperature`` but not both.""" + reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of tools the model may call while generating a response. You + can specify which tool to use by setting the ``tool_choice`` parameter.""" + text: Optional["_models.PromptAgentDefinitionText"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Configuration options for a text response from the model. 
Can be plain text or structured JSON + data.""" + structured_inputs: Optional[dict[str, "_models.StructuredInputDefinition"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Set of structured inputs that can participate in prompt template substitution or tool argument + bindings.""" + + @overload + def __init__( + self, + *, + model: str, + rai_config: Optional["_models.RaiConfig"] = None, + instructions: Optional[str] = None, + temperature: Optional[float] = None, + top_p: Optional[float] = None, + reasoning: Optional["_models.Reasoning"] = None, + tools: Optional[list["_models.Tool"]] = None, + text: Optional["_models.PromptAgentDefinitionText"] = None, + structured_inputs: Optional[dict[str, "_models.StructuredInputDefinition"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = AgentKind.PROMPT # type: ignore + + +class PromptAgentDefinitionText(_Model): + """PromptAgentDefinitionText. + + :ivar format: + :vartype format: ~azure.ai.projects.models.ResponseTextFormatConfiguration + """ + + format: Optional["_models.ResponseTextFormatConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + + @overload + def __init__( + self, + *, + format: Optional["_models.ResponseTextFormatConfiguration"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PromptBasedEvaluatorDefinition(EvaluatorDefinition, discriminator="prompt"): + """Prompt-based evaluator. + + :ivar init_parameters: The JSON schema (Draft 2020-12) for the evaluator's input parameters. + This includes parameters like type, properties, required. + :vartype init_parameters: any + :ivar data_schema: The JSON schema (Draft 2020-12) for the evaluator's input data. This + includes parameters like type, properties, required. + :vartype data_schema: any + :ivar metrics: List of output metrics produced by this evaluator. + :vartype metrics: dict[str, ~azure.ai.projects.models.EvaluatorMetric] + :ivar type: Required. Prompt-based definition + :vartype type: str or ~azure.ai.projects.models.PROMPT + :ivar prompt_text: The prompt text used for evaluation. Required. + :vartype prompt_text: str + """ + + type: Literal[EvaluatorDefinitionType.PROMPT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required. Prompt-based definition""" + prompt_text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The prompt text used for evaluation. Required.""" + + @overload + def __init__( + self, + *, + prompt_text: str, + init_parameters: Optional[Any] = None, + data_schema: Optional[Any] = None, + metrics: Optional[dict[str, "_models.EvaluatorMetric"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = EvaluatorDefinitionType.PROMPT # type: ignore + + +class ProtocolVersionRecord(_Model): + """A record mapping for a single protocol and its version. + + :ivar protocol: The protocol type. Required. Known values are: "activity_protocol" and + "responses". + :vartype protocol: str or ~azure.ai.projects.models.AgentProtocol + :ivar version: The version string for the protocol, e.g. 'v0.1.1'. Required. + :vartype version: str + """ + + protocol: Union[str, "_models.AgentProtocol"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The protocol type. Required. Known values are: \"activity_protocol\" and \"responses\".""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version string for the protocol, e.g. 'v0.1.1'. Required.""" + + @overload + def __init__( + self, + *, + protocol: Union[str, "_models.AgentProtocol"], + version: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class RaiConfig(_Model): + """Configuration for Responsible AI (RAI) content filtering and safety features. + + :ivar rai_policy_name: The name of the RAI policy to apply. Required. + :vartype rai_policy_name: str + """ + + rai_policy_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the RAI policy to apply. Required.""" + + @overload + def __init__( + self, + *, + rai_policy_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class RankingOptions(_Model): + """RankingOptions. + + :ivar ranker: The ranker to use for the file search. Is either a Literal["auto"] type or a + Literal["default-2024-11-15"] type. + :vartype ranker: str or str + :ivar score_threshold: The score threshold for the file search, a number between 0 and 1. + Numbers closer to 1 will attempt to return only the most relevant results, but may return fewer + results. + :vartype score_threshold: float + """ + + ranker: Optional[Literal["auto", "default-2024-11-15"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The ranker to use for the file search. Is either a Literal[\"auto\"] type or a + Literal[\"default-2024-11-15\"] type.""" + score_threshold: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The score threshold for the file search, a number between 0 and 1. Numbers closer to 1 will + attempt to return only the most relevant results, but may return fewer results.""" + + @overload + def __init__( + self, + *, + ranker: Optional[Literal["auto", "default-2024-11-15"]] = None, + score_threshold: Optional[float] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Reasoning(_Model): + """**o-series models only** + Configuration options for + `reasoning models `_. + + :ivar effort: Known values are: "low", "medium", and "high". + :vartype effort: str or ~azure.ai.projects.models.ReasoningEffort + :ivar summary: A summary of the reasoning performed by the model. This can be + useful for debugging and understanding the model's reasoning process. + One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: Literal["auto"], + Literal["concise"], Literal["detailed"] + :vartype summary: str or str or str + :ivar generate_summary: **Deprecated:** use ``summary`` instead. + A summary of the reasoning performed by the model. This can be + useful for debugging and understanding the model's reasoning process. + One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: Literal["auto"], + Literal["concise"], Literal["detailed"] + :vartype generate_summary: str or str or str + """ + + effort: Optional[Union[str, "_models.ReasoningEffort"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Known values are: \"low\", \"medium\", and \"high\".""" + summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A summary of the reasoning performed by the model. This can be + useful for debugging and understanding the model's reasoning process. + One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: + Literal[\"auto\"], Literal[\"concise\"], Literal[\"detailed\"]""" + generate_summary: Optional[Literal["auto", "concise", "detailed"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """**Deprecated:** use ``summary`` instead. + A summary of the reasoning performed by the model. This can be + useful for debugging and understanding the model's reasoning process. + One of ``auto``, ``concise``, or ``detailed``. Is one of the following types: + Literal[\"auto\"], Literal[\"concise\"], Literal[\"detailed\"]""" + + @overload + def __init__( + self, + *, + effort: Optional[Union[str, "_models.ReasoningEffort"]] = None, + summary: Optional[Literal["auto", "concise", "detailed"]] = None, + generate_summary: Optional[Literal["auto", "concise", "detailed"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ReasoningItemParam(ItemParam, discriminator="reasoning"): + """A description of the chain of thought used by a reasoning model while generating + a response. Be sure to include these items in your ``input`` to the Responses API + for subsequent turns of a conversation if you are manually + `managing context `_. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.REASONING + :ivar encrypted_content: The encrypted content of the reasoning item - populated when a + response is + generated with ``reasoning.encrypted_content`` in the ``include`` parameter. + :vartype encrypted_content: str + :ivar summary: Reasoning text contents. Required. 
+ :vartype summary: list[~azure.ai.projects.models.ReasoningItemSummaryPart] + """ + + type: Literal[ItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The encrypted content of the reasoning item - populated when a response is + generated with ``reasoning.encrypted_content`` in the ``include`` parameter.""" + summary: list["_models.ReasoningItemSummaryPart"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Reasoning text contents. Required.""" + + @overload + def __init__( + self, + *, + summary: list["_models.ReasoningItemSummaryPart"], + encrypted_content: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.REASONING # type: ignore + + +class ReasoningItemResource(ItemResource, discriminator="reasoning"): + """A description of the chain of thought used by a reasoning model while generating + a response. Be sure to include these items in your ``input`` to the Responses API + for subsequent turns of a conversation if you are manually + `managing context `_. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.REASONING + :ivar encrypted_content: The encrypted content of the reasoning item - populated when a + response is + generated with ``reasoning.encrypted_content`` in the ``include`` parameter. + :vartype encrypted_content: str + :ivar summary: Reasoning text contents. Required. + :vartype summary: list[~azure.ai.projects.models.ReasoningItemSummaryPart] + """ + + type: Literal[ItemType.REASONING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + encrypted_content: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The encrypted content of the reasoning item - populated when a response is + generated with ``reasoning.encrypted_content`` in the ``include`` parameter.""" + summary: list["_models.ReasoningItemSummaryPart"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Reasoning text contents. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + summary: list["_models.ReasoningItemSummaryPart"], + created_by: Optional["_models.CreatedBy"] = None, + encrypted_content: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.REASONING # type: ignore + + +class ReasoningItemSummaryPart(_Model): + """ReasoningItemSummaryPart. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ReasoningItemSummaryTextPart + + :ivar type: Required. 
"summary_text" + :vartype type: str or ~azure.ai.projects.models.ReasoningItemSummaryPartType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. \"summary_text\"""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ReasoningItemSummaryTextPart(ReasoningItemSummaryPart, discriminator="summary_text"): + """ReasoningItemSummaryTextPart. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.SUMMARY_TEXT + :ivar text: Required. + :vartype text: str + """ + + type: Literal[ReasoningItemSummaryPartType.SUMMARY_TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ReasoningItemSummaryPartType.SUMMARY_TEXT # type: ignore + + +class RecurrenceTrigger(Trigger, discriminator="Recurrence"): + """Recurrence based trigger. + + :ivar type: Type of the trigger. Required. Recurrence based trigger. + :vartype type: str or ~azure.ai.projects.models.RECURRENCE + :ivar start_time: Start time for the recurrence schedule in ISO 8601 format. + :vartype start_time: str + :ivar end_time: End time for the recurrence schedule in ISO 8601 format. + :vartype end_time: str + :ivar time_zone: Time zone for the recurrence schedule. + :vartype time_zone: str + :ivar interval: Interval for the recurrence schedule. Required. + :vartype interval: int + :ivar schedule: Recurrence schedule for the recurrence trigger. Required. + :vartype schedule: ~azure.ai.projects.models.RecurrenceSchedule + """ + + type: Literal[TriggerType.RECURRENCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of the trigger. Required. Recurrence based trigger.""" + start_time: Optional[str] = rest_field(name="startTime", visibility=["read", "create", "update", "delete", "query"]) + """Start time for the recurrence schedule in ISO 8601 format.""" + end_time: Optional[str] = rest_field(name="endTime", visibility=["read", "create", "update", "delete", "query"]) + """End time for the recurrence schedule in ISO 8601 format.""" + time_zone: Optional[str] = rest_field(name="timeZone", visibility=["read", "create", "update", "delete", "query"]) + """Time zone for the recurrence schedule.""" + interval: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Interval for the recurrence schedule. Required.""" + schedule: "_models.RecurrenceSchedule" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Recurrence schedule for the recurrence trigger. 
Required.""" + + @overload + def __init__( + self, + *, + interval: int, + schedule: "_models.RecurrenceSchedule", + start_time: Optional[str] = None, + end_time: Optional[str] = None, + time_zone: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = TriggerType.RECURRENCE # type: ignore + + +class RedTeam(_Model): + """Red team details. + + :ivar name: Identifier of the red team run. Required. + :vartype name: str + :ivar display_name: Name of the red-team run. + :vartype display_name: str + :ivar num_turns: Number of simulation rounds. + :vartype num_turns: int + :ivar attack_strategies: List of attack strategies or nested lists of attack strategies. + :vartype attack_strategies: list[str or ~azure.ai.projects.models.AttackStrategy] + :ivar simulation_only: Simulation-only or Simulation + Evaluation. Default false, if true the + scan outputs conversation not evaluation result. + :vartype simulation_only: bool + :ivar risk_categories: List of risk categories to generate attack objectives for. + :vartype risk_categories: list[str or ~azure.ai.projects.models.RiskCategory] + :ivar application_scenario: Application scenario for the red team operation, to generate + scenario specific attacks. + :vartype application_scenario: str + :ivar tags: Red team's tags. Unlike properties, tags are fully mutable. + :vartype tags: dict[str, str] + :ivar properties: Red team's properties. Unlike tags, properties are add-only. Once added, a + property cannot be removed. + :vartype properties: dict[str, str] + :ivar status: Status of the red-team. It is set by service and is read-only. + :vartype status: str + :ivar target: Target configuration for the red-team run. Required. + :vartype target: ~azure.ai.projects.models.TargetConfig + """ + + name: str = rest_field(name="id", visibility=["read"]) + """Identifier of the red team run. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Name of the red-team run.""" + num_turns: Optional[int] = rest_field(name="numTurns", visibility=["read", "create", "update", "delete", "query"]) + """Number of simulation rounds.""" + attack_strategies: Optional[list[Union[str, "_models.AttackStrategy"]]] = rest_field( + name="attackStrategies", visibility=["read", "create", "update", "delete", "query"] + ) + """List of attack strategies or nested lists of attack strategies.""" + simulation_only: Optional[bool] = rest_field( + name="simulationOnly", visibility=["read", "create", "update", "delete", "query"] + ) + """Simulation-only or Simulation + Evaluation. 
Default false, if true the scan outputs + conversation not evaluation result.""" + risk_categories: Optional[list[Union[str, "_models.RiskCategory"]]] = rest_field( + name="riskCategories", visibility=["read", "create", "update", "delete", "query"] + ) + """List of risk categories to generate attack objectives for.""" + application_scenario: Optional[str] = rest_field( + name="applicationScenario", visibility=["read", "create", "update", "delete", "query"] + ) + """Application scenario for the red team operation, to generate scenario specific attacks.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Red team's tags. Unlike properties, tags are fully mutable.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Red team's properties. Unlike tags, properties are add-only. Once added, a property cannot be + removed.""" + status: Optional[str] = rest_field(visibility=["read"]) + """Status of the red-team. It is set by service and is read-only.""" + target: "_models.TargetConfig" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Target configuration for the red-team run. Required.""" + + @overload + def __init__( + self, + *, + target: "_models.TargetConfig", + display_name: Optional[str] = None, + num_turns: Optional[int] = None, + attack_strategies: Optional[list[Union[str, "_models.AttackStrategy"]]] = None, + simulation_only: Optional[bool] = None, + risk_categories: Optional[list[Union[str, "_models.RiskCategory"]]] = None, + application_scenario: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + properties: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Response(_Model): + """Response. + + :ivar metadata: Set of 16 key-value pairs that can be attached to an object. This can be +useful for storing additional information about the object in a structured +format, and querying for objects via API or the dashboard. +Keys are strings with a maximum length of 64 characters. Values are strings +with a maximum length of 512 characters. Required. + :vartype metadata: dict[str, str] + :ivar temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 + will make the output more random, while lower values like 0.2 will make it more focused and + deterministic. +We generally recommend altering this or ``top_p`` but not both. Required. + :vartype temperature: float + :ivar top_p: An alternative to sampling with temperature, called nucleus sampling, +where the model considers the results of the tokens with top_p probability +mass. So 0.1 means only the tokens comprising the top 10% probability mass +are considered. +We generally recommend altering this or ``temperature`` but not both. Required. + :vartype top_p: float + :ivar user: A unique identifier representing your end-user, which can help OpenAI to monitor + and detect abuse. `Learn more `_. Required. + :vartype user: str + :ivar service_tier: Note: service_tier is not applicable to Azure OpenAI. Known values are: + "auto", "default", "flex", "scale", and "priority". 
+ :vartype service_tier: str or ~azure.ai.projects.models.ServiceTier + :ivar top_logprobs: An integer between 0 and 20 specifying the number of most likely tokens to + return at each token position, each with an associated log probability. + :vartype top_logprobs: int + :ivar previous_response_id: The unique ID of the previous response to the model. Use this to +create multi-turn conversations. Learn more about +`conversation state `_. + :vartype previous_response_id: str + :ivar model: The model deployment to use for the creation of this response. + :vartype model: str + :ivar reasoning: + :vartype reasoning: ~azure.ai.projects.models.Reasoning + :ivar background: Whether to run the model response in the background. +`Learn more `_. + :vartype background: bool + :ivar max_output_tokens: An upper bound for the number of tokens that can be generated for a + response, including visible output tokens and `reasoning tokens `_. + :vartype max_output_tokens: int + :ivar max_tool_calls: The maximum number of total calls to built-in tools that can be processed + in a response. This maximum number applies across all built-in tool calls, not per individual + tool. Any further attempts to call a tool by the model will be ignored. + :vartype max_tool_calls: int + :ivar text: Configuration options for a text response from the model. Can be plain +text or structured JSON data. Learn more: + * [Text inputs and outputs](/docs/guides/text) + * [Structured Outputs](/docs/guides/structured-outputs). + :vartype text: ~azure.ai.projects.models.ResponseText + :ivar tools: An array of tools the model may call while generating a response. You +can specify which tool to use by setting the ``tool_choice`` parameter. +The two categories of tools you can provide the model are: + * **Built-in tools**: Tools that are provided by OpenAI that extend the +model's capabilities, like [web search](/docs/guides/tools-web-search) +or [file search](/docs/guides/tools-file-search). Learn more about +[built-in tools](/docs/guides/tools). + * **Function calls (custom tools)**: Functions that are defined by you, +enabling the model to call your own code. Learn more about +[function calling](/docs/guides/function-calling). + :vartype tools: list[~azure.ai.projects.models.Tool] + :ivar tool_choice: How the model should select which tool (or tools) to use when generating +a response. See the ``tools`` parameter to see how to specify which tools +the model can call. Is either a Union[str, "_models.ToolChoiceOptions"] type or a + ToolChoiceObject type. + :vartype tool_choice: str or ~azure.ai.projects.models.ToolChoiceOptions or + ~azure.ai.projects.models.ToolChoiceObject + :ivar prompt: + :vartype prompt: ~azure.ai.projects.models.Prompt + :ivar truncation: The truncation strategy to use for the model response. + * `auto`: If the context of this response and previous ones exceeds +the model's context window size, the model will truncate the +response to fit the context window by dropping input items in the +middle of the conversation. + * `disabled` (default): If a model response will exceed the context window +size for a model, the request will fail with a 400 error. Is either a Literal["auto"] type or a + Literal["disabled"] type. + :vartype truncation: str or str + :ivar id: Unique identifier for this Response. Required. + :vartype id: str + :ivar object: The object type of this resource - always set to ``response``. Required. Default + value is "response". 
+ :vartype object: str + :ivar status: The status of the response generation. One of ``completed``, ``failed``, +``in_progress``, ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types: + Literal["completed"], Literal["failed"], Literal["in_progress"], Literal["cancelled"], + Literal["queued"], Literal["incomplete"] + :vartype status: str or str or str or str or str or str + :ivar created_at: Unix timestamp (in seconds) of when this Response was created. Required. + :vartype created_at: ~datetime.datetime + :ivar error: Required. + :vartype error: ~azure.ai.projects.models.ResponseError + :ivar incomplete_details: Details about why the response is incomplete. Required. + :vartype incomplete_details: ~azure.ai.projects.models.ResponseIncompleteDetails1 + :ivar output: An array of content items generated by the model. + * The length and order of items in the `output` array is dependent +on the model's response. + * Rather than accessing the first item in the `output` array and +assuming it's an `assistant` message with the content generated by +the model, you might consider using the `output_text` property where +supported in SDKs. Required. + :vartype output: list[~azure.ai.projects.models.ItemResource] + :ivar instructions: A system (or developer) message inserted into the model's context. +When using along with ``previous_response_id``, the instructions from a previous +response will not be carried over to the next response. This makes it simple +to swap out system (or developer) messages in new responses. Required. Is either a str type or + a [ItemParam] type. + :vartype instructions: str or list[~azure.ai.projects.models.ItemParam] + :ivar output_text: SDK-only convenience property that contains the aggregated text output +from all ``output_text`` items in the ``output`` array, if any are present. +Supported in the Python and JavaScript SDKs. + :vartype output_text: str + :ivar usage: + :vartype usage: ~azure.ai.projects.models.ResponseUsage + :ivar parallel_tool_calls: Whether to allow the model to run tool calls in parallel. Required. + :vartype parallel_tool_calls: bool + :ivar conversation: Required. + :vartype conversation: ~azure.ai.projects.models.ResponseConversation1 + :ivar agent: The agent used for this response. + :vartype agent: ~azure.ai.projects.models.AgentId + :ivar structured_inputs: The structured inputs to the response that can participate in prompt + template substitution or tool argument bindings. + :vartype structured_inputs: dict[str, any] + """ + + metadata: dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Set of 16 key-value pairs that can be attached to an object. This can be + useful for storing additional information about the object in a structured + format, and querying for objects via API or the dashboard. + Keys are strings with a maximum length of 64 characters. Values are strings + with a maximum length of 512 characters. Required.""" + temperature: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output + more random, while lower values like 0.2 will make it more focused and deterministic. + We generally recommend altering this or ``top_p`` but not both. 
Required.""" + top_p: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An alternative to sampling with temperature, called nucleus sampling, + where the model considers the results of the tokens with top_p probability + mass. So 0.1 means only the tokens comprising the top 10% probability mass + are considered. + We generally recommend altering this or ``temperature`` but not both. Required.""" + user: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A unique identifier representing your end-user, which can help OpenAI to monitor and detect + abuse. `Learn more `_. Required.""" + service_tier: Optional[Union[str, "_models.ServiceTier"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Note: service_tier is not applicable to Azure OpenAI. Known values are: \"auto\", \"default\", + \"flex\", \"scale\", and \"priority\".""" + top_logprobs: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An integer between 0 and 20 specifying the number of most likely tokens to return at each token + position, each with an associated log probability.""" + previous_response_id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique ID of the previous response to the model. Use this to + create multi-turn conversations. Learn more about + `conversation state `_.""" + model: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The model deployment to use for the creation of this response.""" + reasoning: Optional["_models.Reasoning"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + background: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to run the model response in the background. + `Learn more `_.""" + max_output_tokens: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An upper bound for the number of tokens that can be generated for a response, including visible + output tokens and `reasoning tokens `_.""" + max_tool_calls: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The maximum number of total calls to built-in tools that can be processed in a response. This + maximum number applies across all built-in tool calls, not per individual tool. Any further + attempts to call a tool by the model will be ignored.""" + text: Optional["_models.ResponseText"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Configuration options for a text response from the model. Can be plain + text or structured JSON data. Learn more: + * [Text inputs and outputs](/docs/guides/text) + * [Structured Outputs](/docs/guides/structured-outputs).""" + tools: Optional[list["_models.Tool"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of tools the model may call while generating a response. You + can specify which tool to use by setting the ``tool_choice`` parameter. + The two categories of tools you can provide the model are: + * **Built-in tools**: Tools that are provided by OpenAI that extend the + model's capabilities, like [web search](/docs/guides/tools-web-search) + or [file search](/docs/guides/tools-file-search). Learn more about + [built-in tools](/docs/guides/tools). 
+ * **Function calls (custom tools)**: Functions that are defined by you, + enabling the model to call your own code. Learn more about + [function calling](/docs/guides/function-calling).""" + tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceObject"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """How the model should select which tool (or tools) to use when generating + a response. See the ``tools`` parameter to see how to specify which tools + the model can call. Is either a Union[str, \"_models.ToolChoiceOptions\"] type or a + ToolChoiceObject type.""" + prompt: Optional["_models.Prompt"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + truncation: Optional[Literal["auto", "disabled"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The truncation strategy to use for the model response. + * `auto`: If the context of this response and previous ones exceeds + the model's context window size, the model will truncate the + response to fit the context window by dropping input items in the + middle of the conversation. + * `disabled` (default): If a model response will exceed the context window + size for a model, the request will fail with a 400 error. Is either a Literal[\"auto\"] type or + a Literal[\"disabled\"] type.""" + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique identifier for this Response. Required.""" + object: Literal["response"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The object type of this resource - always set to ``response``. Required. Default value is + \"response\".""" + status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the response generation. One of ``completed``, ``failed``, + ``in_progress``, ``cancelled``, ``queued``, or ``incomplete``. Is one of the following types: + Literal[\"completed\"], Literal[\"failed\"], Literal[\"in_progress\"], Literal[\"cancelled\"], + Literal[\"queued\"], Literal[\"incomplete\"]""" + created_at: datetime.datetime = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="unix-timestamp" + ) + """Unix timestamp (in seconds) of when this Response was created. Required.""" + error: "_models.ResponseError" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + incomplete_details: "_models.ResponseIncompleteDetails1" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Details about why the response is incomplete. Required.""" + output: list["_models.ItemResource"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of content items generated by the model. + * The length and order of items in the `output` array is dependent + on the model's response. + * Rather than accessing the first item in the `output` array and + assuming it's an `assistant` message with the content generated by + the model, you might consider using the `output_text` property where + supported in SDKs. Required.""" + instructions: Union[str, list["_models.ItemParam"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A system (or developer) message inserted into the model's context. 
+ When using along with ``previous_response_id``, the instructions from a previous + response will not be carried over to the next response. This makes it simple + to swap out system (or developer) messages in new responses. Required. Is either a str type or + a [ItemParam] type.""" + output_text: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """SDK-only convenience property that contains the aggregated text output + from all ``output_text`` items in the ``output`` array, if any are present. + Supported in the Python and JavaScript SDKs.""" + usage: Optional["_models.ResponseUsage"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + parallel_tool_calls: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to allow the model to run tool calls in parallel. Required.""" + conversation: "_models.ResponseConversation1" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" + agent: Optional["_models.AgentId"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The agent used for this response.""" + structured_inputs: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The structured inputs to the response that can participate in prompt template substitution or + tool argument bindings.""" + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + metadata: dict[str, str], + temperature: float, + top_p: float, + user: str, + id: str, # pylint: disable=redefined-builtin + created_at: datetime.datetime, + error: "_models.ResponseError", + incomplete_details: "_models.ResponseIncompleteDetails1", + output: list["_models.ItemResource"], + instructions: Union[str, list["_models.ItemParam"]], + parallel_tool_calls: bool, + conversation: "_models.ResponseConversation1", + service_tier: Optional[Union[str, "_models.ServiceTier"]] = None, + top_logprobs: Optional[int] = None, + previous_response_id: Optional[str] = None, + model: Optional[str] = None, + reasoning: Optional["_models.Reasoning"] = None, + background: Optional[bool] = None, + max_output_tokens: Optional[int] = None, + max_tool_calls: Optional[int] = None, + text: Optional["_models.ResponseText"] = None, + tools: Optional[list["_models.Tool"]] = None, + tool_choice: Optional[Union[str, "_models.ToolChoiceOptions", "_models.ToolChoiceObject"]] = None, + prompt: Optional["_models.Prompt"] = None, + truncation: Optional[Literal["auto", "disabled"]] = None, + status: Optional[Literal["completed", "failed", "in_progress", "cancelled", "queued", "incomplete"]] = None, + output_text: Optional[str] = None, + usage: Optional["_models.ResponseUsage"] = None, + agent: Optional["_models.AgentId"] = None, + structured_inputs: Optional[dict[str, Any]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.object: Literal["response"] = "response" + + +class ResponseStreamEvent(_Model): + """ResponseStreamEvent. + + You probably want to use the sub-classes and not this class directly. 
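As the `output` field documentation above suggests, callers should prefer the SDK-only `output_text` aggregate over reaching into the first `output` item. A small duck-typed helper can do exactly that and fall back to walking the content parts; this is a minimal sketch that assumes only the attributes documented on this model, not any particular item sub-class.

```python
def collect_output_text(response) -> str:
    """Return the response text, preferring the SDK-only ``output_text`` aggregate."""
    if getattr(response, "output_text", None):
        return response.output_text
    parts = []
    for item in response.output or []:                      # output is a list of ItemResource
        for content in getattr(item, "content", None) or []:
            text = getattr(content, "text", None)           # only text-bearing parts contribute
            if text:
                parts.append(text)
    return "".join(parts)
```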
Known sub-classes are: + ResponseErrorEvent, ResponseCodeInterpreterCallCompletedEvent, + ResponseCodeInterpreterCallInProgressEvent, ResponseCodeInterpreterCallInterpretingEvent, + ResponseCodeInterpreterCallCodeDeltaEvent, ResponseCodeInterpreterCallCodeDoneEvent, + ResponseCompletedEvent, ResponseContentPartAddedEvent, ResponseContentPartDoneEvent, + ResponseCreatedEvent, ResponseFailedEvent, ResponseFileSearchCallCompletedEvent, + ResponseFileSearchCallInProgressEvent, ResponseFileSearchCallSearchingEvent, + ResponseFunctionCallArgumentsDeltaEvent, ResponseFunctionCallArgumentsDoneEvent, + ResponseImageGenCallCompletedEvent, ResponseImageGenCallGeneratingEvent, + ResponseImageGenCallInProgressEvent, ResponseImageGenCallPartialImageEvent, + ResponseInProgressEvent, ResponseIncompleteEvent, ResponseMCPCallArgumentsDeltaEvent, + ResponseMCPCallArgumentsDoneEvent, ResponseMCPCallCompletedEvent, ResponseMCPCallFailedEvent, + ResponseMCPCallInProgressEvent, ResponseMCPListToolsCompletedEvent, + ResponseMCPListToolsFailedEvent, ResponseMCPListToolsInProgressEvent, + ResponseOutputItemAddedEvent, ResponseOutputItemDoneEvent, ResponseTextDeltaEvent, + ResponseTextDoneEvent, ResponseQueuedEvent, ResponseReasoningDeltaEvent, + ResponseReasoningDoneEvent, ResponseReasoningSummaryDeltaEvent, + ResponseReasoningSummaryDoneEvent, ResponseReasoningSummaryPartAddedEvent, + ResponseReasoningSummaryPartDoneEvent, ResponseReasoningSummaryTextDeltaEvent, + ResponseReasoningSummaryTextDoneEvent, ResponseRefusalDeltaEvent, ResponseRefusalDoneEvent, + ResponseWebSearchCallCompletedEvent, ResponseWebSearchCallInProgressEvent, + ResponseWebSearchCallSearchingEvent + + :ivar type: Required. Known values are: "response.audio.delta", "response.audio.done", + "response.audio_transcript.delta", "response.audio_transcript.done", + "response.code_interpreter_call_code.delta", "response.code_interpreter_call_code.done", + "response.code_interpreter_call.completed", "response.code_interpreter_call.in_progress", + "response.code_interpreter_call.interpreting", "response.completed", + "response.content_part.added", "response.content_part.done", "response.created", "error", + "response.file_search_call.completed", "response.file_search_call.in_progress", + "response.file_search_call.searching", "response.function_call_arguments.delta", + "response.function_call_arguments.done", "response.in_progress", "response.failed", + "response.incomplete", "response.output_item.added", "response.output_item.done", + "response.refusal.delta", "response.refusal.done", "response.output_text.annotation.added", + "response.output_text.delta", "response.output_text.done", + "response.reasoning_summary_part.added", "response.reasoning_summary_part.done", + "response.reasoning_summary_text.delta", "response.reasoning_summary_text.done", + "response.web_search_call.completed", "response.web_search_call.in_progress", + "response.web_search_call.searching", "response.image_generation_call.completed", + "response.image_generation_call.generating", "response.image_generation_call.in_progress", + "response.image_generation_call.partial_image", "response.mcp_call.arguments_delta", + "response.mcp_call.arguments_done", "response.mcp_call.completed", "response.mcp_call.failed", + "response.mcp_call.in_progress", "response.mcp_list_tools.completed", + "response.mcp_list_tools.failed", "response.mcp_list_tools.in_progress", "response.queued", + "response.reasoning.delta", "response.reasoning.done", "response.reasoning_summary.delta", and + 
"response.reasoning_summary.done". + :vartype type: str or ~azure.ai.projects.models.ResponseStreamEventType + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"response.audio.delta\", \"response.audio.done\", + \"response.audio_transcript.delta\", \"response.audio_transcript.done\", + \"response.code_interpreter_call_code.delta\", \"response.code_interpreter_call_code.done\", + \"response.code_interpreter_call.completed\", \"response.code_interpreter_call.in_progress\", + \"response.code_interpreter_call.interpreting\", \"response.completed\", + \"response.content_part.added\", \"response.content_part.done\", \"response.created\", + \"error\", \"response.file_search_call.completed\", \"response.file_search_call.in_progress\", + \"response.file_search_call.searching\", \"response.function_call_arguments.delta\", + \"response.function_call_arguments.done\", \"response.in_progress\", \"response.failed\", + \"response.incomplete\", \"response.output_item.added\", \"response.output_item.done\", + \"response.refusal.delta\", \"response.refusal.done\", + \"response.output_text.annotation.added\", \"response.output_text.delta\", + \"response.output_text.done\", \"response.reasoning_summary_part.added\", + \"response.reasoning_summary_part.done\", \"response.reasoning_summary_text.delta\", + \"response.reasoning_summary_text.done\", \"response.web_search_call.completed\", + \"response.web_search_call.in_progress\", \"response.web_search_call.searching\", + \"response.image_generation_call.completed\", \"response.image_generation_call.generating\", + \"response.image_generation_call.in_progress\", + \"response.image_generation_call.partial_image\", \"response.mcp_call.arguments_delta\", + \"response.mcp_call.arguments_done\", \"response.mcp_call.completed\", + \"response.mcp_call.failed\", \"response.mcp_call.in_progress\", + \"response.mcp_list_tools.completed\", \"response.mcp_list_tools.failed\", + \"response.mcp_list_tools.in_progress\", \"response.queued\", \"response.reasoning.delta\", + \"response.reasoning.done\", \"response.reasoning_summary.delta\", and + \"response.reasoning_summary.done\".""" + sequence_number: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The sequence number for this event. Required.""" + + @overload + def __init__( + self, + *, + type: str, + sequence_number: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseCodeInterpreterCallCodeDeltaEvent( + ResponseStreamEvent, discriminator="response.code_interpreter_call_code.delta" +): # pylint: disable=name-too-long + """Emitted when a partial code snippet is streamed by the code interpreter. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.code_interpreter_call_code.delta``. + Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA + :ivar output_index: The index of the output item in the response for which the code is being + streamed. Required. 
+ :vartype output_index: int + :ivar item_id: The unique identifier of the code interpreter tool call item. Required. + :vartype item_id: str + :ivar delta: The partial code snippet being streamed by the code interpreter. Required. + :vartype delta: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.code_interpreter_call_code.delta``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response for which the code is being streamed. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the code interpreter tool call item. Required.""" + delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The partial code snippet being streamed by the code interpreter. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + delta: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DELTA # type: ignore + + +class ResponseCodeInterpreterCallCodeDoneEvent( + ResponseStreamEvent, discriminator="response.code_interpreter_call_code.done" +): + """Emitted when the code snippet is finalized by the code interpreter. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.code_interpreter_call_code.done``. + Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE + :ivar output_index: The index of the output item in the response for which the code is + finalized. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the code interpreter tool call item. Required. + :vartype item_id: str + :ivar code: The final code snippet output by the code interpreter. Required. + :vartype code: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.code_interpreter_call_code.done``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response for which the code is finalized. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the code interpreter tool call item. Required.""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The final code snippet output by the code interpreter. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + code: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_CODE_DONE # type: ignore + + +class ResponseCodeInterpreterCallCompletedEvent( + ResponseStreamEvent, discriminator="response.code_interpreter_call.completed" +): # pylint: disable=name-too-long + """Emitted when the code interpreter call is completed. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.code_interpreter_call.completed``. + Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED + :ivar output_index: The index of the output item in the response for which the code interpreter + call is completed. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the code interpreter tool call item. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.code_interpreter_call.completed``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response for which the code interpreter call is completed. + Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the code interpreter tool call item. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_COMPLETED # type: ignore + + +class ResponseCodeInterpreterCallInProgressEvent( + ResponseStreamEvent, discriminator="response.code_interpreter_call.in_progress" +): # pylint: disable=name-too-long + """Emitted when a code interpreter call is in progress. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.code_interpreter_call.in_progress``. + Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS + :ivar output_index: The index of the output item in the response for which the code interpreter + call is in progress. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the code interpreter tool call item. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.code_interpreter_call.in_progress``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response for which the code interpreter call is in + progress. 
Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the code interpreter tool call item. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_IN_PROGRESS # type: ignore + + +class ResponseCodeInterpreterCallInterpretingEvent( + ResponseStreamEvent, discriminator="response.code_interpreter_call.interpreting" +): # pylint: disable=name-too-long + """Emitted when the code interpreter is actively interpreting the code snippet. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.code_interpreter_call.interpreting``. + Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING + :ivar output_index: The index of the output item in the response for which the code interpreter + is interpreting code. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the code interpreter tool call item. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.code_interpreter_call.interpreting``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response for which the code interpreter is interpreting + code. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the code interpreter tool call item. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_CODE_INTERPRETER_CALL_INTERPRETING # type: ignore + + +class ResponseCompletedEvent(ResponseStreamEvent, discriminator="response.completed"): + """Emitted when the model response is complete. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.completed``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_COMPLETED + :ivar response: Properties of the completed response. Required. + :vartype response: ~azure.ai.projects.models.Response + """ + + type: Literal[ResponseStreamEventType.RESPONSE_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.completed``. 
Required.""" + response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Properties of the completed response. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + response: "_models.Response", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_COMPLETED # type: ignore + + +class ResponseContentPartAddedEvent(ResponseStreamEvent, discriminator="response.content_part.added"): + """Emitted when a new content part is added. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.content_part.added``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_CONTENT_PART_ADDED + :ivar item_id: The ID of the output item that the content part was added to. Required. + :vartype item_id: str + :ivar output_index: The index of the output item that the content part was added to. Required. + :vartype output_index: int + :ivar content_index: The index of the content part that was added. Required. + :vartype content_index: int + :ivar part: The content part that was added. Required. + :vartype part: ~azure.ai.projects.models.ItemContent + """ + + type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.content_part.added``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the content part was added to. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the content part was added to. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the content part that was added. Required.""" + part: "_models.ItemContent" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content part that was added. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + part: "_models.ItemContent", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_ADDED # type: ignore + + +class ResponseContentPartDoneEvent(ResponseStreamEvent, discriminator="response.content_part.done"): + """Emitted when a content part is done. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.content_part.done``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_CONTENT_PART_DONE + :ivar item_id: The ID of the output item that the content part was added to. Required. 
+ :vartype item_id: str + :ivar output_index: The index of the output item that the content part was added to. Required. + :vartype output_index: int + :ivar content_index: The index of the content part that is done. Required. + :vartype content_index: int + :ivar part: The content part that is done. Required. + :vartype part: ~azure.ai.projects.models.ItemContent + """ + + type: Literal[ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.content_part.done``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the content part was added to. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the content part was added to. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the content part that is done. Required.""" + part: "_models.ItemContent" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content part that is done. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + part: "_models.ItemContent", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_CONTENT_PART_DONE # type: ignore + + +class ResponseConversation1(_Model): + """ResponseConversation1. + + :ivar id: Required. + :vartype id: str + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseCreatedEvent(ResponseStreamEvent, discriminator="response.created"): + """An event that is emitted when a response is created. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.created``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_CREATED + :ivar response: The response that was created. Required. + :vartype response: ~azure.ai.projects.models.Response + """ + + type: Literal[ResponseStreamEventType.RESPONSE_CREATED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.created``. Required.""" + response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response that was created. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + response: "_models.Response", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_CREATED # type: ignore + + +class ResponseError(_Model): + """An error object returned when the model fails to generate a Response. + + :ivar code: Required. Known values are: "server_error", "rate_limit_exceeded", + "invalid_prompt", "vector_store_timeout", "invalid_image", "invalid_image_format", + "invalid_base64_image", "invalid_image_url", "image_too_large", "image_too_small", + "image_parse_error", "image_content_policy_violation", "invalid_image_mode", + "image_file_too_large", "unsupported_image_media_type", "empty_image_file", + "failed_to_download_image", and "image_file_not_found". + :vartype code: str or ~azure.ai.projects.models.ResponseErrorCode + :ivar message: A human-readable description of the error. Required. + :vartype message: str + """ + + code: Union[str, "_models.ResponseErrorCode"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required. Known values are: \"server_error\", \"rate_limit_exceeded\", \"invalid_prompt\", + \"vector_store_timeout\", \"invalid_image\", \"invalid_image_format\", + \"invalid_base64_image\", \"invalid_image_url\", \"image_too_large\", \"image_too_small\", + \"image_parse_error\", \"image_content_policy_violation\", \"invalid_image_mode\", + \"image_file_too_large\", \"unsupported_image_media_type\", \"empty_image_file\", + \"failed_to_download_image\", and \"image_file_not_found\".""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable description of the error. Required.""" + + @overload + def __init__( + self, + *, + code: Union[str, "_models.ResponseErrorCode"], + message: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseErrorEvent(ResponseStreamEvent, discriminator="error"): + """Emitted when an error occurs. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``error``. Required. + :vartype type: str or ~azure.ai.projects.models.ERROR + :ivar code: The error code. Required. + :vartype code: str + :ivar message: The error message. Required. + :vartype message: str + :ivar param: The error parameter. Required. + :vartype param: str + """ + + type: Literal[ResponseStreamEventType.ERROR] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``error``. Required.""" + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error code. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error message. Required.""" + param: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error parameter. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + code: str, + message: str, + param: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
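The two error shapes above serve different purposes: `ResponseErrorEvent` (discriminator `"error"`) is a flat code/message/param triple emitted on the stream, while `Response.error` uses the richer `ResponseError` model with codes drawn from `ResponseErrorCode`. A brief sketch; the import path is assumed from the docstring cross-references, and `"server_error"` is one of the known codes listed above.

```python
from azure.ai.projects.models import ResponseError  # import path assumed from the docstrings


def describe_stream_error(event) -> str:
    # Works on ResponseErrorEvent, which exposes code, message, and param directly.
    return f"[{event.code}] {event.message} (param={event.param})"


# Constructing the model form used by Response.error, per its keyword overload.
fallback = ResponseError(code="server_error", message="The model failed to generate a response.")
```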
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.ERROR # type: ignore + + +class ResponseFailedEvent(ResponseStreamEvent, discriminator="response.failed"): + """An event that is emitted when a response fails. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.failed``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_FAILED + :ivar response: The response that failed. Required. + :vartype response: ~azure.ai.projects.models.Response + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.failed``. Required.""" + response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response that failed. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + response: "_models.Response", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_FAILED # type: ignore + + +class ResponseFileSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.file_search_call.completed"): + """Emitted when a file search call is completed (results found). + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.file_search_call.completed``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_COMPLETED + :ivar output_index: The index of the output item that the file search call is initiated. + Required. + :vartype output_index: int + :ivar item_id: The ID of the output item that the file search call is initiated. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.file_search_call.completed``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the file search call is initiated. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the file search call is initiated. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_COMPLETED # type: ignore + + +class ResponseFileSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.file_search_call.in_progress"): + """Emitted when a file search call is initiated. 
+ + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.file_search_call.in_progress``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS + :ivar output_index: The index of the output item that the file search call is initiated. + Required. + :vartype output_index: int + :ivar item_id: The ID of the output item that the file search call is initiated. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.file_search_call.in_progress``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the file search call is initiated. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the file search call is initiated. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_IN_PROGRESS # type: ignore + + +class ResponseFileSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.file_search_call.searching"): + """Emitted when a file search is currently searching. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.file_search_call.searching``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_FILE_SEARCH_CALL_SEARCHING + :ivar output_index: The index of the output item that the file search call is searching. + Required. + :vartype output_index: int + :ivar item_id: The ID of the output item that the file search call is initiated. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.file_search_call.searching``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the file search call is searching. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the file search call is initiated. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING # type: ignore + + +class ResponseFormatJsonSchemaSchema(_Model): + """The schema for the response format, described as a JSON Schema object. + Learn how to build JSON schemas `here `_. + + """ + + +class ResponseFunctionCallArgumentsDeltaEvent( + ResponseStreamEvent, discriminator="response.function_call_arguments.delta" +): + """Emitted when there is a partial function-call arguments delta. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.function_call_arguments.delta``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA + :ivar item_id: The ID of the output item that the function-call arguments delta is added to. + Required. + :vartype item_id: str + :ivar output_index: The index of the output item that the function-call arguments delta is + added to. Required. + :vartype output_index: int + :ivar delta: The function-call arguments delta that is added. Required. + :vartype delta: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.function_call_arguments.delta``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the function-call arguments delta is added to. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the function-call arguments delta is added to. Required.""" + delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The function-call arguments delta that is added. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + delta: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DELTA # type: ignore + + +class ResponseFunctionCallArgumentsDoneEvent( + ResponseStreamEvent, discriminator="response.function_call_arguments.done" +): + """Emitted when function-call arguments are finalized. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE + :ivar item_id: The ID of the item. Required. + :vartype item_id: str + :ivar output_index: The index of the output item. Required. + :vartype output_index: int + :ivar arguments: The function-call arguments. Required. 
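Function-call arguments arrive as a series of `delta` fragments keyed by `item_id`, followed by a `done` event carrying the finalized `arguments` string. A minimal accumulator sketch; the buffering strategy and the assumption that `arguments` parses as JSON are the author's, not guarantees made by the generated models.

```python
import json
from collections import defaultdict
from typing import Optional

# Buffer streamed argument fragments per tool-call item until the matching "done" event arrives.
_pending_args = defaultdict(list)


def on_function_call_event(event) -> Optional[dict]:
    if event.type == "response.function_call_arguments.delta":
        _pending_args[event.item_id].append(event.delta)
        return None
    if event.type == "response.function_call_arguments.done":
        # 'arguments' is the finalized string; the buffered deltas typically concatenate to it.
        _pending_args.pop(event.item_id, None)
        return json.loads(event.arguments)  # assumed to be a JSON object of call arguments
    return None
```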
+ :vartype arguments: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the item. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item. Required.""" + arguments: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The function-call arguments. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + arguments: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_FUNCTION_CALL_ARGUMENTS_DONE # type: ignore + + +class ResponseImageGenCallCompletedEvent(ResponseStreamEvent, discriminator="response.image_generation_call.completed"): + """Emitted when an image generation tool call has completed and the final image is available. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.image_generation_call.completed'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the image generation item being processed. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.image_generation_call.completed'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the image generation item being processed. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_COMPLETED # type: ignore + + +class ResponseImageGenCallGeneratingEvent( + ResponseStreamEvent, discriminator="response.image_generation_call.generating" +): + """Emitted when an image generation tool call is actively generating an image (intermediate + state). + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.image_generation_call.generating'. + Required. 
+ :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_GENERATING + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the image generation item being processed. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.image_generation_call.generating'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the image generation item being processed. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_GENERATING # type: ignore + + +class ResponseImageGenCallInProgressEvent( + ResponseStreamEvent, discriminator="response.image_generation_call.in_progress" +): + """Emitted when an image generation tool call is in progress. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.image_generation_call.in_progress'. + Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the image generation item being processed. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.image_generation_call.in_progress'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the image generation item being processed. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_IN_PROGRESS # type: ignore + + +class ResponseImageGenCallPartialImageEvent( + ResponseStreamEvent, discriminator="response.image_generation_call.partial_image" +): + """Emitted when a partial image is available during image generation streaming. 
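+
+    A minimal, illustrative sketch (not part of the generated model) of consuming this
+    event: it decodes ``partial_image_b64`` and writes the bytes to disk. The
+    ``save_partial`` helper name and the output file name are hypothetical.
+
+    .. code-block:: python
+
+        import base64
+
+        def save_partial(event: "ResponseImageGenCallPartialImageEvent") -> None:
+            # partial_image_b64 holds base64-encoded image bytes; partial_image_index is 0-based.
+            data = base64.b64decode(event.partial_image_b64)
+            # File extension is assumed; the actual format depends on the generation request.
+            path = f"partial_{event.item_id}_{event.partial_image_index}.png"
+            with open(path, "wb") as f:
+                f.write(data)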
+ + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.image_generation_call.partial_image'. + Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the image generation item being processed. Required. + :vartype item_id: str + :ivar partial_image_index: 0-based index for the partial image (backend is 1-based, but this is + 0-based for the user). Required. + :vartype partial_image_index: int + :ivar partial_image_b64: Base64-encoded partial image data, suitable for rendering as an image. + Required. + :vartype partial_image_b64: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.image_generation_call.partial_image'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the image generation item being processed. Required.""" + partial_image_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """0-based index for the partial image (backend is 1-based, but this is 0-based for the user). + Required.""" + partial_image_b64: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Base64-encoded partial image data, suitable for rendering as an image. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + partial_image_index: int, + partial_image_b64: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_IMAGE_GENERATION_CALL_PARTIAL_IMAGE # type: ignore + + +class ResponseIncompleteDetails1(_Model): + """ResponseIncompleteDetails1. + + :ivar reason: The reason why the response is incomplete. Is either a + Literal["max_output_tokens"] type or a Literal["content_filter"] type. + :vartype reason: str or str + """ + + reason: Optional[Literal["max_output_tokens", "content_filter"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The reason why the response is incomplete. Is either a Literal[\"max_output_tokens\"] type or a + Literal[\"content_filter\"] type.""" + + @overload + def __init__( + self, + *, + reason: Optional[Literal["max_output_tokens", "content_filter"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseIncompleteEvent(ResponseStreamEvent, discriminator="response.incomplete"): + """An event that is emitted when a response finishes as incomplete. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.incomplete``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_INCOMPLETE + :ivar response: The response that was incomplete. Required. + :vartype response: ~azure.ai.projects.models.Response + """ + + type: Literal[ResponseStreamEventType.RESPONSE_INCOMPLETE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.incomplete``. Required.""" + response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response that was incomplete. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + response: "_models.Response", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_INCOMPLETE # type: ignore + + +class ResponseInProgressEvent(ResponseStreamEvent, discriminator="response.in_progress"): + """Emitted when the response is in progress. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.in_progress``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_IN_PROGRESS + :ivar response: The response that is in progress. Required. + :vartype response: ~azure.ai.projects.models.Response + """ + + type: Literal[ResponseStreamEventType.RESPONSE_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.in_progress``. Required.""" + response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The response that is in progress. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + response: "_models.Response", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_IN_PROGRESS # type: ignore + + +class ResponseMCPCallArgumentsDeltaEvent(ResponseStreamEvent, discriminator="response.mcp_call.arguments_delta"): + """Emitted when there is a delta (partial update) to the arguments of an MCP tool call. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_call.arguments_delta'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_ARGUMENTS_DELTA + :ivar output_index: The index of the output item in the response's output array. Required. 
+ :vartype output_index: int + :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. + :vartype item_id: str + :ivar delta: The partial update to the arguments for the MCP tool call. Required. + :vartype delta: any + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_call.arguments_delta'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the MCP tool call item being processed. Required.""" + delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The partial update to the arguments for the MCP tool call. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + delta: Any, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DELTA # type: ignore + + +class ResponseMCPCallArgumentsDoneEvent(ResponseStreamEvent, discriminator="response.mcp_call.arguments_done"): + """Emitted when the arguments for an MCP tool call are finalized. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_call.arguments_done'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_ARGUMENTS_DONE + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. + :vartype item_id: str + :ivar arguments: The finalized arguments for the MCP tool call. Required. + :vartype arguments: any + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_call.arguments_done'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the MCP tool call item being processed. Required.""" + arguments: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The finalized arguments for the MCP tool call. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + arguments: Any, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_ARGUMENTS_DONE # type: ignore + + +class ResponseMCPCallCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_call.completed"): + """Emitted when an MCP tool call has completed successfully. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_call.completed'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_COMPLETED + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_call.completed'. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_COMPLETED # type: ignore + + +class ResponseMCPCallFailedEvent(ResponseStreamEvent, discriminator="response.mcp_call.failed"): + """Emitted when an MCP tool call has failed. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_call.failed'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_FAILED + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_call.failed'. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_FAILED # type: ignore + + +class ResponseMCPCallInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_call.in_progress"): + """Emitted when an MCP tool call is in progress. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_call.in_progress'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_CALL_IN_PROGRESS + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar item_id: The unique identifier of the MCP tool call item being processed. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_call.in_progress'. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. 
Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the MCP tool call item being processed. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_CALL_IN_PROGRESS # type: ignore + + +class ResponseMCPListToolsCompletedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.completed"): + """Emitted when the list of available MCP tools has been successfully retrieved. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_list_tools.completed'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_COMPLETED + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_list_tools.completed'. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_COMPLETED # type: ignore + + +class ResponseMCPListToolsFailedEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.failed"): + """Emitted when the attempt to list available MCP tools has failed. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_list_tools.failed'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_FAILED + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_list_tools.failed'. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_FAILED # type: ignore + + +class ResponseMCPListToolsInProgressEvent(ResponseStreamEvent, discriminator="response.mcp_list_tools.in_progress"): + """Emitted when the system is in the process of retrieving the list of available MCP tools. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.mcp_list_tools.in_progress'. Required. 
+ :vartype type: str or ~azure.ai.projects.models.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS + """ + + type: Literal[ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.mcp_list_tools.in_progress'. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_MCP_LIST_TOOLS_IN_PROGRESS # type: ignore + + +class ResponseOutputItemAddedEvent(ResponseStreamEvent, discriminator="response.output_item.added"): + """Emitted when a new output item is added. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.output_item.added``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_ITEM_ADDED + :ivar output_index: The index of the output item that was added. Required. + :vartype output_index: int + :ivar item: The output item that was added. Required. + :vartype item: ~azure.ai.projects.models.ItemResource + """ + + type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.output_item.added``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that was added. Required.""" + item: "_models.ItemResource" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The output item that was added. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item: "_models.ItemResource", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_ADDED # type: ignore + + +class ResponseOutputItemDoneEvent(ResponseStreamEvent, discriminator="response.output_item.done"): + """Emitted when an output item is marked done. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.output_item.done``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_ITEM_DONE + :ivar output_index: The index of the output item that was marked done. Required. + :vartype output_index: int + :ivar item: The output item that was marked done. Required. + :vartype item: ~azure.ai.projects.models.ItemResource + """ + + type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.output_item.done``. 
Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that was marked done. Required.""" + item: "_models.ItemResource" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The output item that was marked done. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item: "_models.ItemResource", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_OUTPUT_ITEM_DONE # type: ignore + + +class ResponsePromptVariables(_Model): + """Optional map of values to substitute in for variables in your + prompt. The substitution values can either be strings, or other + Response input types like images or files. + + """ + + +class ResponseQueuedEvent(ResponseStreamEvent, discriminator="response.queued"): + """Emitted when a response is queued and waiting to be processed. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.queued'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_QUEUED + :ivar response: The full response object that is queued. Required. + :vartype response: ~azure.ai.projects.models.Response + """ + + type: Literal[ResponseStreamEventType.RESPONSE_QUEUED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.queued'. Required.""" + response: "_models.Response" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The full response object that is queued. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + response: "_models.Response", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_QUEUED # type: ignore + + +class ResponseReasoningDeltaEvent(ResponseStreamEvent, discriminator="response.reasoning.delta"): + """Emitted when there is a delta (partial update) to the reasoning content. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.reasoning.delta'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_DELTA + :ivar item_id: The unique identifier of the item for which reasoning is being updated. + Required. + :vartype item_id: str + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar content_index: The index of the reasoning content part within the output item. Required. + :vartype content_index: int + :ivar delta: The partial update to the reasoning content. Required. + :vartype delta: any + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. 
Always 'response.reasoning.delta'. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the item for which reasoning is being updated. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the reasoning content part within the output item. Required.""" + delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The partial update to the reasoning content. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + delta: Any, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_DELTA # type: ignore + + +class ResponseReasoningDoneEvent(ResponseStreamEvent, discriminator="response.reasoning.done"): + """Emitted when the reasoning content is finalized for an item. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.reasoning.done'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_DONE + :ivar item_id: The unique identifier of the item for which reasoning is finalized. Required. + :vartype item_id: str + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar content_index: The index of the reasoning content part within the output item. Required. + :vartype content_index: int + :ivar text: The finalized reasoning text. Required. + :vartype text: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.reasoning.done'. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the item for which reasoning is finalized. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the reasoning content part within the output item. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The finalized reasoning text. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_DONE # type: ignore + + +class ResponseReasoningSummaryDeltaEvent(ResponseStreamEvent, discriminator="response.reasoning_summary.delta"): + """Emitted when there is a delta (partial update) to the reasoning summary content. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.reasoning_summary.delta'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_DELTA + :ivar item_id: The unique identifier of the item for which the reasoning summary is being + updated. Required. + :vartype item_id: str + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar summary_index: The index of the summary part within the output item. Required. + :vartype summary_index: int + :ivar delta: The partial update to the reasoning summary content. Required. + :vartype delta: any + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.reasoning_summary.delta'. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the item for which the reasoning summary is being updated. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the output item. Required.""" + delta: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The partial update to the reasoning summary content. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + delta: Any, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DELTA # type: ignore + + +class ResponseReasoningSummaryDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary.done"): + """Emitted when the reasoning summary content is finalized for an item. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always 'response.reasoning_summary.done'. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_DONE + :ivar item_id: The unique identifier of the item for which the reasoning summary is finalized. + Required. + :vartype item_id: str + :ivar output_index: The index of the output item in the response's output array. Required. + :vartype output_index: int + :ivar summary_index: The index of the summary part within the output item. Required. + :vartype summary_index: int + :ivar text: The finalized reasoning summary text. Required. 
+ :vartype text: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always 'response.reasoning_summary.done'. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The unique identifier of the item for which the reasoning summary is finalized. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item in the response's output array. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the output item. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The finalized reasoning summary text. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_DONE # type: ignore + + +class ResponseReasoningSummaryPartAddedEvent( + ResponseStreamEvent, discriminator="response.reasoning_summary_part.added" +): + """Emitted when a new reasoning summary part is added. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.reasoning_summary_part.added``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_PART_ADDED + :ivar item_id: The ID of the item this summary part is associated with. Required. + :vartype item_id: str + :ivar output_index: The index of the output item this summary part is associated with. + Required. + :vartype output_index: int + :ivar summary_index: The index of the summary part within the reasoning summary. Required. + :vartype summary_index: int + :ivar part: The summary part that was added. Required. + :vartype part: ~azure.ai.projects.models.ReasoningItemSummaryPart + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.reasoning_summary_part.added``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the item this summary part is associated with. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item this summary part is associated with. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the reasoning summary. Required.""" + part: "_models.ReasoningItemSummaryPart" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The summary part that was added. 
Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + part: "_models.ReasoningItemSummaryPart", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_ADDED # type: ignore + + +class ResponseReasoningSummaryPartDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_part.done"): + """Emitted when a reasoning summary part is completed. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.reasoning_summary_part.done``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_PART_DONE + :ivar item_id: The ID of the item this summary part is associated with. Required. + :vartype item_id: str + :ivar output_index: The index of the output item this summary part is associated with. + Required. + :vartype output_index: int + :ivar summary_index: The index of the summary part within the reasoning summary. Required. + :vartype summary_index: int + :ivar part: The completed summary part. Required. + :vartype part: ~azure.ai.projects.models.ReasoningItemSummaryPart + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.reasoning_summary_part.done``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the item this summary part is associated with. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item this summary part is associated with. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the reasoning summary. Required.""" + part: "_models.ReasoningItemSummaryPart" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The completed summary part. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + part: "_models.ReasoningItemSummaryPart", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_PART_DONE # type: ignore + + +class ResponseReasoningSummaryTextDeltaEvent( + ResponseStreamEvent, discriminator="response.reasoning_summary_text.delta" +): + """Emitted when a delta is added to a reasoning summary text. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.reasoning_summary_text.delta``. Required. 
+ :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_TEXT_DELTA + :ivar item_id: The ID of the item this summary text delta is associated with. Required. + :vartype item_id: str + :ivar output_index: The index of the output item this summary text delta is associated with. + Required. + :vartype output_index: int + :ivar summary_index: The index of the summary part within the reasoning summary. Required. + :vartype summary_index: int + :ivar delta: The text delta that was added to the summary. Required. + :vartype delta: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.reasoning_summary_text.delta``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the item this summary text delta is associated with. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item this summary text delta is associated with. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the reasoning summary. Required.""" + delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text delta that was added to the summary. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + delta: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DELTA # type: ignore + + +class ResponseReasoningSummaryTextDoneEvent(ResponseStreamEvent, discriminator="response.reasoning_summary_text.done"): + """Emitted when a reasoning summary text is completed. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.reasoning_summary_text.done``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REASONING_SUMMARY_TEXT_DONE + :ivar item_id: The ID of the item this summary text is associated with. Required. + :vartype item_id: str + :ivar output_index: The index of the output item this summary text is associated with. + Required. + :vartype output_index: int + :ivar summary_index: The index of the summary part within the reasoning summary. Required. + :vartype summary_index: int + :ivar text: The full text of the completed reasoning summary. Required. + :vartype text: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.reasoning_summary_text.done``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the item this summary text is associated with. 
Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item this summary text is associated with. Required.""" + summary_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the summary part within the reasoning summary. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The full text of the completed reasoning summary. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + summary_index: int, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REASONING_SUMMARY_TEXT_DONE # type: ignore + + +class ResponseRefusalDeltaEvent(ResponseStreamEvent, discriminator="response.refusal.delta"): + """Emitted when there is a partial refusal text. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.refusal.delta``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REFUSAL_DELTA + :ivar item_id: The ID of the output item that the refusal text is added to. Required. + :vartype item_id: str + :ivar output_index: The index of the output item that the refusal text is added to. Required. + :vartype output_index: int + :ivar content_index: The index of the content part that the refusal text is added to. Required. + :vartype content_index: int + :ivar delta: The refusal text that is added. Required. + :vartype delta: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.refusal.delta``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the refusal text is added to. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the refusal text is added to. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the content part that the refusal text is added to. Required.""" + delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The refusal text that is added. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + delta: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DELTA # type: ignore + + +class ResponseRefusalDoneEvent(ResponseStreamEvent, discriminator="response.refusal.done"): + """Emitted when refusal text is finalized. + + :ivar sequence_number: The sequence number for this event. Required. 
+ :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.refusal.done``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_REFUSAL_DONE + :ivar item_id: The ID of the output item that the refusal text is finalized. Required. + :vartype item_id: str + :ivar output_index: The index of the output item that the refusal text is finalized. Required. + :vartype output_index: int + :ivar content_index: The index of the content part that the refusal text is finalized. + Required. + :vartype content_index: int + :ivar refusal: The refusal text that is finalized. Required. + :vartype refusal: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_REFUSAL_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.refusal.done``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the refusal text is finalized. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the refusal text is finalized. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the content part that the refusal text is finalized. Required.""" + refusal: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The refusal text that is finalized. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + refusal: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_REFUSAL_DONE # type: ignore + + +class ResponsesMessageItemParam(ItemParam, discriminator="message"): + """A response message item, representing a role and content, as provided as client request + parameters. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ResponsesAssistantMessageItemParam, ResponsesDeveloperMessageItemParam, + ResponsesSystemMessageItemParam, ResponsesUserMessageItemParam + + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar role: The role associated with the message. Required. Known values are: "system", + "developer", "user", and "assistant". + :vartype role: str or ~azure.ai.projects.models.ResponsesMessageRole + """ + + __mapping__: dict[str, _Model] = {} + type: Literal[ItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the responses item, which is always 'message'. Required.""" + role: str = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) + """The role associated with the message. Required. Known values are: \"system\", \"developer\", + \"user\", and \"assistant\".""" + + @overload + def __init__( + self, + *, + role: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MESSAGE # type: ignore + + +class ResponsesAssistantMessageItemParam(ResponsesMessageItemParam, discriminator="assistant"): + """A message parameter item with the ``assistant`` role. + + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar role: The role of the message, which is always ``assistant``. Required. + :vartype role: str or ~azure.ai.projects.models.ASSISTANT + :ivar content: The content associated with the message. Required. Is either a str type or a + [ItemContent] type. + :vartype content: str or list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.ASSISTANT] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``assistant``. Required.""" + content: Union["str", list["_models.ItemContent"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The content associated with the message. Required. Is either a str type or a [ItemContent] + type.""" + + @overload + def __init__( + self, + *, + content: Union[str, list["_models.ItemContent"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.ASSISTANT # type: ignore + + +class ResponsesMessageItemResource(ItemResource, discriminator="message"): + """A response message resource item, representing a role and content, as provided on service + responses. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ResponsesAssistantMessageItemResource, ResponsesDeveloperMessageItemResource, + ResponsesSystemMessageItemResource, ResponsesUserMessageItemResource + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar role: The role associated with the message. Required. Known values are: "system", + "developer", "user", and "assistant". + :vartype role: str or ~azure.ai.projects.models.ResponsesMessageRole + """ + + __mapping__: dict[str, _Model] = {} + type: Literal[ItemType.MESSAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the responses item, which is always 'message'. Required.""" + status: Literal["in_progress", "completed", "incomplete"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. 
Is one of the following + types: Literal[\"in_progress\"], Literal[\"completed\"], Literal[\"incomplete\"]""" + role: str = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) + """The role associated with the message. Required. Known values are: \"system\", \"developer\", + \"user\", and \"assistant\".""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + role: str, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.MESSAGE # type: ignore + + +class ResponsesAssistantMessageItemResource(ResponsesMessageItemResource, discriminator="assistant"): + """A message resource item with the ``assistant`` role. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar role: The role of the message, which is always ``assistant``. Required. + :vartype role: str or ~azure.ai.projects.models.ASSISTANT + :ivar content: The content associated with the message. Required. + :vartype content: list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.ASSISTANT] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``assistant``. Required.""" + content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content associated with the message. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + content: list["_models.ItemContent"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.ASSISTANT # type: ignore + + +class ResponsesDeveloperMessageItemParam(ResponsesMessageItemParam, discriminator="developer"): + """A message parameter item with the ``developer`` role. + + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar role: The role of the message, which is always ``developer``. Required. + :vartype role: str or ~azure.ai.projects.models.DEVELOPER + :ivar content: The content associated with the message. Required. Is either a str type or a + [ItemContent] type. 
+ :vartype content: str or list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.DEVELOPER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``developer``. Required.""" + content: Union["str", list["_models.ItemContent"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The content associated with the message. Required. Is either a str type or a [ItemContent] + type.""" + + @overload + def __init__( + self, + *, + content: Union[str, list["_models.ItemContent"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.DEVELOPER # type: ignore + + +class ResponsesDeveloperMessageItemResource(ResponsesMessageItemResource, discriminator="developer"): + """A message resource item with the ``developer`` role. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar role: The role of the message, which is always ``developer``. Required. + :vartype role: str or ~azure.ai.projects.models.DEVELOPER + :ivar content: The content associated with the message. Required. + :vartype content: list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.DEVELOPER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``developer``. Required.""" + content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content associated with the message. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + content: list["_models.ItemContent"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.DEVELOPER # type: ignore + + +class ResponsesSystemMessageItemParam(ResponsesMessageItemParam, discriminator="system"): + """A message parameter item with the ``system`` role. + + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar role: The role of the message, which is always ``system``. Required. 
+ :vartype role: str or ~azure.ai.projects.models.SYSTEM + :ivar content: The content associated with the message. Required. Is either a str type or a + [ItemContent] type. + :vartype content: str or list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.SYSTEM] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``system``. Required.""" + content: Union["str", list["_models.ItemContent"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The content associated with the message. Required. Is either a str type or a [ItemContent] + type.""" + + @overload + def __init__( + self, + *, + content: Union[str, list["_models.ItemContent"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.SYSTEM # type: ignore + + +class ResponsesSystemMessageItemResource(ResponsesMessageItemResource, discriminator="system"): + """A message resource item with the ``system`` role. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar role: The role of the message, which is always ``system``. Required. + :vartype role: str or ~azure.ai.projects.models.SYSTEM + :ivar content: The content associated with the message. Required. + :vartype content: list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.SYSTEM] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``system``. Required.""" + content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content associated with the message. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + content: list["_models.ItemContent"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.SYSTEM # type: ignore + + +class ResponsesUserMessageItemParam(ResponsesMessageItemParam, discriminator="user"): + """A message parameter item with the ``user`` role. + + :ivar type: The type of the responses item, which is always 'message'. Required. 
+ :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar role: The role of the message, which is always ``user``. Required. + :vartype role: str or ~azure.ai.projects.models.USER + :ivar content: The content associated with the message. Required. Is either a str type or a + [ItemContent] type. + :vartype content: str or list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.USER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``user``. Required.""" + content: Union["str", list["_models.ItemContent"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The content associated with the message. Required. Is either a str type or a [ItemContent] + type.""" + + @overload + def __init__( + self, + *, + content: Union[str, list["_models.ItemContent"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.USER # type: ignore + + +class ResponsesUserMessageItemResource(ResponsesMessageItemResource, discriminator="user"): + """A message resource item with the ``user`` role. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: The type of the responses item, which is always 'message'. Required. + :vartype type: str or ~azure.ai.projects.models.MESSAGE + :ivar status: The status of the item. One of ``in_progress``, ``completed``, or + ``incomplete``. Populated when items are returned via API. Required. Is one of the following + types: Literal["in_progress"], Literal["completed"], Literal["incomplete"] + :vartype status: str or str or str + :ivar role: The role of the message, which is always ``user``. Required. + :vartype role: str or ~azure.ai.projects.models.USER + :ivar content: The content associated with the message. Required. + :vartype content: list[~azure.ai.projects.models.ItemContent] + """ + + __mapping__: dict[str, _Model] = {} + role: Literal[ResponsesMessageRole.USER] = rest_discriminator(name="role", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The role of the message, which is always ``user``. Required.""" + content: list["_models.ItemContent"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The content associated with the message. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "completed", "incomplete"], + content: list["_models.ItemContent"], + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.role = ResponsesMessageRole.USER # type: ignore + + +class ResponseText(_Model): + """ResponseText. 
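+
+ A minimal construction sketch (illustrative only; ``ResponseTextFormatConfigurationText``
+ is the plain-text format configuration model defined later in this file)::
+
+ ResponseText(format=ResponseTextFormatConfigurationText())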
+ + :ivar format: + :vartype format: ~azure.ai.projects.models.ResponseTextFormatConfiguration + """ + + format: Optional["_models.ResponseTextFormatConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + + @overload + def __init__( + self, + *, + format: Optional["_models.ResponseTextFormatConfiguration"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseTextDeltaEvent(ResponseStreamEvent, discriminator="response.output_text.delta"): + """Emitted when there is an additional text delta. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.output_text.delta``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_TEXT_DELTA + :ivar item_id: The ID of the output item that the text delta was added to. Required. + :vartype item_id: str + :ivar output_index: The index of the output item that the text delta was added to. Required. + :vartype output_index: int + :ivar content_index: The index of the content part that the text delta was added to. Required. + :vartype content_index: int + :ivar delta: The text delta that was added. Required. + :vartype delta: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.output_text.delta``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the text delta was added to. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the text delta was added to. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the content part that the text delta was added to. Required.""" + delta: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text delta that was added. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + delta: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DELTA # type: ignore + + +class ResponseTextDoneEvent(ResponseStreamEvent, discriminator="response.output_text.done"): + """Emitted when text content is finalized. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.output_text.done``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_OUTPUT_TEXT_DONE + :ivar item_id: The ID of the output item that the text content is finalized. Required. + :vartype item_id: str + :ivar output_index: The index of the output item that the text content is finalized. Required. 
+ :vartype output_index: int + :ivar content_index: The index of the content part that the text content is finalized. + Required. + :vartype content_index: int + :ivar text: The text content that is finalized. Required. + :vartype text: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.output_text.done``. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The ID of the output item that the text content is finalized. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the text content is finalized. Required.""" + content_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the content part that the text content is finalized. Required.""" + text: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The text content that is finalized. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + item_id: str, + output_index: int, + content_index: int, + text: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_OUTPUT_TEXT_DONE # type: ignore + + +class ResponseTextFormatConfiguration(_Model): + """ResponseTextFormatConfiguration. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ResponseTextFormatConfigurationJsonObject, ResponseTextFormatConfigurationJsonSchema, + ResponseTextFormatConfigurationText + + :ivar type: Required. Known values are: "text", "json_schema", and "json_object". + :vartype type: str or ~azure.ai.projects.models.ResponseTextFormatConfigurationType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"text\", \"json_schema\", and \"json_object\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseTextFormatConfigurationJsonObject( + ResponseTextFormatConfiguration, discriminator="json_object" +): # pylint: disable=name-too-long + """ResponseTextFormatConfigurationJsonObject. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.JSON_OBJECT + """ + + type: Literal[ResponseTextFormatConfigurationType.JSON_OBJECT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseTextFormatConfigurationType.JSON_OBJECT # type: ignore + + +class ResponseTextFormatConfigurationJsonSchema( + ResponseTextFormatConfiguration, discriminator="json_schema" +): # pylint: disable=name-too-long + """JSON Schema response format. Used to generate structured JSON responses. + Learn more about `Structured Outputs `_. + + :ivar type: The type of response format being defined. Always ``json_schema``. Required. + :vartype type: str or ~azure.ai.projects.models.JSON_SCHEMA + :ivar description: A description of what the response format is for, used by the model to + determine how to respond in the format. + :vartype description: str + :ivar name: The name of the response format. Must be a-z, A-Z, 0-9, or contain + underscores and dashes, with a maximum length of 64. Required. + :vartype name: str + :ivar schema: Required. + :vartype schema: ~azure.ai.projects.models.ResponseFormatJsonSchemaSchema + :ivar strict: Whether to enable strict schema adherence when generating the output. + If set to true, the model will always follow the exact schema defined + in the ``schema`` field. Only a subset of JSON Schema is supported when + ``strict`` is ``true``. To learn more, read the `Structured Outputs + guide `_. + :vartype strict: bool + """ + + type: Literal[ResponseTextFormatConfigurationType.JSON_SCHEMA] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of response format being defined. Always ``json_schema``. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of what the response format is for, used by the model to + determine how to respond in the format.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the response format. Must be a-z, A-Z, 0-9, or contain + underscores and dashes, with a maximum length of 64. Required.""" + schema: "_models.ResponseFormatJsonSchemaSchema" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Required.""" + strict: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to enable strict schema adherence when generating the output. + If set to true, the model will always follow the exact schema defined + in the ``schema`` field. Only a subset of JSON Schema is supported when + ``strict`` is ``true``. To learn more, read the `Structured Outputs + guide `_.""" + + @overload + def __init__( + self, + *, + name: str, + schema: "_models.ResponseFormatJsonSchemaSchema", + description: Optional[str] = None, + strict: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseTextFormatConfigurationType.JSON_SCHEMA # type: ignore + + +class ResponseTextFormatConfigurationText(ResponseTextFormatConfiguration, discriminator="text"): + """ResponseTextFormatConfigurationText. + + :ivar type: Required. 
+ :vartype type: str or ~azure.ai.projects.models.TEXT + """ + + type: Literal[ResponseTextFormatConfigurationType.TEXT] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseTextFormatConfigurationType.TEXT # type: ignore + + +class ResponseUsage(_Model): + """Represents token usage details including input tokens, output tokens, + a breakdown of output tokens, and the total tokens used. + + :ivar input_tokens: The number of input tokens. Required. + :vartype input_tokens: int + :ivar input_tokens_details: A detailed breakdown of the input tokens. Required. + :vartype input_tokens_details: + ~azure.ai.projects.models.MemoryStoreOperationUsageInputTokensDetails + :ivar output_tokens: The number of output tokens. Required. + :vartype output_tokens: int + :ivar output_tokens_details: A detailed breakdown of the output tokens. Required. + :vartype output_tokens_details: + ~azure.ai.projects.models.MemoryStoreOperationUsageOutputTokensDetails + :ivar total_tokens: The total number of tokens used. Required. + :vartype total_tokens: int + """ + + input_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of input tokens. Required.""" + input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A detailed breakdown of the input tokens. Required.""" + output_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of output tokens. Required.""" + output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A detailed breakdown of the output tokens. Required.""" + total_tokens: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The total number of tokens used. Required.""" + + @overload + def __init__( + self, + *, + input_tokens: int, + input_tokens_details: "_models.MemoryStoreOperationUsageInputTokensDetails", + output_tokens: int, + output_tokens_details: "_models.MemoryStoreOperationUsageOutputTokensDetails", + total_tokens: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResponseWebSearchCallCompletedEvent(ResponseStreamEvent, discriminator="response.web_search_call.completed"): + """Note: web_search is not yet available via Azure OpenAI. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.web_search_call.completed``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_COMPLETED + :ivar output_index: The index of the output item that the web search call is associated with. + Required. 
+ :vartype output_index: int + :ivar item_id: Unique ID for the output item associated with the web search call. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.web_search_call.completed``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the web search call is associated with. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique ID for the output item associated with the web search call. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_COMPLETED # type: ignore + + +class ResponseWebSearchCallInProgressEvent(ResponseStreamEvent, discriminator="response.web_search_call.in_progress"): + """Note: web_search is not yet available via Azure OpenAI. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.web_search_call.in_progress``. Required. + :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS + :ivar output_index: The index of the output item that the web search call is associated with. + Required. + :vartype output_index: int + :ivar item_id: Unique ID for the output item associated with the web search call. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.web_search_call.in_progress``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the web search call is associated with. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique ID for the output item associated with the web search call. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_IN_PROGRESS # type: ignore + + +class ResponseWebSearchCallSearchingEvent(ResponseStreamEvent, discriminator="response.web_search_call.searching"): + """Note: web_search is not yet available via Azure OpenAI. + + :ivar sequence_number: The sequence number for this event. Required. + :vartype sequence_number: int + :ivar type: The type of the event. Always ``response.web_search_call.searching``. Required. 
+ :vartype type: str or ~azure.ai.projects.models.RESPONSE_WEB_SEARCH_CALL_SEARCHING + :ivar output_index: The index of the output item that the web search call is associated with. + Required. + :vartype output_index: int + :ivar item_id: Unique ID for the output item associated with the web search call. Required. + :vartype item_id: str + """ + + type: Literal[ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the event. Always ``response.web_search_call.searching``. Required.""" + output_index: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The index of the output item that the web search call is associated with. Required.""" + item_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique ID for the output item associated with the web search call. Required.""" + + @overload + def __init__( + self, + *, + sequence_number: int, + output_index: int, + item_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ResponseStreamEventType.RESPONSE_WEB_SEARCH_CALL_SEARCHING # type: ignore + + +class SASCredentials(BaseCredentials, discriminator="SAS"): + """Shared Access Signature (SAS) credential definition. + + :ivar type: The credential type. Required. Shared Access Signature (SAS) credential + :vartype type: str or ~azure.ai.projects.models.SAS + :ivar sas_token: SAS token. + :vartype sas_token: str + """ + + type: Literal[CredentialType.SAS] = rest_discriminator(name="type", visibility=["read"]) # type: ignore + """The credential type. Required. Shared Access Signature (SAS) credential""" + sas_token: Optional[str] = rest_field(name="SAS", visibility=["read"]) + """SAS token.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = CredentialType.SAS # type: ignore + + +class Schedule(_Model): + """Schedule model. + + :ivar id: Identifier of the schedule. Required. + :vartype id: str + :ivar display_name: Name of the schedule. + :vartype display_name: str + :ivar description: Description of the schedule. + :vartype description: str + :ivar enabled: Enabled status of the schedule. Required. + :vartype enabled: bool + :ivar provisioning_status: Provisioning status of the schedule. Known values are: "Creating", + "Updating", "Deleting", "Succeeded", and "Failed". + :vartype provisioning_status: str or ~azure.ai.projects.models.ScheduleProvisioningStatus + :ivar trigger: Trigger for the schedule. Required. + :vartype trigger: ~azure.ai.projects.models.Trigger + :ivar task: Task for the schedule. Required. + :vartype task: ~azure.ai.projects.models.ScheduleTask + :ivar tags: Schedule's tags. Unlike properties, tags are fully mutable. + :vartype tags: dict[str, str] + :ivar properties: Schedule's properties. Unlike tags, properties are add-only. Once added, a + property cannot be removed. 
+ :vartype properties: dict[str, str] + :ivar system_data: System metadata for the resource. Required. + :vartype system_data: dict[str, str] + """ + + id: str = rest_field(visibility=["read"]) + """Identifier of the schedule. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Name of the schedule.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description of the schedule.""" + enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Enabled status of the schedule. Required.""" + provisioning_status: Optional[Union[str, "_models.ScheduleProvisioningStatus"]] = rest_field( + name="provisioningStatus", visibility=["read"] + ) + """Provisioning status of the schedule. Known values are: \"Creating\", \"Updating\", + \"Deleting\", \"Succeeded\", and \"Failed\".""" + trigger: "_models.Trigger" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Trigger for the schedule. Required.""" + task: "_models.ScheduleTask" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Task for the schedule. Required.""" + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Schedule's tags. Unlike properties, tags are fully mutable.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Schedule's properties. Unlike tags, properties are add-only. Once added, a property cannot be + removed.""" + system_data: dict[str, str] = rest_field(name="systemData", visibility=["read"]) + """System metadata for the resource. Required.""" + + @overload + def __init__( + self, + *, + enabled: bool, + trigger: "_models.Trigger", + task: "_models.ScheduleTask", + display_name: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[dict[str, str]] = None, + properties: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ScheduleRun(_Model): + """Schedule run model. + + :ivar id: Identifier of the schedule run. Required. + :vartype id: str + :ivar schedule_id: Identifier of the schedule. Required. + :vartype schedule_id: str + :ivar success: Trigger success status of the schedule run. Required. + :vartype success: bool + :ivar trigger_time: Trigger time of the schedule run. + :vartype trigger_time: str + :ivar error: Error information for the schedule run. + :vartype error: str + :ivar properties: Properties of the schedule run. Required. + :vartype properties: dict[str, str] + """ + + id: str = rest_field(visibility=["read"]) + """Identifier of the schedule run. Required.""" + schedule_id: str = rest_field(name="scheduleId", visibility=["read", "create", "update", "delete", "query"]) + """Identifier of the schedule. Required.""" + success: bool = rest_field(visibility=["read"]) + """Trigger success status of the schedule run. 
Required.""" + trigger_time: Optional[str] = rest_field( + name="triggerTime", visibility=["read", "create", "update", "delete", "query"] + ) + """Trigger time of the schedule run.""" + error: Optional[str] = rest_field(visibility=["read"]) + """Error information for the schedule run.""" + properties: dict[str, str] = rest_field(visibility=["read"]) + """Properties of the schedule run. Required.""" + + @overload + def __init__( + self, + *, + schedule_id: str, + trigger_time: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SharepointAgentTool(Tool, discriminator="sharepoint_grounding_preview"): + """The input definition information for a sharepoint tool as used to configure an agent. + + :ivar type: The object type, which is always 'sharepoint_grounding'. Required. + :vartype type: str or ~azure.ai.projects.models.SHAREPOINT_GROUNDING_PREVIEW + :ivar sharepoint_grounding_preview: The sharepoint grounding tool parameters. Required. + :vartype sharepoint_grounding_preview: + ~azure.ai.projects.models.SharepointGroundingToolParameters + """ + + type: Literal[ToolType.SHAREPOINT_GROUNDING_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The object type, which is always 'sharepoint_grounding'. Required.""" + sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The sharepoint grounding tool parameters. Required.""" + + @overload + def __init__( + self, + *, + sharepoint_grounding_preview: "_models.SharepointGroundingToolParameters", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.SHAREPOINT_GROUNDING_PREVIEW # type: ignore + + +class SharepointGroundingToolParameters(_Model): + """The sharepoint grounding tool parameters. + + :ivar project_connections: The project connections attached to this tool. There can be a + maximum of 1 connection + resource attached to the tool. + :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] + """ + + project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connections attached to this tool. There can be a maximum of 1 connection + resource attached to the tool.""" + + @overload + def __init__( + self, + *, + project_connections: Optional[list["_models.ToolProjectConnection"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StructuredInputDefinition(_Model): + """An structured input that can participate in prompt template substitutions and tool argument + binding. + + :ivar description: A human-readable description of the input. 
+ :vartype description: str + :ivar default_value: The default value for the input if no run-time value is provided. + :vartype default_value: any + :ivar tool_argument_bindings: When provided, the input value is bound to the specified tool + arguments. + :vartype tool_argument_bindings: list[~azure.ai.projects.models.ToolArgumentBinding] + :ivar schema: The JSON schema for the structured input (optional). + :vartype schema: any + :ivar required: Whether the input property is required when the agent is invoked. + :vartype required: bool + """ + + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable description of the input.""" + default_value: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The default value for the input if no run-time value is provided.""" + tool_argument_bindings: Optional[list["_models.ToolArgumentBinding"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """When provided, the input value is bound to the specified tool arguments.""" + schema: Optional[Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The JSON schema for the structured input (optional).""" + required: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether the input property is required when the agent is invoked.""" + + @overload + def __init__( + self, + *, + description: Optional[str] = None, + default_value: Optional[Any] = None, + tool_argument_bindings: Optional[list["_models.ToolArgumentBinding"]] = None, + schema: Optional[Any] = None, + required: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StructuredOutputDefinition(_Model): + """A structured output that can be produced by the agent. + + :ivar name: The name of the structured output. Required. + :vartype name: str + :ivar description: A description of the output to emit. Used by the model to determine when to + emit the output. Required. + :vartype description: str + :ivar schema: The JSON schema for the structured output. Required. + :vartype schema: dict[str, any] + :ivar strict: Whether to enforce strict validation. Default ``true``. Required. + :vartype strict: bool + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the structured output. Required.""" + description: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A description of the output to emit. Used by the model to determine when to emit the output. + Required.""" + schema: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The JSON schema for the structured output. Required.""" + strict: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Whether to enforce strict validation. Default ``true``. Required.""" + + @overload + def __init__( + self, + *, + name: str, + description: str, + schema: dict[str, Any], + strict: bool, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class StructuredOutputsItemResource(ItemResource, discriminator="structured_outputs"): + """StructuredOutputsItemResource. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.STRUCTURED_OUTPUTS + :ivar output: The structured output captured during the response. Required. + :vartype output: any + """ + + type: Literal[ItemType.STRUCTURED_OUTPUTS] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + output: Any = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The structured output captured during the response. Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + output: Any, + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.STRUCTURED_OUTPUTS # type: ignore + + +class TaxonomyCategory(_Model): + """Taxonomy category definition. + + :ivar id: Unique identifier of the taxonomy category. Required. + :vartype id: str + :ivar name: Name of the taxonomy category. Required. + :vartype name: str + :ivar description: Description of the taxonomy category. + :vartype description: str + :ivar risk_category: Risk category associated with this taxonomy category. Required. Known + values are: "HateUnfairness", "Violence", "Sexual", and "SelfHarm". + :vartype risk_category: str or ~azure.ai.projects.models.RiskCategory + :ivar sub_categories: List of taxonomy sub categories. Required. + :vartype sub_categories: list[~azure.ai.projects.models.TaxonomySubCategory] + :ivar properties: Additional properties for the taxonomy category. + :vartype properties: dict[str, str] + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique identifier of the taxonomy category. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the taxonomy category. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description of the taxonomy category.""" + risk_category: Union[str, "_models.RiskCategory"] = rest_field( + name="riskCategory", visibility=["read", "create", "update", "delete", "query"] + ) + """Risk category associated with this taxonomy category. Required. Known values are: + \"HateUnfairness\", \"Violence\", \"Sexual\", and \"SelfHarm\".""" + sub_categories: list["_models.TaxonomySubCategory"] = rest_field( + name="subCategories", visibility=["read", "create", "update", "delete", "query"] + ) + """List of taxonomy sub categories. 
Required.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional properties for the taxonomy category.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + name: str, + risk_category: Union[str, "_models.RiskCategory"], + sub_categories: list["_models.TaxonomySubCategory"], + description: Optional[str] = None, + properties: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TaxonomySubCategory(_Model): + """Taxonomy sub-category definition. + + :ivar id: Unique identifier of the taxonomy sub-category. Required. + :vartype id: str + :ivar name: Name of the taxonomy sub-category. Required. + :vartype name: str + :ivar description: Description of the taxonomy sub-category. + :vartype description: str + :ivar enabled: List of taxonomy items under this sub-category. Required. + :vartype enabled: bool + :ivar properties: Additional properties for the taxonomy sub-category. + :vartype properties: dict[str, str] + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique identifier of the taxonomy sub-category. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the taxonomy sub-category. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description of the taxonomy sub-category.""" + enabled: bool = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of taxonomy items under this sub-category. Required.""" + properties: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional properties for the taxonomy sub-category.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + name: str, + enabled: bool, + description: Optional[str] = None, + properties: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ToolArgumentBinding(_Model): + """ToolArgumentBinding. + + :ivar tool_name: The name of the tool to participate in the argument binding. If not provided, + then all tools with matching arguments will participate in binding. + :vartype tool_name: str + :ivar argument_name: The name of the argument within the tool. Required. + :vartype argument_name: str + """ + + tool_name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool to participate in the argument binding. If not provided, then all tools + with matching arguments will participate in binding.""" + argument_name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the argument within the tool. Required.""" + + @overload + def __init__( + self, + *, + argument_name: str, + tool_name: Optional[str] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ToolChoiceObject(_Model): + """ToolChoiceObject. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ToolChoiceObjectCodeInterpreter, ToolChoiceObjectComputer, ToolChoiceObjectFileSearch, + ToolChoiceObjectFunction, ToolChoiceObjectImageGen, ToolChoiceObjectMCP, + ToolChoiceObjectWebSearch + + :ivar type: Required. Known values are: "file_search", "function", "computer_use_preview", + "web_search_preview", "image_generation", "code_interpreter", and "mcp". + :vartype type: str or ~azure.ai.projects.models.ToolChoiceObjectType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"file_search\", \"function\", \"computer_use_preview\", + \"web_search_preview\", \"image_generation\", \"code_interpreter\", and \"mcp\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ToolChoiceObjectCodeInterpreter(ToolChoiceObject, discriminator="code_interpreter"): + """ToolChoiceObjectCodeInterpreter. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.CODE_INTERPRETER + """ + + type: Literal[ToolChoiceObjectType.CODE_INTERPRETER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.CODE_INTERPRETER # type: ignore + + +class ToolChoiceObjectComputer(ToolChoiceObject, discriminator="computer_use_preview"): + """ToolChoiceObjectComputer. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.COMPUTER + """ + + type: Literal[ToolChoiceObjectType.COMPUTER] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.COMPUTER # type: ignore + + +class ToolChoiceObjectFileSearch(ToolChoiceObject, discriminator="file_search"): + """ToolChoiceObjectFileSearch. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.FILE_SEARCH + """ + + type: Literal[ToolChoiceObjectType.FILE_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... 
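+
+ # Illustrative note: this keyword-only overload takes no arguments, so forcing the
+ # hosted file search tool is simply ToolChoiceObjectFileSearch(); the discriminator
+ # value "file_search" is assigned automatically in the real __init__ below.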
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.FILE_SEARCH # type: ignore + + +class ToolChoiceObjectFunction(ToolChoiceObject, discriminator="function"): + """Use this option to force the model to call a specific function. + + :ivar type: For function calling, the type is always ``function``. Required. + :vartype type: str or ~azure.ai.projects.models.FUNCTION + :ivar name: The name of the function to call. Required. + :vartype name: str + """ + + type: Literal[ToolChoiceObjectType.FUNCTION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """For function calling, the type is always ``function``. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the function to call. Required.""" + + @overload + def __init__( + self, + *, + name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.FUNCTION # type: ignore + + +class ToolChoiceObjectImageGen(ToolChoiceObject, discriminator="image_generation"): + """ToolChoiceObjectImageGen. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.IMAGE_GENERATION + """ + + type: Literal[ToolChoiceObjectType.IMAGE_GENERATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.IMAGE_GENERATION # type: ignore + + +class ToolChoiceObjectMCP(ToolChoiceObject, discriminator="mcp"): + """Use this option to force the model to call a specific tool on a remote MCP server. + + :ivar type: For MCP tools, the type is always ``mcp``. Required. + :vartype type: str or ~azure.ai.projects.models.MCP + :ivar server_label: The label of the MCP server to use. Required. + :vartype server_label: str + :ivar name: The name of the tool to call on the server. + :vartype name: str + """ + + type: Literal[ToolChoiceObjectType.MCP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """For MCP tools, the type is always ``mcp``. Required.""" + server_label: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The label of the MCP server to use. Required.""" + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool to call on the server.""" + + @overload + def __init__( + self, + *, + server_label: str, + name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.MCP # type: ignore + + +class ToolChoiceObjectWebSearch(ToolChoiceObject, discriminator="web_search_preview"): + """Note: web_search is not yet available via Azure OpenAI. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH + """ + + type: Literal[ToolChoiceObjectType.WEB_SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + + @overload + def __init__( + self, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolChoiceObjectType.WEB_SEARCH # type: ignore + + +class ToolDescription(_Model): + """Description of a tool that can be used by an agent. + + :ivar name: The name of the tool. + :vartype name: str + :ivar description: A brief description of the tool's purpose. + :vartype description: str + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the tool.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A brief description of the tool's purpose.""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ToolProjectConnection(_Model): + """A project connection resource. + + :ivar project_connection_id: A project connection in a ToolProjectConnectionList attached to + this tool. Required. + :vartype project_connection_id: str + """ + + project_connection_id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A project connection in a ToolProjectConnectionList attached to this tool. Required.""" + + @overload + def __init__( + self, + *, + project_connection_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ToolProjectConnectionList(_Model): + """A set of project connection resources currently used by either the ``bing_grounding``, + ``fabric_dataagent``, or ``sharepoint_grounding`` tools. + + :ivar project_connections: The project connections attached to this tool. There can be a + maximum of 1 connection + resource attached to the tool. + :vartype project_connections: list[~azure.ai.projects.models.ToolProjectConnection] + """ + + project_connections: Optional[list["_models.ToolProjectConnection"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The project connections attached to this tool. 
There can be a maximum of 1 connection + resource attached to the tool.""" + + @overload + def __init__( + self, + *, + project_connections: Optional[list["_models.ToolProjectConnection"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TopLogProb(_Model): + """The top log probability of a token. + + :ivar token: Required. + :vartype token: str + :ivar logprob: Required. + :vartype logprob: float + :ivar bytes: Required. + :vartype bytes: list[int] + """ + + token: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + logprob: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + bytes: list[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Required.""" + + @overload + def __init__( + self, + *, + token: str, + logprob: float, + bytes: list[int], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class UserProfileMemoryItem(MemoryItem, discriminator="user_profile"): + """A memory item specifically containing user profile information extracted from conversations, + such as preferences, interests, and personal details. + + :ivar memory_id: The unique ID of the memory item. Required. + :vartype memory_id: str + :ivar updated_at: The last update time of the memory item. Required. + :vartype updated_at: ~datetime.datetime + :ivar scope: The namespace that logically groups and isolates memories, such as a user ID. + Required. + :vartype scope: str + :ivar content: The content of the memory. Required. + :vartype content: str + :ivar kind: The kind of the memory item. Required. User profile information extracted from + conversations. + :vartype kind: str or ~azure.ai.projects.models.USER_PROFILE + """ + + kind: Literal[MemoryItemKind.USER_PROFILE] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The kind of the memory item. Required. User profile information extracted from conversations.""" + + @overload + def __init__( + self, + *, + memory_id: str, + updated_at: datetime.datetime, + scope: str, + content: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = MemoryItemKind.USER_PROFILE # type: ignore + + +class VectorStoreFileAttributes(_Model): + """Set of 16 key-value pairs that can be attached to an object. This can be + useful for storing additional information about the object in a structured + format, and querying for objects via API or the dashboard. Keys are strings + with a maximum length of 64 characters. Values are strings with a maximum + length of 512 characters, booleans, or numbers. + + """ + + +class WebSearchAction(_Model): + """WebSearchAction. + + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + WebSearchActionFind, WebSearchActionOpenPage, WebSearchActionSearch + + :ivar type: Required. Known values are: "search", "open_page", and "find". + :vartype type: str or ~azure.ai.projects.models.WebSearchActionType + """ + + __mapping__: dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Required. Known values are: \"search\", \"open_page\", and \"find\".""" + + @overload + def __init__( + self, + *, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class WebSearchActionFind(WebSearchAction, discriminator="find"): + """Action type "find": Searches for a pattern within a loaded page. + + :ivar type: The action type. Required. + :vartype type: str or ~azure.ai.projects.models.FIND + :ivar url: The URL of the page searched for the pattern. Required. + :vartype url: str + :ivar pattern: The pattern or text to search for within the page. Required. + :vartype pattern: str + """ + + type: Literal[WebSearchActionType.FIND] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The action type. Required.""" + url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL of the page searched for the pattern. Required.""" + pattern: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The pattern or text to search for within the page. Required.""" + + @overload + def __init__( + self, + *, + url: str, + pattern: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = WebSearchActionType.FIND # type: ignore + + +class WebSearchActionOpenPage(WebSearchAction, discriminator="open_page"): + """Action type "open_page" - Opens a specific URL from search results. + + :ivar type: The action type. Required. + :vartype type: str or ~azure.ai.projects.models.OPEN_PAGE + :ivar url: The URL opened by the model. Required. + :vartype url: str + """ + + type: Literal[WebSearchActionType.OPEN_PAGE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The action type. Required.""" + url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The URL opened by the model. Required.""" + + @overload + def __init__( + self, + *, + url: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = WebSearchActionType.OPEN_PAGE # type: ignore + + +class WebSearchActionSearch(WebSearchAction, discriminator="search"): + """Action type "search" - Performs a web search query. + + :ivar type: The action type. Required. + :vartype type: str or ~azure.ai.projects.models.SEARCH + :ivar query: The search query. Required. 
+ :vartype query: str + """ + + type: Literal[WebSearchActionType.SEARCH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The action type. Required.""" + query: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The search query. Required.""" + + @overload + def __init__( + self, + *, + query: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = WebSearchActionType.SEARCH # type: ignore + + +class WebSearchPreviewTool(Tool, discriminator="web_search_preview"): + """Note: web_search is not yet available via Azure OpenAI. + + :ivar type: The type of the web search tool. One of ``web_search_preview`` or + ``web_search_preview_2025_03_11``. Required. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_PREVIEW + :ivar user_location: The user's location. + :vartype user_location: ~azure.ai.projects.models.Location + :ivar search_context_size: High level guidance for the amount of context window space to use + for the search. One of ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of + the following types: Literal["low"], Literal["medium"], Literal["high"] + :vartype search_context_size: str or str or str + """ + + type: Literal[ToolType.WEB_SEARCH_PREVIEW] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The type of the web search tool. One of ``web_search_preview`` or + ``web_search_preview_2025_03_11``. Required.""" + user_location: Optional["_models.Location"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The user's location.""" + search_context_size: Optional[Literal["low", "medium", "high"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """High level guidance for the amount of context window space to use for the search. One of + ``low``, ``medium``, or ``high``. ``medium`` is the default. Is one of the following types: + Literal[\"low\"], Literal[\"medium\"], Literal[\"high\"]""" + + @overload + def __init__( + self, + *, + user_location: Optional["_models.Location"] = None, + search_context_size: Optional[Literal["low", "medium", "high"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ToolType.WEB_SEARCH_PREVIEW # type: ignore + + +class WebSearchToolCallItemParam(ItemParam, discriminator="web_search_call"): + """The results of a web search tool call. See the + `web search guide `_ for more information. + + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL + :ivar action: An object describing the specific action taken in this web search call. + Includes details on how the model used the web (search, open_page, find). Required. 
+ :vartype action: ~azure.ai.projects.models.WebSearchAction + """ + + type: Literal[ItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + action: "_models.WebSearchAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An object describing the specific action taken in this web search call. + Includes details on how the model used the web (search, open_page, find). Required.""" + + @overload + def __init__( + self, + *, + action: "_models.WebSearchAction", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.WEB_SEARCH_CALL # type: ignore + + +class WebSearchToolCallItemResource(ItemResource, discriminator="web_search_call"): + """The results of a web search tool call. See the + `web search guide `_ for more information. + + :ivar id: Required. + :vartype id: str + :ivar created_by: The information about the creator of the item. + :vartype created_by: ~azure.ai.projects.models.CreatedBy + :ivar type: Required. + :vartype type: str or ~azure.ai.projects.models.WEB_SEARCH_CALL + :ivar status: The status of the web search tool call. Required. Is one of the following types: + Literal["in_progress"], Literal["searching"], Literal["completed"], Literal["failed"] + :vartype status: str or str or str or str + :ivar action: An object describing the specific action taken in this web search call. + Includes details on how the model used the web (search, open_page, find). Required. + :vartype action: ~azure.ai.projects.models.WebSearchAction + """ + + type: Literal[ItemType.WEB_SEARCH_CALL] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + status: Literal["in_progress", "searching", "completed", "failed"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The status of the web search tool call. Required. Is one of the following types: + Literal[\"in_progress\"], Literal[\"searching\"], Literal[\"completed\"], Literal[\"failed\"]""" + action: "_models.WebSearchAction" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An object describing the specific action taken in this web search call. + Includes details on how the model used the web (search, open_page, find). Required.""" + + @overload + def __init__( + self, + *, + id: str, # pylint: disable=redefined-builtin + status: Literal["in_progress", "searching", "completed", "failed"], + action: "_models.WebSearchAction", + created_by: Optional["_models.CreatedBy"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = ItemType.WEB_SEARCH_CALL # type: ignore + + +class WeeklyRecurrenceSchedule(RecurrenceSchedule, discriminator="Weekly"): + """Weekly recurrence schedule. + + :ivar type: Weekly recurrence type. Required. Weekly recurrence pattern. + :vartype type: str or ~azure.ai.projects.models.WEEKLY + :ivar days_of_week: Days of the week for the recurrence schedule. Required. 
+ :vartype days_of_week: list[str or ~azure.ai.projects.models.DayOfWeek] + """ + + type: Literal[RecurrenceType.WEEKLY] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Weekly recurrence type. Required. Weekly recurrence pattern.""" + days_of_week: list[Union[str, "_models.DayOfWeek"]] = rest_field( + name="daysOfWeek", visibility=["read", "create", "update", "delete", "query"] + ) + """Days of the week for the recurrence schedule. Required.""" + + @overload + def __init__( + self, + *, + days_of_week: list[Union[str, "_models.DayOfWeek"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.type = RecurrenceType.WEEKLY # type: ignore + + +class WorkflowDefinition(AgentDefinition, discriminator="workflow"): + """The workflow specification in CSDL format. + + :ivar rai_config: Configuration for Responsible AI (RAI) content filtering and safety features. + :vartype rai_config: ~azure.ai.projects.models.RaiConfig + :ivar kind: Required. + :vartype kind: str or ~azure.ai.projects.models.WORKFLOW + :ivar trigger: (Deprecated) The CSDL trigger definition. Use ``workflow`` property instead to + send CSDL yaml definition inline. + :vartype trigger: dict[str, any] + :ivar workflow: The CSDL YAML definition of the workflow. + :vartype workflow: str + """ + + kind: Literal[AgentKind.WORKFLOW] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Required.""" + trigger: Optional[dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """(Deprecated) The CSDL trigger definition. Use ``workflow`` property instead to send CSDL yaml + definition inline.""" + workflow: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The CSDL YAML definition of the workflow.""" + + @overload + def __init__( + self, + *, + rai_config: Optional["_models.RaiConfig"] = None, + trigger: Optional[dict[str, Any]] = None, + workflow: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind = AgentKind.WORKFLOW # type: ignore diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch.py new file mode 100644 index 000000000000..6cd95db87150 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch.py @@ -0,0 +1,39 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List, Dict +from ._patch_evaluations import EvaluatorIds +from ._models import CustomCredential as CustomCredentialGenerated + + +class CustomCredential(CustomCredentialGenerated): + """Custom credential definition. + + :ivar type: The credential type. 
Always equals CredentialType.CUSTOM. Required. + :vartype type: str or ~azure.ai.projects.models.CredentialType + :ivar credential_keys: The secret custom credential keys. Required. + :vartype credential_keys: dict[str, str] + """ + + credential_keys: Dict[str, str] = {} + """The secret custom credential keys. Required.""" + + +__all__: List[str] = [ + "EvaluatorIds", + "CustomCredential", +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch_evaluations.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch_evaluations.py new file mode 100644 index 000000000000..d362c28d0d8a --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch_evaluations.py @@ -0,0 +1,48 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from enum import Enum + +from azure.core import CaseInsensitiveEnumMeta + + +class EvaluatorIds(str, Enum, metaclass=CaseInsensitiveEnumMeta): + RELEVANCE = "azureai://built-in/evaluators/relevance" + HATE_UNFAIRNESS = "azureai://built-in/evaluators/hate_unfairness" + VIOLENCE = "azureai://built-in/evaluators/violence" + GROUNDEDNESS = "azureai://built-in/evaluators/groundedness" + GROUNDEDNESS_PRO = "azureai://built-in/evaluators/groundedness_pro" + BLEU_SCORE = "azureai://built-in/evaluators/bleu_score" + CODE_VULNERABILITY = "azureai://built-in/evaluators/code_vulnerability" + COHERENCE = "azureai://built-in/evaluators/coherence" + CONTENT_SAFETY = "azureai://built-in/evaluators/content_safety" + F1_SCORE = "azureai://built-in/evaluators/f1_score" + FLUENCY = "azureai://built-in/evaluators/fluency" + GLEU_SCORE = "azureai://built-in/evaluators/gleu_score" + INDIRECT_ATTACK = "azureai://built-in/evaluators/indirect_attack" + INTENT_RESOLUTION = "azureai://built-in/evaluators/intent_resolution" + METEOR_SCORE = "azureai://built-in/evaluators/meteor_score" + PROTECTED_MATERIAL = "azureai://built-in/evaluators/protected_material" + RETRIEVAL = "azureai://built-in/evaluators/retrieval" + ROUGE_SCORE = "azureai://built-in/evaluators/rouge_score" + SELF_HARM = "azureai://built-in/evaluators/self_harm" + SEXUAL = "azureai://built-in/evaluators/sexual" + SIMILARITY = "azureai://built-in/evaluators/similarity" + QA = "azureai://built-in/evaluators/qa" + DOCUMENT_RETRIEVAL = "azureai://built-in/evaluators/document_retrieval" + TASK_ADHERENCE = "azureai://built-in/evaluators/task_adherence" + TOOL_CALL_ACCURACY = "azureai://built-in/evaluators/tool_call_accuracy" + UNGROUNDED_ATTRIBUTES = "azureai://built-in/evaluators/ungrounded_attributes" + RESPONSE_COMPLETENESS = "azureai://built-in/evaluators/response_completeness" + # AOAI Graders + LABEL_GRADER = "azureai://built-in/evaluators/azure-openai/label_grader" + STRING_CHECK_GRADER = "azureai://built-in/evaluators/azure-openai/string_check_grader" + TEXT_SIMILARITY_GRADER = 
"azureai://built-in/evaluators/azure-openai/text_similarity_grader" + GENERAL_GRADER = "azureai://built-in/evaluators/azure-openai/custom_grader" + SCORE_MODEL_GRADER = "azureai://built-in/evaluators/azure-openai/score_model_grader" diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/__init__.py new file mode 100644 index 000000000000..8026245c2abc --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/model_base.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/model_base.py new file mode 100644 index 000000000000..03b8c4ce34a0 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/model_base.py @@ -0,0 +1,1237 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except, import-error, no-value-for-parameter + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from collections.abc import MutableMapping +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null +from azure.core.rest import HttpResponse + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S'
+
+    Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
+
+    :param timedelta td: The timedelta to convert
+    :rtype: str
+    :return: ISO8601 version of this timedelta
+    """
+
+    # Split seconds to larger units
+    seconds = td.total_seconds()
+    minutes, seconds = divmod(seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    days, hours = divmod(hours, 24)
+
+    days, hours, minutes = list(map(int, (days, hours, minutes)))
+    seconds = round(seconds, 6)
+
+    # Build date
+    date_str = ""
+    if days:
+        date_str = "%sD" % days
+
+    if hours or minutes or seconds:
+        # Build time
+        time_str = "T"
+
+        # Hours
+        bigger_exists = date_str or hours
+        if bigger_exists:
+            time_str += "{:02}H".format(hours)
+
+        # Minutes
+        bigger_exists = bigger_exists or minutes
+        if bigger_exists:
+            time_str += "{:02}M".format(minutes)
+
+        # Seconds
+        try:
+            if seconds.is_integer():
+                seconds_string = "{:02}".format(int(seconds))
+            else:
+                # 9 chars long w/ leading 0, 6 digits after decimal
+                seconds_string = "%09.6f" % seconds
+            # Remove trailing zeros
+            seconds_string = seconds_string.rstrip("0")
+        except AttributeError:  # int.is_integer() raises
+            seconds_string = "{:02}".format(seconds)
+
+        time_str += "{}S".format(seconds_string)
+    else:
+        time_str = ""
+
+    return "P" + date_str + time_str
+
+
+def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
+    encoded = base64.b64encode(o).decode()
+    if format == "base64url":
+        return encoded.strip("=").replace("+", "-").replace("/", "_")
+    return encoded
+
+
+def _serialize_datetime(o, format: typing.Optional[str] = None):
+    if hasattr(o, "year") and hasattr(o, "hour"):
+        if format == "rfc7231":
+            return email.utils.format_datetime(o, usegmt=True)
+        if format == "unix-timestamp":
+            return int(calendar.timegm(o.utctimetuple()))
+
+        # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set)
+        if not o.tzinfo:
+            iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat()
+        else:
+            iso_formatted = o.astimezone(TZ_UTC).isoformat()
+        # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
+        return iso_formatted.replace("+00:00", "Z")
+    # Next try datetime.date or datetime.time
+    return o.isoformat()
+
+
+def _is_readonly(p):
+    try:
+        return p._visibility == ["read"]
+    except AttributeError:
+        return False
+
+
+class SdkJSONEncoder(JSONEncoder):
+    """A JSON encoder that's capable of serializing datetime objects and bytes."""
+
+    def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.exclude_readonly = exclude_readonly
+        self.format = format
+
+    def default(self, o):  # pylint: disable=too-many-return-statements
+        if _is_model(o):
+            if self.exclude_readonly:
+                readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
+                return {k: v for k, v in o.items() if k not in readonly_props}
+            return dict(o.items())
+        try:
+            return super(SdkJSONEncoder, self).default(o)
+        except TypeError:
+            if isinstance(o, _Null):
+                return None
+            if isinstance(o, decimal.Decimal):
+                return float(o)
+            if isinstance(o, (bytes, bytearray)):
+                return _serialize_bytes(o, self.format)
+            try:
+                # First try datetime.datetime
+                return _serialize_datetime(o, self.format)
+            except AttributeError:
+                pass
+            # Last, try datetime.timedelta
+            try:
+                return _timedelta_as_isostr(o)
+            except AttributeError:
+                # This will be raised when it hits value.total_seconds
in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. 
+ :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + :rtype: 
~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ + return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. + """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. 
+ :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + 
xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. + raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) 
+ xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. + :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: list[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: + return sorted( + types, + key=lambda 
x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? 
+    if getattr(annotation, "__origin__", None) is typing.Union:
+        # initial ordering is we make `string` the last deserialization option, because it is often the most generic
+        deserializers = [
+            _get_deserialize_callable_from_annotation(arg, module, rf)
+            for arg in _sorted_annotations(annotation.__args__)  # pyright: ignore
+        ]
+
+        return functools.partial(_deserialize_with_union, deserializers)
+
+    try:
+        annotation_name = (
+            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
+        )
+        if annotation_name.lower() == "dict":
+            value_deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[1], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(
+                _deserialize_dict,
+                value_deserializer,
+                module,
+            )
+    except (AttributeError, IndexError):
+        pass
+    try:
+        annotation_name = (
+            annotation.__name__ if hasattr(annotation, "__name__") else annotation._name  # pyright: ignore
+        )
+        if annotation_name.lower() in ["list", "set", "tuple", "sequence"]:
+            if len(annotation.__args__) > 1:  # pyright: ignore
+                entry_deserializers = [
+                    _get_deserialize_callable_from_annotation(dt, module, rf)
+                    for dt in annotation.__args__  # pyright: ignore
+                ]
+                return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
+            deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[0], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(_deserialize_sequence, deserializer, module)
+    except (TypeError, IndexError, AttributeError, SyntaxError):
+        pass
+
+    def _deserialize_default(
+        deserializer,
+        obj,
+    ):
+        if obj is None:
+            return obj
+        try:
+            return _deserialize_with_callable(deserializer, obj)
+        except Exception:
+            pass
+        return obj
+
+    if get_deserializer(annotation, rf):
+        return functools.partial(_deserialize_default, get_deserializer(annotation, rf))
+
+    return functools.partial(_deserialize_default, annotation)
+
+
+def _deserialize_with_callable(
+    deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
+    value: typing.Any,
+):  # pylint: disable=too-many-return-statements
+    try:
+        if value is None or isinstance(value, _Null):
+            return None
+        if isinstance(value, ET.Element):
+            if deserializer is str:
+                return value.text or ""
+            if deserializer is int:
+                return int(value.text) if value.text else None
+            if deserializer is float:
+                return float(value.text) if value.text else None
+            if deserializer is bool:
+                return value.text == "true" if value.text else None
+        if deserializer is None:
+            return value
+        if deserializer in [int, float, bool]:
+            return deserializer(value)
+        if isinstance(deserializer, CaseInsensitiveEnumMeta):
+            try:
+                return deserializer(value)
+            except ValueError:
+                # for unknown value, return raw value
+                return value
+        if isinstance(deserializer, type) and issubclass(deserializer, Model):
+            return deserializer._deserialize(value, [])
+        return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
+    except Exception as e:
+        raise DeserializationError() from e
+
+
+def _deserialize(
+    deserializer: typing.Any,
+    value: typing.Any,
+    module: typing.Optional[str] = None,
+    rf: typing.Optional["_RestField"] = None,
+    format: typing.Optional[str] = None,
+) -> typing.Any:
+    if isinstance(value, PipelineResponse):
+        value = value.http_response.json()
+    if rf is None and format:
+        rf = _RestField(format=format)
+    if not isinstance(deserializer, functools.partial):
+        deserializer = _get_deserialize_callable_from_annotation(deserializer,
module, rf) + return _deserialize_with_callable(deserializer, value) + + +def _failsafe_deserialize( + deserializer: typing.Any, + response: HttpResponse, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, response.json(), module, rf, format) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + response: HttpResponse, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, response.text()) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = 
None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, list[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", 
parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: list[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/serialization.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/serialization.py new file mode 100644 index 000000000000..45a3e44e45cb --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/serialization.py @@ -0,0 +1,2030 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. + :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. 
+ _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + +TZ_UTC = datetime.timezone.utc + +_FLATTEN = re.compile(r"(? None: + self.additional_properties: Optional[dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. 
+ + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+ """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized |= target_obj.additional_properties + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+ :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec # pylint: disable=eval-used + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. 
+ :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. 
+ :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." 
in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(list[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. 
+ + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer: + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. 
+ """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. 
+ :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. 
+ :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. + :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises ValueError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. 
+ :return: Deserialized duration + :rtype: TimeDelta + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :return: Deserialized date + :rtype: Date + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :return: Deserialized time + :rtype: datetime.time + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. 
+ + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises DeserializationError: if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/py.typed b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/__init__.py new file mode 100644 index 000000000000..8db66d3d0f0f --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py new file mode 100644 index 000000000000..4b28599e3bce --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py @@ -0,0 +1,314 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +# pylint: disable=broad-exception-caught,unused-argument,logging-fstring-interpolation,too-many-statements,too-many-return-statements +import inspect +import json +import os +import traceback +from abc import abstractmethod +from typing import Any, AsyncGenerator, Generator, Union + +import uvicorn +from opentelemetry import context as otel_context, trace +from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator +from starlette.applications import Starlette +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.middleware.cors import CORSMiddleware +from starlette.requests import Request +from starlette.responses import JSONResponse, Response, StreamingResponse +from starlette.routing import Route +from starlette.types import ASGIApp + +from ..constants import Constants +from ..logger import get_logger, request_context +from ..models import ( + Response as OpenAIResponse, + ResponseStreamEvent, +) +from .common.agent_run_context import AgentRunContext + +logger = get_logger() +DEBUG_ERRORS = os.environ.get(Constants.AGENT_DEBUG_ERRORS, "false").lower() == "true" + + +class AgentRunContextMiddleware(BaseHTTPMiddleware): + def __init__(self, app: ASGIApp): + super().__init__(app) + + async def dispatch(self, request: Request, call_next): + if request.url.path in ("/runs", "/responses"): + try: + self.set_request_id_to_context_var(request) + payload = await request.json() + except Exception as e: + logger.error(f"Invalid JSON payload: {e}") + return JSONResponse({"error": f"Invalid JSON payload: {e}"}, status_code=400) + try: + request.state.agent_run_context = AgentRunContext(payload) + self.set_run_context_to_context_var(request.state.agent_run_context) + except Exception as e: + logger.error(f"Context build failed: {e}.", exc_info=True) + return JSONResponse({"error": f"Context build failed: {e}"}, status_code=500) + return await call_next(request) + + def set_request_id_to_context_var(self, 
request): + request_id = request.headers.get("X-Request-Id", None) + if request_id: + ctx = request_context.get() or {} + ctx["azure.ai.agentshosting.x-request-id"] = request_id + request_context.set(ctx) + + def set_run_context_to_context_var(self, run_context): + agent_id = "" + agent_obj = run_context.get_agent_id_object() + if agent_obj: + agent_name = getattr(agent_obj, "name", "") + agent_version = getattr(agent_obj, "version", "") + agent_id = f"{agent_name}:{agent_version}" + + res = { + "azure.ai.agentshosting.response_id": run_context.response_id or "", + "azure.ai.agentshosting.conversation_id": run_context.conversation_id or "", + "azure.ai.agentshosting.streaming": str(run_context.stream or False), + "gen_ai.agent.id": agent_id, + "gen_ai.provider.name": "AzureAI Hosted Agents", + "gen_ai.response.id": run_context.response_id or "", + } + ctx = request_context.get() or {} + ctx.update(res) + request_context.set(ctx) + + +class FoundryCBAgent: + def __init__(self): + async def runs_endpoint(request): + # Set up tracing context and span + context = request.state.agent_run_context + ctx = request_context.get() + with self.tracer.start_as_current_span( + name=f"ContainerAgentsAdapter-{context.response_id}", + attributes=ctx, + kind=trace.SpanKind.SERVER, + ): + try: + logger.info("Start processing CreateResponse request:") + + context_carrier = {} + TraceContextTextMapPropagator().inject(context_carrier) + + resp = await self.agent_run(context) + + if inspect.isgenerator(resp): + # Prefetch first event to allow 500 status if generation fails immediately + try: + first_event = next(resp) + except Exception as e: # noqa: BLE001 + err_msg = str(e) if DEBUG_ERRORS else "Internal error" + logger.error("Generator initialization failed: %s\n%s", e, traceback.format_exc()) + return JSONResponse({"error": err_msg}, status_code=500) + + def gen(): + ctx = TraceContextTextMapPropagator().extract(carrier=context_carrier) + token = otel_context.attach(ctx) + error_sent = False + try: + # yield prefetched first event + yield _event_to_sse_chunk(first_event) + for event in resp: + yield _event_to_sse_chunk(event) + except Exception as e: # noqa: BLE001 + err_msg = str(e) if DEBUG_ERRORS else "Internal error" + logger.error("Error in non-async generator: %s\n%s", e, traceback.format_exc()) + payload = {"error": err_msg} + yield f"event: error\ndata: {json.dumps(payload)}\n\n" + yield "data: [DONE]\n\n" + error_sent = True + finally: + logger.info("End of processing CreateResponse request:") + otel_context.detach(token) + if not error_sent: + yield "data: [DONE]\n\n" + + return StreamingResponse(gen(), media_type="text/event-stream") + if inspect.isasyncgen(resp): + # Prefetch first async event to allow early 500 + try: + first_event = await resp.__anext__() + except StopAsyncIteration: + # No items produced; treat as empty successful stream + def empty_gen(): + yield "data: [DONE]\n\n" + + return StreamingResponse(empty_gen(), media_type="text/event-stream") + except Exception as e: # noqa: BLE001 + err_msg = str(e) if DEBUG_ERRORS else "Internal error" + logger.error("Async generator initialization failed: %s\n%s", e, traceback.format_exc()) + return JSONResponse({"error": err_msg}, status_code=500) + + async def gen_async(): + ctx = TraceContextTextMapPropagator().extract(carrier=context_carrier) + token = otel_context.attach(ctx) + error_sent = False + try: + # yield prefetched first event + yield _event_to_sse_chunk(first_event) + async for event in resp: + yield _event_to_sse_chunk(event) + 
except Exception as e: # noqa: BLE001 + err_msg = str(e) if DEBUG_ERRORS else "Internal error" + logger.error("Error in async generator: %s\n%s", e, traceback.format_exc()) + payload = {"error": err_msg} + yield f"event: error\ndata: {json.dumps(payload)}\n\n" + yield "data: [DONE]\n\n" + error_sent = True + finally: + logger.info("End of processing CreateResponse request.") + otel_context.detach(token) + if not error_sent: + yield "data: [DONE]\n\n" + + return StreamingResponse(gen_async(), media_type="text/event-stream") + logger.info("End of processing CreateResponse request.") + return JSONResponse(resp.as_dict()) + except Exception as e: + # TODO: extract status code from exception + logger.error(f"Error processing CreateResponse request: {traceback.format_exc()}") + return JSONResponse({"error": str(e)}, status_code=500) + + async def liveness_endpoint(request): + result = await self.agent_liveness(request) + return _to_response(result) + + async def readiness_endpoint(request): + result = await self.agent_readiness(request) + return _to_response(result) + + routes = [ + Route("/runs", runs_endpoint, methods=["POST"], name="agent_run"), + Route("/responses", runs_endpoint, methods=["POST"], name="agent_response"), + Route("/liveness", liveness_endpoint, methods=["GET"], name="agent_liveness"), + Route("/readiness", readiness_endpoint, methods=["GET"], name="agent_readiness"), + ] + + self.app = Starlette(routes=routes) + self.app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + self.app.add_middleware(AgentRunContextMiddleware) + + @self.app.on_event("startup") + async def attach_appinsights_logger(): + import logging + + for handler in logger.handlers: + if handler.name == "appinsights_handler": + for logger_name in ["uvicorn", "uvicorn.error", "uvicorn.access"]: + uv_logger = logging.getLogger(logger_name) + uv_logger.addHandler(handler) + uv_logger.setLevel(logger.level) + uv_logger.propagate = False + + self.tracer = None + + @abstractmethod + async def agent_run( + self, context: AgentRunContext + ) -> Union[OpenAIResponse, Generator[ResponseStreamEvent, Any, Any], AsyncGenerator[ResponseStreamEvent, Any]]: + raise NotImplementedError + + async def agent_liveness(self, request) -> Union[Response, dict]: + return Response(status_code=200) + + async def agent_readiness(self, request) -> Union[Response, dict]: + return {"status": "ready"} + + async def run_async( + self, + port: int = int(os.environ.get("DEFAULT_AD_PORT", 8088)), + ) -> None: + """ + Awaitable server starter for use **inside** an existing event loop. + + :param port: Port to listen on. + :type port: int + """ + self.init_tracing() + config = uvicorn.Config(self.app, host="0.0.0.0", port=port, loop="asyncio") + server = uvicorn.Server(config) + logger.info(f"Starting FoundryCBAgent server async on port {port}") + await server.serve() + + def run(self, port: int = int(os.environ.get("DEFAULT_AD_PORT", 8088))) -> None: + """ + Start a Starlette server on localhost: exposing: + POST /runs + POST /responses + GET /liveness + GET /readiness + + :param port: Port to listen on. 
+ :type port: int + """ + self.init_tracing() + logger.info(f"Starting FoundryCBAgent server on port {port}") + uvicorn.run(self.app, host="0.0.0.0", port=port) + + def init_tracing(self): + exporter = os.environ.get(Constants.OTEL_EXPORTER_ENDPOINT) + app_insights_conn_str = os.environ.get(Constants.APPLICATION_INSIGHTS_CONNECTION_STRING) + if exporter or app_insights_conn_str: + from opentelemetry.sdk.resources import Resource + from opentelemetry.sdk.trace import TracerProvider + + resource = Resource.create(self.get_trace_attributes()) + provider = TracerProvider(resource=resource) + if exporter: + self.setup_otlp_exporter(exporter, provider) + if app_insights_conn_str: + self.setup_application_insights_exporter(app_insights_conn_str, provider) + trace.set_tracer_provider(provider) + self.init_tracing_internal(exporter_endpoint=exporter, app_insights_conn_str=app_insights_conn_str) + self.tracer = trace.get_tracer(__name__) + + def get_trace_attributes(self): + return { + "service.name": "azure.ai.agentshosting", + } + + def init_tracing_internal(self, exporter_endpoint=None, app_insights_conn_str=None): + pass + + def setup_application_insights_exporter(self, connection_string, provider): + from opentelemetry.sdk.trace.export import BatchSpanProcessor + + from azure.monitor.opentelemetry.exporter import AzureMonitorTraceExporter + + exporter_instance = AzureMonitorTraceExporter.from_connection_string(connection_string) + processor = BatchSpanProcessor(exporter_instance) + provider.add_span_processor(processor) + logger.info("Tracing setup with Application Insights exporter.") + + def setup_otlp_exporter(self, endpoint, provider): + from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter + from opentelemetry.sdk.trace.export import BatchSpanProcessor + + exporter_instance = OTLPSpanExporter(endpoint=endpoint) + processor = BatchSpanProcessor(exporter_instance) + provider.add_span_processor(processor) + logger.info(f"Tracing setup with OTLP exporter: {endpoint}") + + +def _event_to_sse_chunk(event: ResponseStreamEvent) -> str: + event_data = json.dumps(event.as_dict()) + if event.type: + return f"event: {event.type}\ndata: {event_data}\n\n" + return f"data: {event_data}\n\n" + + +def _to_response(result: Union[Response, dict]) -> Response: + return result if isinstance(result, Response) else JSONResponse(result) diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/__init__.py new file mode 100644 index 000000000000..8db66d3d0f0f --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py new file mode 100644 index 000000000000..6fae56f0027d --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py @@ -0,0 +1,76 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from ...logger import get_logger +from ...models import CreateResponse +from ...models.projects import AgentId, AgentReference, ResponseConversation1 +from .id_generator.foundry_id_generator import FoundryIdGenerator +from .id_generator.id_generator import IdGenerator + +logger = get_logger() + + +class AgentRunContext: + def __init__(self, payload: dict): + self._raw_payload = payload + self._request = _deserialize_create_response(payload) + self._id_generator = FoundryIdGenerator.from_request(payload) + self._response_id = self._id_generator.response_id + self._conversation_id = self._id_generator.conversation_id + self._stream = self.request.get("stream", False) + + @property + def raw_payload(self) -> dict: + return self._raw_payload + + @property + def request(self) -> CreateResponse: + return self._request + + @property + def id_generator(self) -> IdGenerator: + return self._id_generator + + @property + def response_id(self) -> str: + return self._response_id + + @property + def conversation_id(self) -> str: + return self._conversation_id + + @property + def stream(self) -> bool: + return self._stream + + def get_agent_id_object(self) -> AgentId: + agent = self.request.get("agent") + if not agent: + return None # type: ignore + return AgentId( + { + "type": agent.type, + "name": agent.name, + "version": agent.version, + } + ) + + def get_conversation_object(self) -> ResponseConversation1: + if not self._conversation_id: + return None # type: ignore + return ResponseConversation1(id=self._conversation_id) + + +def _deserialize_create_response(payload: dict) -> CreateResponse: + _deserialized = CreateResponse(**payload) + + raw_agent_reference = payload.get("agent") + if raw_agent_reference: + _deserialized["agent"] = _deserialize_agent_reference(raw_agent_reference) + return _deserialized + + +def _deserialize_agent_reference(payload: dict) -> AgentReference: + if not payload: + return None # type: ignore + return AgentReference(**payload) diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/__init__.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/__init__.py new file mode 100644 index 000000000000..fdf8caba9ef5 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/__init__.py @@ -0,0 +1,5 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- + +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/foundry_id_generator.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/foundry_id_generator.py new file mode 100644 index 000000000000..910a7c481daa --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/foundry_id_generator.py @@ -0,0 +1,136 @@ +# pylint: disable=docstring-missing-return,docstring-missing-param,docstring-missing-rtype +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from __future__ import annotations + +import base64 +import os +import re +from typing import Optional + +from .id_generator import IdGenerator + +_WATERMARK_RE = re.compile(r"^[A-Za-z0-9]*$") + + +class FoundryIdGenerator(IdGenerator): + """ + Python port of the C# FoundryIdGenerator. + + Notable behaviors preserved: + - Secure, alphanumeric entropy via base64 filtering, retrying until exact length. + - Watermark must be strictly alphanumeric; inserted mid-entropy. + - Only one delimiter (default "_") after the prefix; no delimiter between entropy and partition key. + - Partition key is the last N characters of the second ID segment (post-delimiter). + """ + + def __init__(self, response_id: Optional[str], conversation_id: Optional[str]): + self.response_id = response_id or self._new_id("resp") + self.conversation_id = conversation_id or self._new_id("conv") + self._partition_id = self._extract_partition_id(self.conversation_id) + + @classmethod + def from_request(cls, payload: dict) -> "FoundryIdGenerator": + response_id = payload.get("metadata", {}).get("response_id", None) + conv_id_raw = payload.get("conversation", None) + if isinstance(conv_id_raw, str): + conv_id = conv_id_raw + elif isinstance(conv_id_raw, dict): + conv_id = conv_id_raw.get("id", None) + else: + conv_id = None + return cls(response_id, conv_id) + + def generate(self, category: Optional[str] = None) -> str: + prefix = "id" if not category else category + return self._new_id(prefix, partition_key=self._partition_id) + + # --- Static helpers (mirror C# private static methods) -------------------- + + @staticmethod + def _new_id( + prefix: str, + string_length: int = 32, + partition_key_length: int = 18, + infix: Optional[str] = "", + watermark: str = "", + delimiter: str = "_", + partition_key: Optional[str] = None, + partition_key_hint: str = "", + ) -> str: + """ + Generates a new ID. + + Format matches the C# logic: + f"{prefix}{delimiter}{infix}{partitionKey}{entropy}" + (i.e., exactly one delimiter after prefix; no delimiter between entropy and partition key) + """ + entropy = FoundryIdGenerator._secure_entropy(string_length) + + if partition_key is not None: + pkey = partition_key + elif partition_key_hint: + pkey = FoundryIdGenerator._extract_partition_id( + partition_key_hint, + string_length=string_length, + partition_key_length=partition_key_length, + delimiter=delimiter, + ) + else: + pkey = FoundryIdGenerator._secure_entropy(partition_key_length) + + if watermark: + if not _WATERMARK_RE.fullmatch(watermark): + raise ValueError(f"Only alphanumeric characters may be in watermark: {watermark}") + half = string_length // 2 + entropy = f"{entropy[:half]}{watermark}{entropy[half:]}" + + infix = infix or "" + prefix_part = f"{prefix}{delimiter}" if prefix else "" + return f"{prefix_part}{entropy}{infix}{pkey}" + + @staticmethod + def _secure_entropy(string_length: int) -> str: + """ + Generates a secure random alphanumeric string of exactly `string_length`. + Re-tries whole generation until the filtered base64 string is exactly the desired length, + matching the C# behavior. + """ + if string_length < 1: + raise ValueError("Must be greater than or equal to 1") + + while True: + # Use cryptographically secure bytes; base64 then filter to alnum. 
+ buf = os.urandom(string_length) + encoded = base64.b64encode(buf).decode("ascii") + alnum = "".join(ch for ch in encoded if ch.isalnum()) + if len(alnum) >= string_length: + return alnum[:string_length] + # else: retry, same as the C# loop which discards and regenerates + + @staticmethod + def _extract_partition_id( + id_str: str, + string_length: int = 32, + partition_key_length: int = 18, + delimiter: str = "_", + ) -> str: + """ + Extracts partition key from an existing ID. + + Expected shape (per C# logic): "<prefix>_<entropy><partition key>" + We take the last `partition_key_length` characters from the *second* segment. + """ + if not id_str: + raise ValueError("Id cannot be null or empty") + + parts = [p for p in id_str.split(delimiter) if p] # remove empty entries like C# Split(..., RemoveEmptyEntries) + if len(parts) < 2: + raise ValueError(f"Id '{id_str}' does not contain a valid partition key.") + + segment = parts[1] + if len(segment) < string_length + partition_key_length: + raise ValueError(f"Id '{id_str}' does not contain a valid id.") + + return segment[-partition_key_length:] diff --git a/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/id_generator.py b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/id_generator.py new file mode 100644 index 000000000000..48f0d9add17d --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/id_generator.py @@ -0,0 +1,19 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from abc import ABC, abstractmethod +from typing import Optional + + +class IdGenerator(ABC): + @abstractmethod + def generate(self, category: Optional[str] = None) -> str: ... + + def generate_function_call_id(self) -> str: + return self.generate("func") + + def generate_function_output_id(self) -> str: + return self.generate("funcout") + + def generate_message_id(self) -> str: + return self.generate("msg") diff --git a/sdk/ai/azure-ai-agentserver-core/cspell.json b/sdk/ai/azure-ai-agentserver-core/cspell.json new file mode 100644 index 000000000000..126cadc0625c --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/cspell.json @@ -0,0 +1,27 @@ +{ + "ignoreWords": [ + "Agentic", + "UPIA", + "ANSII", + "inpainting", + "CSDL", + "azureai", + "GLEU", + "fstring", + "alnum", + "GENAI", + "Prereqs", + "mslearn", + "PYTHONIOENCODING", + "GETFL", + "DETFL", + "SETFL", + "Planifica" + ], + "ignorePaths": [ + "*.csv", + "*.json", + "*.rst", + "samples/**" + ] + } \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-core/dev_requirements.txt b/sdk/ai/azure-ai-agentserver-core/dev_requirements.txt new file mode 100644 index 000000000000..129e3e21fef1 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/dev_requirements.txt @@ -0,0 +1,2 @@ +-e ../../../eng/tools/azure-sdk-tools +python-dotenv \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst new file mode 100644 index 000000000000..da01b083b0b3 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst @@ -0,0 +1,34 @@ +azure.ai.agentserver.core package +================================= + +.. automodule:: azure.ai.agentserver.core + :inherited-members: + :members: + :undoc-members: + +Subpackages +----------- + +.. 
toctree:: + :maxdepth: 4 + + azure.ai.agentserver.core.server + +Submodules +---------- + +azure.ai.agentserver.core.constants module +------------------------------------------ + +.. automodule:: azure.ai.agentserver.core.constants + :inherited-members: + :members: + :undoc-members: + +azure.ai.agentserver.core.logger module +--------------------------------------- + +.. automodule:: azure.ai.agentserver.core.logger + :inherited-members: + :members: + :undoc-members: diff --git a/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.id_generator.rst b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.id_generator.rst new file mode 100644 index 000000000000..cf935aa1d1ed --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.id_generator.rst @@ -0,0 +1,26 @@ +azure.ai.agentserver.core.server.common.id\_generator package +============================================================= + +.. automodule:: azure.ai.agentserver.core.server.common.id_generator + :inherited-members: + :members: + :undoc-members: + +Submodules +---------- + +azure.ai.agentserver.core.server.common.id\_generator.foundry\_id\_generator module +----------------------------------------------------------------------------------- + +.. automodule:: azure.ai.agentserver.core.server.common.id_generator.foundry_id_generator + :inherited-members: + :members: + :undoc-members: + +azure.ai.agentserver.core.server.common.id\_generator.id\_generator module +-------------------------------------------------------------------------- + +.. automodule:: azure.ai.agentserver.core.server.common.id_generator.id_generator + :inherited-members: + :members: + :undoc-members: diff --git a/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.rst b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.rst new file mode 100644 index 000000000000..26c4aaf4d15a --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.common.rst @@ -0,0 +1,26 @@ +azure.ai.agentserver.core.server.common package +=============================================== + +.. automodule:: azure.ai.agentserver.core.server.common + :inherited-members: + :members: + :undoc-members: + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + azure.ai.agentserver.core.server.common.id_generator + +Submodules +---------- + +azure.ai.agentserver.core.server.common.agent\_run\_context module +------------------------------------------------------------------ + +.. automodule:: azure.ai.agentserver.core.server.common.agent_run_context + :inherited-members: + :members: + :undoc-members: diff --git a/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.rst b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.rst new file mode 100644 index 000000000000..b82fa765b839 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.server.rst @@ -0,0 +1,26 @@ +azure.ai.agentserver.core.server package +======================================== + +.. automodule:: azure.ai.agentserver.core.server + :inherited-members: + :members: + :undoc-members: + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + azure.ai.agentserver.core.server.common + +Submodules +---------- + +azure.ai.agentserver.core.server.base module +-------------------------------------------- + +.. 
automodule:: azure.ai.agentserver.core.server.base + :inherited-members: + :members: + :undoc-members: diff --git a/sdk/ai/azure-ai-agentserver-core/pyproject.toml b/sdk/ai/azure-ai-agentserver-core/pyproject.toml new file mode 100644 index 000000000000..32c533034a3b --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/pyproject.toml @@ -0,0 +1,70 @@ +[project] +name = "azure-ai-agentserver-core" +dynamic = ["version", "readme"] +description = "Agents server adapter for Azure AI" +requires-python = ">=3.10" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +license = "MIT" +classifiers = [ + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +keywords = ["azure", "azure sdk"] + +dependencies = [ + "azure-monitor-opentelemetry>=1.5.0", + "azure-ai-projects", + "azure-ai-agents>=1.2.0b5", + "azure-core>=1.35.0", + "azure-identity", + "openai>=1.80.0", + "opentelemetry-api>=1.35", + "opentelemetry-exporter-otlp-proto-http", + "starlette>=0.45.0", + "uvicorn>=0.31.0", +] + +[build-system] +requires = ["setuptools>=69", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "samples*", + "doc*", + "azure", + "azure.ai", +] + +[tool.setuptools.dynamic] +version = { attr = "azure.ai.agentserver.core._version.VERSION" } +readme = { file = ["README.md"], content-type = "text/markdown" } + +[tool.setuptools.package-data] +pytyped = ["py.typed"] + +[tool.ruff] +line-length = 120 +target-version = "py311" +lint.select = ["E", "F", "B", "I"] # E=pycodestyle errors, F=Pyflakes, B=bugbear, I=import sort +lint.ignore = [] +fix = false +exclude = [ + "**/azure/ai/agentserver/core/models/", +] + +[tool.ruff.lint.isort] +known-first-party = ["azure.ai.agentserver.core"] +combine-as-imports = true + +[tool.azure-sdk-build] +verifytypes = false # has unknown dependencies +pyright = false \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-core/pyrightconfig.json b/sdk/ai/azure-ai-agentserver-core/pyrightconfig.json new file mode 100644 index 000000000000..d53d8b9e5fe9 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/pyrightconfig.json @@ -0,0 +1,10 @@ +{ + "reportTypeCommentUsage": true, + "reportMissingImports": false, + "pythonVersion": "3.11", + "exclude": [ + "./samples" + ], + "extraPaths": [ + ] +} \ No newline at end of file diff --git a/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/.env.sample b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/.env.sample new file mode 100644 index 000000000000..a19b1c6d02f7 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/.env.sample @@ -0,0 +1,24 @@ +# Core agent configuration +API_HOST=github +WEEKEND_PLANNER_MODE=container + +# GitHub Models (when API_HOST=github) +GITHUB_TOKEN=your-github-token +GITHUB_OPENAI_BASE_URL=https://models.inference.ai.azure.com +GITHUB_MODEL=gpt-4o + +# Azure OpenAI (when API_HOST=azure) +AZURE_OPENAI_ENDPOINT=https://.openai.azure.com/ +AZURE_OPENAI_VERSION=2025-01-01-preview +AZURE_OPENAI_CHAT_DEPLOYMENT= + +# Telemetry & tracing +OTEL_EXPORTER_OTLP_ENDPOINT=http://127.0.0.1:4318/v1/traces +OTEL_EXPORTER_OTLP_PROTOCOL=grpc +OTEL_EXPORTER_OTLP_GRPC_ENDPOINT=http://127.0.0.1:4317 
+APPLICATION_INSIGHTS_CONNECTION_STRING= + +# Optional GenAI capture overrides +OTEL_GENAI_AGENT_NAME=Bilingual Weekend Planner Agent +OTEL_GENAI_AGENT_DESCRIPTION=Assistant that plans weekend activities using weather and events data in multiple languages +OTEL_GENAI_AGENT_ID=bilingual-weekend-planner diff --git a/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/README.md b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/README.md new file mode 100644 index 000000000000..83296f5dd348 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/README.md @@ -0,0 +1,42 @@ +Bilingual Weekend Planner (Custom Container + Telemetry) + +- Container-hosted multi-agent weekend planner with full GenAI telemetry capture and a standalone tracing demo that exercises `opentelemetry-instrumentation-openai-agents-v2`. + +Prereqs +- Optional: Activate repo venv `source .venv/bin/activate` +- Install deps from this sample folder: `pip install -U -r requirements.txt` + +Env Vars +Choose the API host via `API_HOST`: + +- `github`: GitHub Models hosted on Azure AI Inference + - `GITHUB_TOKEN` + - Optional: `GITHUB_OPENAI_BASE_URL` (default `https://models.inference.ai.azure.com`) + - Optional: `GITHUB_MODEL` (default `gpt-4o`) +- `azure`: Azure OpenAI + - `AZURE_OPENAI_ENDPOINT` (e.g. `https://<your-resource-name>.openai.azure.com/`) + - `AZURE_OPENAI_VERSION` (e.g. `2025-01-01-preview`) + - `AZURE_OPENAI_CHAT_DEPLOYMENT` (deployment name) + +Modes +- Container (default): runs the bilingual triage agent via `FoundryCBAgent`; start it with one of the `./run.sh` commands below. +- `API_HOST=github GITHUB_TOKEN=... ./run.sh` +- `API_HOST=azure AZURE_OPENAI_ENDPOINT=... AZURE_OPENAI_VERSION=2025-01-01-preview AZURE_OPENAI_CHAT_DEPLOYMENT=... ./run.sh` + - Test (non-stream): + `curl -s http://localhost:8088/responses -H 'Content-Type: application/json' -d '{"input":"What should I do this weekend in Seattle?"}'` + - Test (stream): + `curl -s http://localhost:8088/responses -H 'Content-Type: application/json' -d '{"input":"Plan my weekend in Barcelona","stream":true}'` +- Telemetry demo: set `WEEKEND_PLANNER_MODE=demo` to run the content-capture simulation (no model calls). + `WEEKEND_PLANNER_MODE=demo python main.py` + +Telemetry +- Console exporter is enabled by default; set `OTEL_EXPORTER_OTLP_ENDPOINT` (HTTP) or `OTEL_EXPORTER_OTLP_GRPC_ENDPOINT` to export spans elsewhere. +- Set `APPLICATION_INSIGHTS_CONNECTION_STRING` to export spans to Azure Monitor. +- GenAI capture flags are pre-configured (content, system instructions, tool metadata). +- `opentelemetry-instrumentation-openai-agents-v2` enables span-and-event message capture for requests, responses, and tool payloads. +- The tracing demo uses the `agents.tracing` helpers to emit spans without invoking external APIs. + +Notes +- Uses `FoundryCBAgent` to host the bilingual weekend planner triage agent on `http://localhost:8088`. +- Tools: `get_weather`, `get_activities`, `get_current_date`. +- Rich logger output highlights tool invocations; bilingual agents route traveler requests to the right language specialist. 
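For readers who prefer Python over the curl commands above, here is a minimal sketch of calling the same `/responses` endpoint. It assumes the sample server started by `run.sh` is listening on `http://localhost:8088` (the adapter's default port) and that the third-party `requests` package is installed; it is illustrative only and not part of this change.

```python
# Hypothetical local client for the hosted agent; mirrors the curl examples above.
import json

import requests

BASE_URL = "http://localhost:8088"  # assumed default port used by the sample

# Non-streaming call: the adapter returns a single JSON response body.
resp = requests.post(
    f"{BASE_URL}/responses",
    json={"input": "What should I do this weekend in Seattle?"},
    timeout=60,
)
resp.raise_for_status()
print(json.dumps(resp.json(), indent=2))

# Streaming call: the adapter emits server-sent events and finishes with "data: [DONE]".
with requests.post(
    f"{BASE_URL}/responses",
    json={"input": "Plan my weekend in Barcelona", "stream": True},
    stream=True,
    timeout=60,
) as stream_resp:
    stream_resp.raise_for_status()
    for line in stream_resp.iter_lines(decode_unicode=True):
        if line:  # skip the blank lines that separate SSE events
            print(line)
```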
diff --git a/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py new file mode 100644 index 000000000000..099d8dc45181 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py @@ -0,0 +1,579 @@ +# mypy: ignore-errors +"""Bilingual weekend planner sample with full GenAI telemetry capture.""" + +from __future__ import annotations + +import json +import logging +import os +import random +from dataclasses import dataclass +from datetime import datetime, timezone +from typing import Callable +from urllib.parse import urlparse + +import azure.identity +import openai +from agents import ( + Agent, + OpenAIChatCompletionsModel, + Runner, + function_tool, + set_default_openai_client, + set_tracing_disabled, +) +from agents.tracing import ( + agent_span as tracing_agent_span, + function_span as tracing_function_span, + generation_span as tracing_generation_span, + trace as tracing_trace, +) +from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent +from azure.ai.agentserver.core.models import ( + CreateResponse, + Response as OpenAIResponse, +) +from azure.ai.agentserver.core.models.projects import ( + ItemContentOutputText, + ResponseCompletedEvent, + ResponseCreatedEvent, + ResponseOutputItemAddedEvent, + ResponsesAssistantMessageItemResource, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, +) +from dotenv import load_dotenv +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.instrumentation.openai_agents import OpenAIAgentsInstrumentor +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter +from rich.logging import RichHandler + +try: + from azure.monitor.opentelemetry.exporter import ( # mypy: ignore + AzureMonitorTraceExporter, + ) +except Exception: # pragma: no cover + AzureMonitorTraceExporter = None # mypy: ignore + +# Load env early so adapter init sees them +load_dotenv(override=True) + + +logging.basicConfig( + level=logging.WARNING, + format="%(message)s", + datefmt="[%X]", + handlers=[RichHandler()], +) +logger = logging.getLogger("bilingual_weekend_planner") +RUN_MODE = os.getenv("WEEKEND_PLANNER_MODE", "container").lower() + + +@dataclass +class _ApiConfig: + """Helper describing how to create the OpenAI client.""" + + build_client: Callable[[], openai.AsyncOpenAI] + model_name: str + base_url: str + provider: str + + +def _set_capture_env(provider: str, base_url: str) -> None: + """Enable all GenAI capture toggles prior to instrumentation.""" + + capture_defaults = { + "OTEL_INSTRUMENTATION_OPENAI_AGENTS_CAPTURE_CONTENT": "true", + "OTEL_INSTRUMENTATION_OPENAI_AGENTS_CAPTURE_METRICS": "true", + "OTEL_GENAI_CAPTURE_MESSAGES": "true", + "OTEL_GENAI_CAPTURE_SYSTEM_INSTRUCTIONS": "true", + "OTEL_GENAI_CAPTURE_TOOL_DEFINITIONS": "true", + "OTEL_GENAI_EMIT_OPERATION_DETAILS": "true", + "OTEL_GENAI_AGENT_NAME": os.getenv( + "OTEL_GENAI_AGENT_NAME", + "Bilingual Weekend Planner Agent", + ), + "OTEL_GENAI_AGENT_DESCRIPTION": os.getenv( + "OTEL_GENAI_AGENT_DESCRIPTION", + "Assistant that plans weekend activities using weather and events data in multiple languages", + ), + "OTEL_GENAI_AGENT_ID": os.getenv( + "OTEL_GENAI_AGENT_ID", "bilingual-weekend-planner" + ), + } + for env_key, value in capture_defaults.items(): + 
os.environ.setdefault(env_key, value) + + parsed = urlparse(base_url) + if parsed.hostname: + os.environ.setdefault("OTEL_GENAI_SERVER_ADDRESS", parsed.hostname) + if parsed.port: + os.environ.setdefault("OTEL_GENAI_SERVER_PORT", str(parsed.port)) + + +def _resolve_api_config() -> _ApiConfig: + """Return the client configuration for the requested host.""" + + host = os.getenv("API_HOST", "github").lower() + + if host == "github": + base_url = os.getenv( + "GITHUB_OPENAI_BASE_URL", + "https://models.inference.ai.azure.com", + ).rstrip("/") + model_name = os.getenv("GITHUB_MODEL", "gpt-4o") + api_key = os.environ.get("GITHUB_TOKEN") + if not api_key: + if RUN_MODE != "demo": + raise RuntimeError("GITHUB_TOKEN is required when API_HOST=github") + api_key = "demo-key" + + def _build_client() -> openai.AsyncOpenAI: + return openai.AsyncOpenAI(base_url=base_url, api_key=api_key) + + return _ApiConfig( + build_client=_build_client, + model_name=model_name, + base_url=base_url, + provider="azure.ai.inference", + ) + + if host == "azure": + # Explicitly check for required environment variables + if "AZURE_OPENAI_ENDPOINT" not in os.environ: + raise ValueError("AZURE_OPENAI_ENDPOINT is required when API_HOST=azure") + if "AZURE_OPENAI_VERSION" not in os.environ: + raise ValueError("AZURE_OPENAI_VERSION is required when API_HOST=azure") + if "AZURE_OPENAI_CHAT_DEPLOYMENT" not in os.environ: + raise ValueError( + "AZURE_OPENAI_CHAT_DEPLOYMENT is required when API_HOST=azure" + ) + endpoint = os.environ["AZURE_OPENAI_ENDPOINT"].rstrip("/") + api_version = os.environ["AZURE_OPENAI_VERSION"] + deployment = os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"] + + credential = azure.identity.DefaultAzureCredential() + token_provider = azure.identity.get_bearer_token_provider( + credential, + "https://cognitiveservices.azure.com/.default", + ) + + def _build_client() -> openai.AsyncAzureOpenAI: + return openai.AsyncAzureOpenAI( + api_version=api_version, + azure_endpoint=endpoint, + azure_ad_token_provider=token_provider, + ) + + return _ApiConfig( + build_client=_build_client, + model_name=deployment, + base_url=endpoint, + provider="azure.ai.openai", + ) + + raise ValueError( + f"Unsupported API_HOST '{host}'. Supported values are 'github' or 'azure'." + ) + + +def _configure_otel() -> None: + """Configure the tracer provider and exporters.""" + + grpc_endpoint = os.getenv("OTEL_EXPORTER_OTLP_GRPC_ENDPOINT") + if not grpc_endpoint: + default_otlp_endpoint = os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT") + protocol = os.getenv("OTEL_EXPORTER_OTLP_PROTOCOL", "grpc").lower() + if default_otlp_endpoint and protocol == "grpc": + grpc_endpoint = default_otlp_endpoint + + conn = os.getenv("APPLICATION_INSIGHTS_CONNECTION_STRING") + resource = Resource.create( + { + "service.name": "weekend-planner-service", + "service.namespace": "leisure-orchestration", + "service.version": os.getenv("SERVICE_VERSION", "1.0.0"), + } + ) + + tracer_provider = TracerProvider(resource=resource) + + if grpc_endpoint: + tracer_provider.add_span_processor( + BatchSpanProcessor(OTLPSpanExporter(endpoint=grpc_endpoint)) + ) + print(f"[otel] OTLP gRPC exporter configured ({grpc_endpoint})") + elif conn: + if AzureMonitorTraceExporter is None: + print( + "Warning: Azure Monitor exporter not installed. 
" + "Install with: pip install azure-monitor-opentelemetry-exporter", + ) + tracer_provider.add_span_processor( + BatchSpanProcessor(ConsoleSpanExporter()) + ) + else: + tracer_provider.add_span_processor( + BatchSpanProcessor( + AzureMonitorTraceExporter.from_connection_string(conn) + ) + ) + print("[otel] Azure Monitor trace exporter configured") + else: + tracer_provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter())) + print("[otel] Console span exporter configured") + print( + "[otel] Set APPLICATION_INSIGHTS_CONNECTION_STRING to export to Application Insights " + "instead of the console", + ) + + trace.set_tracer_provider(tracer_provider) + + +api_config = _resolve_api_config() +_set_capture_env(api_config.provider, api_config.base_url) +_configure_otel() +OpenAIAgentsInstrumentor().instrument( + tracer_provider=trace.get_tracer_provider(), + capture_message_content="span_and_event", + agent_name="Weekend Planner", + base_url=api_config.base_url, + system=api_config.provider, +) + +client = api_config.build_client() +set_default_openai_client(client) +set_tracing_disabled(False) + + +def _chat_model() -> OpenAIChatCompletionsModel: + """Return the chat completions model used for weekend planning.""" + + return OpenAIChatCompletionsModel(model=api_config.model_name, openai_client=client) + + +SUNNY_WEATHER_PROBABILITY = 0.05 + + +@function_tool +def get_weather(city: str) -> dict[str, object]: + """Fetch mock weather information for the requested city.""" + + logger.info("Getting weather for %s", city) + if random.random() < SUNNY_WEATHER_PROBABILITY: + return {"city": city, "temperature": 72, "description": "Sunny"} + return {"city": city, "temperature": 60, "description": "Rainy"} + + +@function_tool +def get_activities(city: str, date: str) -> list[dict[str, object]]: + """Return mock activities for the supplied city and date.""" + + logger.info("Getting activities for %s on %s", city, date) + return [ + {"name": "Hiking", "location": city}, + {"name": "Beach", "location": city}, + {"name": "Museum", "location": city}, + ] + + +@function_tool +def get_current_date() -> str: + """Return the current date as YYYY-MM-DD.""" + + logger.info("Getting current date") + return datetime.now().strftime("%Y-%m-%d") + + +ENGLISH_WEEKEND_PLANNER = Agent( + name="Weekend Planner (English)", + instructions=( + "You help English-speaking travelers plan their weekends. " + "Use the available tools to gather the weekend date, current weather, and local activities. " + "Only recommend activities that align with the weather and include the date in your final response." + ), + tools=[get_weather, get_activities, get_current_date], + model=_chat_model(), +) + +# cSpell:disable +SPANISH_WEEKEND_PLANNER = Agent( + name="Planificador de fin de semana (Español)", + instructions=( + "Ayudas a viajeros hispanohablantes a planificar su fin de semana. " + "Usa las herramientas disponibles para obtener la fecha, el clima y actividades locales. " + "Recomienda actividades acordes al clima e incluye la fecha del fin de semana en tu respuesta." + ), + tools=[get_weather, get_activities, get_current_date], + model=_chat_model(), +) + +TRIAGE_AGENT = Agent( + name="Weekend Planner Triage", + instructions=( + "Revisa el idioma del viajero. " + "Si el mensaje está en español, realiza un handoff a 'Planificador de fin de semana (Español)'. " + "De lo contrario, usa 'Weekend Planner (English)'." 
+ ), + handoffs=[SPANISH_WEEKEND_PLANNER, ENGLISH_WEEKEND_PLANNER], + model=_chat_model(), +) +# cSpell:enable + + +def _root_span_name(provider: str) -> str: + return f"weekend_planning_session[{provider}]" + + +def _apply_weekend_semconv( + span: trace.Span, + *, + user_text: str, + final_text: str, + conversation_id: str | None, + response_id: str, + final_agent_name: str | None, + success: bool, +) -> None: + parsed = urlparse(api_config.base_url) + if parsed.hostname: + span.set_attribute("server.address", parsed.hostname) + if parsed.port: + span.set_attribute("server.port", parsed.port) + + span.set_attribute("gen_ai.operation.name", "invoke_agent") + span.set_attribute("gen_ai.provider.name", api_config.provider) + span.set_attribute("gen_ai.request.model", api_config.model_name) + span.set_attribute("gen_ai.output.type", "text") + span.set_attribute("gen_ai.response.model", api_config.model_name) + span.set_attribute("gen_ai.response.id", response_id) + span.set_attribute( + "gen_ai.response.finish_reasons", + ["stop"] if success else ["error"], + ) + + if conversation_id: + span.set_attribute("gen_ai.conversation.id", conversation_id) + if TRIAGE_AGENT.instructions: + span.set_attribute("gen_ai.system_instructions", TRIAGE_AGENT.instructions) + if final_agent_name: + span.set_attribute("gen_ai.agent.name", final_agent_name) + else: + span.set_attribute("gen_ai.agent.name", TRIAGE_AGENT.name) + if user_text: + span.set_attribute( + "gen_ai.input.messages", + json.dumps([{"role": "user", "content": user_text}]), + ) + if final_text: + span.set_attribute( + "gen_ai.output.messages", + json.dumps([{"role": "assistant", "content": final_text}]), + ) + + +def _extract_user_text(request: CreateResponse) -> str: + """Extract the first user text input from the request body.""" + + input = request.get("input") + if not input: + return "" + + first = input[0] + content = first.get("content", None) if isinstance(first, dict) else first + if isinstance(content, str): + return content + + if isinstance(content, list): + for item in content: + text = item.get("text", None) + if text: + return text + return "" + + +def _stream_final_text(final_text: str, context: AgentRunContext): + """Yield streaming events for the provided final text.""" + + async def _async_stream(): + assembled = "" + yield ResponseCreatedEvent(response=OpenAIResponse(output=[])) + item_id = context.id_generator.generate_message_id() + yield ResponseOutputItemAddedEvent( + output_index=0, + item=ResponsesAssistantMessageItemResource( + id=item_id, + status="in_progress", + content=[ItemContentOutputText(text="", annotations=[])], + ), + ) + + words = final_text.split(" ") + for idx, token in enumerate(words): + piece = token if idx == len(words) - 1 else token + " " + assembled += piece + yield ResponseTextDeltaEvent(output_index=0, content_index=0, delta=piece) + + yield ResponseTextDoneEvent(output_index=0, content_index=0, text=assembled) + yield ResponseCompletedEvent( + response=OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="user", + id=context.response_id, + created_at=datetime.now(timezone.utc), + output=[ + ResponsesAssistantMessageItemResource( + id=item_id, + status="completed", + content=[ItemContentOutputText(text=assembled, annotations=[])], + ) + ], + ) + ) + + return _async_stream() + + +def dump(title: str, payload: object) -> None: + """Pretty print helper for the tracing demo.""" + + print(f"\n=== {title} ===") + print(json.dumps(payload, indent=2)) + + +def 
run_content_capture_demo() -> None: + """Simulate an agent workflow using the tracing helpers without calling an API.""" + + itinerary_prompt = [ + {"role": "system", "content": "Help travelers plan memorable weekends."}, + {"role": "user", "content": "I'm visiting Seattle this weekend."}, + ] + tool_args = {"city": "Seattle", "date": "2025-05-17"} + tool_result = { + "forecast": "Light rain, highs 60°F", + "packing_tips": ["rain jacket", "waterproof shoes"], + } + + with tracing_trace("weekend-planner-simulation"): + with tracing_agent_span(name="weekend_planner_demo") as agent: + dump( + "Agent span started", + {"span_id": agent.span_id, "trace_id": agent.trace_id}, + ) + + with tracing_generation_span( + input=itinerary_prompt, + output=[ + { + "role": "assistant", + "content": ( + "Day 1 explore Pike Place Market, Day 2 visit the Museum of Pop Culture, " + "Day 3 take the Bainbridge ferry if weather allows." + ), + } + ], + model=api_config.model_name, + usage={ + "input_tokens": 128, + "output_tokens": 96, + "total_tokens": 224, + }, + ): + pass + + with tracing_function_span( + name="get_weather", + input=json.dumps(tool_args), + output=tool_result, + ): + pass + + print("\nWorkflow complete – spans exported to the configured OTLP endpoint.") + + +class WeekendPlannerContainer(FoundryCBAgent): + """Container entry point that surfaces the weekend planner agent via FoundryCBAgent.""" + + async def agent_run(self, context: AgentRunContext): + request = context.request + user_text = _extract_user_text(request) + + tracer = trace.get_tracer(__name__) + with tracer.start_as_current_span(_root_span_name(api_config.provider)) as span: + span.set_attribute("user.request", user_text) + span.set_attribute("api.host", os.getenv("API_HOST", "github")) + span.set_attribute("model.name", api_config.model_name) + span.set_attribute("agent.name", TRIAGE_AGENT.name) + span.set_attribute("triage.languages", "en,es") + + try: + result = await Runner.run(TRIAGE_AGENT, input=user_text) + final_text = str(result.final_output or "") + span.set_attribute( + "agent.response", final_text[:500] if final_text else "" + ) + final_agent = getattr(result, "last_agent", None) + if final_agent and getattr(final_agent, "name", None): + span.set_attribute("agent.final", final_agent.name) + span.set_attribute("request.success", True) + _apply_weekend_semconv( + span, + user_text=user_text, + final_text=final_text, + conversation_id=context.conversation_id, + response_id=context.response_id, + final_agent_name=getattr(final_agent, "name", None), + success=True, + ) + logger.info("Weekend planning completed successfully") + except Exception as exc: # pragma: no cover - defensive logging path + span.record_exception(exc) + span.set_attribute("request.success", False) + span.set_attribute("error.type", exc.__class__.__name__) + logger.error("Error during weekend planning: %s", exc) + final_text = f"Error running agent: {exc}" + _apply_weekend_semconv( + span, + user_text=user_text, + final_text=final_text, + conversation_id=context.conversation_id, + response_id=context.response_id, + final_agent_name=None, + success=False, + ) + + if request.get("stream", False): + return _stream_final_text(final_text, context) + + response = OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="user", + id=context.response_id, + created_at=datetime.now(timezone.utc), + output=[ + ResponsesAssistantMessageItemResource( + id=context.id_generator.generate_message_id(), + status="completed", + 
content=[ItemContentOutputText(text=final_text, annotations=[])], + ) + ], + ) + return response + + +if __name__ == "__main__": + logger.setLevel(logging.INFO) + try: + if RUN_MODE == "demo": + run_content_capture_demo() + else: + WeekendPlannerContainer().run() + finally: + trace.get_tracer_provider().shutdown() diff --git a/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/requirements.txt b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/requirements.txt new file mode 100644 index 000000000000..faf4fd5fbe2c --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/requirements.txt @@ -0,0 +1,13 @@ +openai-agents>=0.3.3 +python-dotenv +openai>=1.42.0 +azure-identity>=1.17.0 +opentelemetry-api>=1.26.0 +opentelemetry-sdk>=1.26.0 +opentelemetry-exporter-otlp-proto-http>=1.26.0 +opentelemetry-exporter-otlp-proto-grpc>=1.26.0 +opentelemetry-instrumentation-openai-agents-v2>=0.1.0 +rich>=13.9.0 +azure-ai-agentserver-core +# Optional tracing exporters +azure-monitor-opentelemetry-exporter>=1.0.0b16 diff --git a/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/run.sh b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/run.sh new file mode 100644 index 000000000000..e3d097e14166 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/bilingual_weekend_planner/run.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Simple local runner for the bilingual weekend planner container sample. +# Examples: +# API_HOST=github GITHUB_TOKEN=... ./run.sh + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +ROOT_DIR="$(cd "$SCRIPT_DIR/../../../.." && pwd)" + +export PYTHONPATH="$ROOT_DIR:${PYTHONPATH:-}" + +if [[ -d "$ROOT_DIR/.venv" ]]; then + # shellcheck disable=SC1090 + source "$ROOT_DIR/.venv/bin/activate" +fi + +PYTHON_BIN="${ROOT_DIR}/.venv/bin/python" +if [[ ! -x "$PYTHON_BIN" ]]; then + PYTHON_BIN="python3" +fi + +"$PYTHON_BIN" -u "$SCRIPT_DIR/main.py" diff --git a/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py b/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py new file mode 100644 index 000000000000..af9812826941 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py @@ -0,0 +1,246 @@ +# mypy: ignore-errors +"""Custom MCP simple sample. + +This sample combines the patterns from: + - langgraph `mcp_simple` (uses MultiServerMCPClient to discover tools) + - `custom_mock_agent_test` (implements a custom FoundryCBAgent with streaming events) + +Goal: When invoked in stream mode, emit MCP list tools related stream events so a +consumer (UI / CLI) can visualize tool enumeration plus a final assistant +message. In non-stream mode, return a single aggregated response summarizing +the tools. 
+ +Run: + python mcp_simple.py + +Then call (example): + curl -X POST http://localhost:8088/responses -H 'Content-Type: application/json' -d '{ + "agent": {"name": "custom_mcp", "type": "agent_reference"}, + "stream": true, + "input": "List the tools available" + }' +""" + +import datetime +import json +from typing import AsyncGenerator, List + +from langchain_mcp_adapters.client import MultiServerMCPClient + +from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent +from azure.ai.agentserver.core.models import Response as OpenAIResponse +from azure.ai.agentserver.core.models.projects import ( + ItemContentOutputText, + MCPListToolsItemResource, + MCPListToolsTool, + ResponseCompletedEvent, + ResponseCreatedEvent, + ResponseMCPListToolsCompletedEvent, + ResponseMCPListToolsInProgressEvent, + ResponseOutputItemAddedEvent, + ResponsesAssistantMessageItemResource, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, +) + + +class MCPToolsAgent(FoundryCBAgent): + def __init__(self): # noqa: D401 + super().__init__() + # Lazy init; created on first request to avoid startup latency if unused + self._mcp_client = None + + async def _get_client(self) -> MultiServerMCPClient: + if self._mcp_client is None: + # Mirror langgraph sample server config + self._mcp_client = MultiServerMCPClient( + { + "mslearn": { + "url": "https://learn.microsoft.com/api/mcp", + "transport": "streamable_http", + } + } + ) + return self._mcp_client + + async def _list_tools(self) -> List[MCPListToolsTool]: + client = await self._get_client() + try: + raw_tools = await client.get_tools() + tools: List[MCPListToolsTool] = [] + for t in raw_tools: + # Support either dict-like or attribute-based tool objects + if isinstance(t, dict): + name = t.get("name", "unknown_tool") + description = t.get("description") + schema = ( + t.get("input_schema") + or t.get("schema") + or t.get("parameters") + or {} + ) + else: # Fallback to attribute access + name = getattr(t, "name", "unknown_tool") + description = getattr(t, "description", None) + schema = ( + getattr(t, "input_schema", None) + or getattr(t, "schema", None) + or getattr(t, "parameters", None) + or {} + ) + tools.append( + MCPListToolsTool( + name=name, + description=description, + input_schema=schema, + ) + ) + if not tools: + raise ValueError("No tools discovered from MCP server") + return tools + except Exception: # noqa: BLE001 + # Provide deterministic fallback so sample always works offline + return [ + MCPListToolsTool( + name="fallback_echo", + description="Echo back provided text.", + input_schema={ + "type": "object", + "properties": {"text": {"type": "string"}}, + "required": ["text"], + }, + ) + ] + + async def agent_run(self, context: AgentRunContext): # noqa: D401 + """Implements the FoundryCBAgent contract. + + Streaming path emits MCP list tools events + assistant summary. + Non-stream path returns aggregated assistant message. 
+ """ + + tools = await self._list_tools() + + if context.stream: + + async def stream() -> AsyncGenerator: # noqa: D401 + # Initial empty response context (pattern from mock sample) + yield ResponseCreatedEvent(response=OpenAIResponse(output=[])) + + # Indicate listing in progress + yield ResponseMCPListToolsInProgressEvent() + + mcp_item = MCPListToolsItemResource( + id=context.id_generator.generate("mcp_list"), + server_label="mslearn", + tools=tools, + ) + yield ResponseOutputItemAddedEvent(output_index=0, item=mcp_item) + yield ResponseMCPListToolsCompletedEvent() + + # Assistant streaming summary + assistant_item = ResponsesAssistantMessageItemResource( + id=context.id_generator.generate_message_id(), + status="in_progress", + content=[ItemContentOutputText(text="", annotations=[])], + ) + yield ResponseOutputItemAddedEvent(output_index=1, item=assistant_item) + + summary_text = "Discovered MCP tools: " + ", ".join( + t.name for t in tools + ) + assembled = "" + parts = summary_text.split(" ") + for i, token in enumerate(parts): + piece = token if i == len(parts) - 1 else token + " " # keep spaces + assembled += piece + yield ResponseTextDeltaEvent( + output_index=1, content_index=0, delta=piece + ) + yield ResponseTextDoneEvent( + output_index=1, content_index=0, text=assembled + ) + + final_response = OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="user", + id=context.response_id, + created_at=datetime.datetime.now(), + output=[ + mcp_item, + ResponsesAssistantMessageItemResource( + id=assistant_item.id, + status="completed", + content=[ + ItemContentOutputText(text=assembled, annotations=[]) + ], + ), + ], + ) + yield ResponseCompletedEvent(response=final_response) + + return stream() + + # Non-stream path: single assistant message + # Build a JSON-serializable summary. Avoid dumping complex model/schema objects that + # can include non-serializable metaclass references (seen in error stacktrace). + safe_tools = [] + for t in tools: + schema = t.input_schema + # Simplify schema to plain dict/str; if not directly serializable, fallback to string. + if isinstance(schema, (str, int, float, bool)) or schema is None: + safe_schema = schema + elif isinstance(schema, dict): + # Shallow copy ensuring nested values are primitive or stringified + safe_schema = {} + for k, v in schema.items(): + if isinstance(v, (str, int, float, bool, type(None), list, dict)): + safe_schema[k] = v + else: + safe_schema[k] = str(v) + else: + safe_schema = str(schema) + safe_tools.append( + { + "name": t.name, + "description": t.description, + # Provide only top-level schema keys if dict. 
+ "input_schema_keys": list(safe_schema.keys()) + if isinstance(safe_schema, dict) + else safe_schema, + } + ) + summary = { + "server_label": "mslearn", + "tool_count": len(tools), + "tools": safe_tools, + } + content = [ + ItemContentOutputText( + text="MCP tool listing completed.\n" + json.dumps(summary, indent=2), + annotations=[], + ) + ] + return OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="user", + id="id", + created_at=datetime.datetime.now(), + output=[ + ResponsesAssistantMessageItemResource( + id=context.id_generator.generate_message_id(), + status="completed", + content=content, + ) + ], + ) + + +my_agent = MCPToolsAgent() + +if __name__ == "__main__": + my_agent.run() diff --git a/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/requirements.txt b/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/requirements.txt new file mode 100644 index 000000000000..525ee6af3f7d --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/mcp_simple/requirements.txt @@ -0,0 +1,2 @@ +langchain-mcp-adapters==0.1.11 +azure-ai-agentserver-core diff --git a/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py b/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py new file mode 100644 index 000000000000..3d4187a188f2 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py @@ -0,0 +1,104 @@ +# mypy: ignore-errors +import datetime + +from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent +from azure.ai.agentserver.core.models import Response as OpenAIResponse +from azure.ai.agentserver.core.models.projects import ( + ItemContentOutputText, + ResponseCompletedEvent, + ResponseCreatedEvent, + ResponseOutputItemAddedEvent, + ResponsesAssistantMessageItemResource, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, +) + + +def stream_events(text: str, context: AgentRunContext): + item_id = context.id_generator.generate_message_id() + + assembled = "" + yield ResponseCreatedEvent(response=OpenAIResponse(output=[])) + yield ResponseOutputItemAddedEvent( + output_index=0, + item=ResponsesAssistantMessageItemResource( + id=item_id, + status="in_progress", + content=[ + ItemContentOutputText( + text="", + annotations=[], + ) + ], + ), + ) + for i, token in enumerate(text.split(" ")): + piece = token if i == len(text.split(" ")) - 1 else token + " " + assembled += piece + yield ResponseTextDeltaEvent(output_index=0, content_index=0, delta=piece) + # Done with text + yield ResponseTextDoneEvent(output_index=0, content_index=0, text=assembled) + yield ResponseCompletedEvent( + response=OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="me", + id=context.response_id, + created_at=datetime.datetime.now(), + output=[ + ResponsesAssistantMessageItemResource( + id=item_id, + status="completed", + content=[ + ItemContentOutputText( + text=assembled, + annotations=[], + ) + ], + ) + ], + ) + ) + + +async def agent_run(context: AgentRunContext): + agent = context.request.get("agent") + print(f"agent:{agent}") + + if context.stream: + return stream_events( + "I am mock agent with no intelligence in stream mode.", context + ) + + # Build assistant output content + output_content = [ + ItemContentOutputText( + text="I am mock agent with no intelligence.", + annotations=[], + ) + ] + + response = OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="me", + id=context.response_id, + created_at=datetime.datetime.now(), + output=[ + 
ResponsesAssistantMessageItemResource( + id=context.id_generator.generate_message_id(), + status="completed", + content=output_content, + ) + ], + ) + return response + + +my_agent = FoundryCBAgent() +my_agent.agent_run = agent_run + +if __name__ == "__main__": + my_agent.run() diff --git a/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/requirements.txt b/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/requirements.txt new file mode 100644 index 000000000000..3f2b4e9ee6b4 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/samples/simple_mock_agent/requirements.txt @@ -0,0 +1 @@ +azure-ai-agentserver-core diff --git a/sdk/ai/azure-ai-agentserver-core/tests/conftest.py b/sdk/ai/azure-ai-agentserver-core/tests/conftest.py new file mode 100644 index 000000000000..e84bdfff3bd7 --- /dev/null +++ b/sdk/ai/azure-ai-agentserver-core/tests/conftest.py @@ -0,0 +1,456 @@ +""" +Pytest configuration for samples gated tests. + +This file automatically loads environment variables from .env file +and provides shared test fixtures. +""" + +import json +import logging +import os +import socket +import subprocess +import sys +import time +from pathlib import Path +from typing import Any, Dict, Optional + +import pytest +import requests +from dotenv import load_dotenv + +# Load .env file from project root or current directory +# conftest.py is at: src/adapter/python/tests/gated_test/conftest.py +# Need to go up 6 levels to reach project root +project_root = Path(__file__).parent.parent +env_paths = [ + project_root / ".env", # Project root + Path.cwd() / ".env", # Current working directory + Path(__file__).parent / ".env", # Test directory +] + +for env_path in env_paths: + if env_path.exists(): + load_dotenv(env_path, override=True) + break + +# Setup logging +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + handlers=[logging.StreamHandler(sys.stdout)], +) +logger = logging.getLogger(__name__) + + +class AgentTestClient: + """Generic test client for all agent types.""" + + def __init__( + self, + sample_name: str, + script_name: str, + endpoint: str = "/responses", # Default endpoint + base_url: Optional[str] = None, + env_vars: Optional[Dict[str, str]] = None, + timeout: int = 120, + port: Optional[int] = None, + ): + self.sample_name = sample_name + self.script_name = script_name + self.endpoint = endpoint + self.timeout = timeout + + # Setup paths + self.project_root = project_root # Use already defined project_root + self.sample_dir = self.project_root / "samples" / sample_name + self.original_dir = os.getcwd() + + # Determine port assignment priority: explicit param > env override > random + if env_vars and env_vars.get("DEFAULT_AD_PORT"): + self.port = int(env_vars["DEFAULT_AD_PORT"]) + elif port is not None: + self.port = port + else: + self.port = self._find_free_port() + + # Configure base URL for client requests + self.base_url = (base_url or f"http://127.0.0.1:{self.port}").rstrip("/") + + # Setup environment + # Get Agent Framework configuration (new format) + azure_ai_project_endpoint = os.getenv("AZURE_AI_PROJECT_ENDPOINT", "") + azure_ai_model_deployment = os.getenv("AZURE_AI_MODEL_DEPLOYMENT_NAME", "") + agent_project_name = os.getenv("AGENT_PROJECT_NAME", "") + + # Get legacy Azure OpenAI configuration (for backward compatibility) + main_api_key = os.getenv("AZURE_OPENAI_API_KEY", "") + main_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT", "") + main_api_version = os.getenv("OPENAI_API_VERSION", 
"2025-03-01-preview") + embedding_api_version = os.getenv("AZURE_OPENAI_EMBEDDINGS_API_VERSION", "2024-02-01") + + self.env_vars = { + "PYTHONIOENCODING": "utf-8", + "LANG": "C.UTF-8", + "LC_ALL": "C.UTF-8", + "PYTHONUNBUFFERED": "1", + # Agent Framework environment variables (new) + "AZURE_AI_PROJECT_ENDPOINT": azure_ai_project_endpoint, + "AZURE_AI_MODEL_DEPLOYMENT_NAME": azure_ai_model_deployment, + "AGENT_PROJECT_NAME": agent_project_name, + # Legacy Azure OpenAI environment variables (for backward compatibility) + "AZURE_OPENAI_API_KEY": main_api_key, + "AZURE_OPENAI_ENDPOINT": main_endpoint, + "AZURE_OPENAI_CHAT_DEPLOYMENT_NAME": os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", ""), + "OPENAI_API_VERSION": main_api_version, + } + + # Auto-configure embeddings to use main config if not explicitly set + # This allows using the same Azure OpenAI resource for both chat and embeddings + self.env_vars["AZURE_OPENAI_EMBEDDINGS_API_KEY"] = os.getenv( + "AZURE_OPENAI_EMBEDDINGS_API_KEY", + main_api_key, # Fallback to main API key + ) + self.env_vars["AZURE_OPENAI_EMBEDDINGS_ENDPOINT"] = os.getenv( + "AZURE_OPENAI_EMBEDDINGS_ENDPOINT", + main_endpoint, # Fallback to main endpoint + ) + self.env_vars["AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME"] = os.getenv( + "AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME", "" + ) + self.env_vars["AZURE_OPENAI_EMBEDDINGS_API_VERSION"] = os.getenv( + "AZURE_OPENAI_EMBEDDINGS_API_VERSION", + embedding_api_version, # Fallback to main API version + ) + self.env_vars["AZURE_OPENAI_EMBEDDINGS_MODEL_NAME"] = os.getenv( + "AZURE_OPENAI_EMBEDDINGS_MODEL_NAME", + os.getenv("AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME", ""), # Fallback to deployment name + ) + + if env_vars: + self.env_vars.update(env_vars) + + # Ensure server picks the dynamically assigned port and clients know how to reach it + self.env_vars.setdefault("DEFAULT_AD_PORT", str(self.port)) + self.env_vars.setdefault("AGENT_BASE_URL", self.base_url) + + self.process = None + self.session = requests.Session() + + @staticmethod + def _find_free_port() -> int: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.bind(("127.0.0.1", 0)) + return sock.getsockname()[1] + + def setup(self): + """Setup test environment.""" + os.chdir(self.sample_dir) + + logger.info( + "Configured %s to listen on %s", + self.sample_name, + f"{self.base_url}{self.endpoint}", + ) + + # Validate critical environment variables + # For Agent Framework samples, check new env vars first + required_vars = [] + if "agent_framework" in self.sample_name: + # Agent Framework samples use new format + required_vars = [ + "AZURE_AI_PROJECT_ENDPOINT", + "AZURE_AI_MODEL_DEPLOYMENT_NAME", + ] + else: + # Legacy samples use old format + required_vars = [ + "AZURE_OPENAI_API_KEY", + "AZURE_OPENAI_ENDPOINT", + "AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", + ] + + missing_vars = [] + for var in required_vars: + value = self.env_vars.get(var) or os.getenv(var) + if not value: + missing_vars.append(var) + else: + logger.debug(f"Environment variable {var} is set") + + if missing_vars: + logger.error(f"Missing required environment variables: {', '.join(missing_vars)}") + logger.error(f"Sample name: {self.sample_name}") + if "agent_framework" in self.sample_name: + logger.error("For Agent Framework samples, please set:") + logger.error(" - AZURE_AI_PROJECT_ENDPOINT") + logger.error(" - AZURE_AI_MODEL_DEPLOYMENT_NAME") + pytest.skip(f"Missing required environment variables: {', '.join(missing_vars)}") + + # Set environment variables + for key, value in 
self.env_vars.items(): + if value: # Only set if value is not empty + os.environ[key] = value + + # Start server + self.start_server() + + # Wait for server to be ready + if not self.wait_for_ready(): + self.cleanup() + logger.error(f"{self.sample_name} server failed to start") + pytest.skip(f"{self.sample_name} server failed to start") + + def start_server(self): + """Start the agent server.""" + logger.info( + "Starting %s server in %s on port %s", + self.sample_name, + self.sample_dir, + self.port, + ) + + env = os.environ.copy() + env.update(self.env_vars) + env["DEFAULT_AD_PORT"] = str(self.port) + env.setdefault("AGENT_BASE_URL", self.base_url) + + # Use unbuffered output to capture logs in real-time + self.process = subprocess.Popen( + [sys.executable, "-u", self.script_name], # -u for unbuffered output + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, # Merge stderr into stdout + env=env, + text=True, + encoding="utf-8", + errors="replace", + bufsize=1, # Line buffered + ) + logger.info(f"Server process started with PID {self.process.pid}") + + def wait_for_ready(self, max_attempts: int = 30, delay: float = 1.0) -> bool: + """Wait for server to be ready.""" + logger.info( + "Waiting for server to be ready at %s (max %s attempts)", + f"{self.base_url}{self.endpoint}", + max_attempts, + ) + + for i in range(max_attempts): + # Check process status first + if self.process.poll() is not None: + # Process has terminated - read all output + stdout, stderr = self.process.communicate() + logger.error(f"Server terminated with code {self.process.returncode}") + logger.error("=== SERVER OUTPUT ===") + if stdout: + logger.error(stdout) + if stderr: + logger.error("=== STDERR ===") + logger.error(stderr) + return False + + # Read and log any available output + self._log_server_output() + + # Check health endpoint + try: + health_response = self.session.get(f"{self.base_url}/readiness", timeout=2) + if health_response.status_code == 200: + logger.info(f"Server ready after {i + 1} attempts") + return True + else: + logger.debug(f"Health check attempt {i + 1}: status {health_response.status_code}") + except Exception as e: + logger.debug(f"Health check attempt {i + 1} failed: {e}") + # After several failed attempts, show server output for debugging + if i > 5 and i % 5 == 0: + logger.warning(f"Server still not ready after {i + 1} attempts, checking output...") + self._log_server_output(force=True) + + time.sleep(delay) + + # Timeout reached - dump all server output + logger.error(f"Server failed to start within {max_attempts} attempts") + self._dump_server_output() + return False + + def cleanup(self): + """Cleanup resources.""" + if self.process: + try: + self.process.terminate() + self.process.wait(timeout=5) + except Exception: + self.process.kill() + + os.chdir(self.original_dir) + + def request( + self, + input_data: Any, + stream: bool = False, + timeout: Optional[int] = None, + debug: bool = False, + ) -> requests.Response: + """Send request to the server.""" + url = f"{self.base_url}{self.endpoint}" + timeout = timeout or self.timeout + + payload = {"input": input_data, "stream": stream} + + headers = { + "Content-Type": "application/json; charset=utf-8", + "Accept": "application/json; charset=utf-8", + } + + if debug: + logger.info(f">>> POST {url}") + logger.info(f">>> Headers: {headers}") + logger.info(f">>> Payload: {json.dumps(payload, indent=2)}") + + try: + response = self.session.post(url, json=payload, headers=headers, timeout=timeout, stream=stream) + + if debug: + 
logger.info(f"<<< Status: {response.status_code}") + logger.info(f"<<< Headers: {dict(response.headers)}") + + # For non-streaming responses, log the body + if not stream: + try: + content = response.json() + logger.info(f"<<< Body: {json.dumps(content, indent=2)}") + except (ValueError, requests.exceptions.JSONDecodeError): + logger.info(f"<<< Body: {response.text}") + + return response + + except Exception as e: + logger.error(f"Request failed: {e}") + self._log_server_output() + raise + + def _log_server_output(self, force=False): + """Log server output for debugging.""" + if self.process and self.process.poll() is None and hasattr(self.process, "stdout"): + try: + import select + + if hasattr(select, "select"): + # Use non-blocking read + ready, _, _ = select.select([self.process.stdout], [], [], 0.1) + if ready: + # Read available lines without blocking + import fcntl + import os as os_module + + # Set non-blocking mode + fd = self.process.stdout.fileno() + fl = fcntl.fcntl(fd, fcntl.F_GETFL) + fcntl.fcntl(fd, fcntl.F_SETFL, fl | os_module.O_NONBLOCK) + + try: + while True: + line = self.process.stdout.readline() + if not line: + break + line = line.strip() + if line: + if force or any( + keyword in line.lower() + for keyword in [ + "error", + "exception", + "traceback", + "failed", + ] + ): + logger.error(f"Server output: {line}") + else: + logger.info(f"Server output: {line}") + except BlockingIOError: + pass # No more data available + except Exception as e: + if force: + logger.debug(f"Could not read server output: {e}") + + def _dump_server_output(self): + """Dump all remaining server output.""" + if self.process: + try: + # Try to read any remaining output + if self.process.poll() is None: + # Process still running, terminate and get output + self.process.terminate() + try: + stdout, stderr = self.process.communicate(timeout=5) + except subprocess.TimeoutExpired: + self.process.kill() + stdout, stderr = self.process.communicate() + else: + stdout, stderr = self.process.communicate() + + if stdout: + logger.error(f"=== FULL SERVER OUTPUT ===\n{stdout}") + if stderr: + logger.error(f"=== FULL SERVER STDERR ===\n{stderr}") + except Exception as e: + logger.error(f"Failed to dump server output: {e}") + + +@pytest.fixture +def basic_client(): + """Fixture for basic agent tests.""" + client = AgentTestClient( + sample_name="agent_framework/basic_simple", + script_name="minimal_example.py", + endpoint="/responses", + timeout=60, + ) + client.setup() + yield client + client.cleanup() + + +@pytest.fixture +def workflow_client(): + """Fixture for workflow agent tests (reflection pattern with Worker + Reviewer).""" + client = AgentTestClient( + sample_name="agent_framework/workflow_agent_simple", + script_name="workflow_agent_simple.py", + endpoint="/responses", # Changed from /runs to /responses + timeout=600, # Increased timeout for workflow agent (reflection loop may need multiple iterations) + ) + client.setup() + yield client + client.cleanup() + + +@pytest.fixture +def mcp_client(): + """Fixture for MCP simple agent tests (uses Microsoft Learn MCP, no auth required).""" + client = AgentTestClient( + sample_name="agent_framework/mcp_simple", + script_name="mcp_simple.py", + endpoint="/responses", # Changed from /runs to /responses + timeout=120, + ) + client.setup() + yield client + client.cleanup() + + +@pytest.fixture +def mcp_apikey_client(): + """Fixture for MCP API Key agent tests (uses GitHub MCP, requires GITHUB_TOKEN).""" + client = AgentTestClient( + 
sample_name="agent_framework/mcp_apikey",
+        script_name="mcp_apikey.py",
+        endpoint="/responses",  # Changed from /runs to /responses
+        timeout=120,
+        env_vars={"GITHUB_TOKEN": os.getenv("GITHUB_TOKEN", "")},
+    )
+    client.setup()
+    yield client
+    client.cleanup()
diff --git a/sdk/ai/azure-ai-agentserver-core/tests/env-template b/sdk/ai/azure-ai-agentserver-core/tests/env-template
new file mode 100644
index 000000000000..33c60226b90b
--- /dev/null
+++ b/sdk/ai/azure-ai-agentserver-core/tests/env-template
@@ -0,0 +1,31 @@
+# ===== Agent Framework Configuration (NEW - Required for agent_framework samples) =====
+# Required for all Agent Framework samples (basic_simple, mcp_simple, mcp_apikey, workflow_agent_simple)
+AZURE_AI_PROJECT_ENDPOINT=https://<your-resource>.region.project.azure.ai/
+AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
+
+# Optional: Azure AI Project resource ID for telemetry
+# Format: /subscriptions/<subscription-id>/resourceGroups/<resource-group>/providers/Microsoft.MachineLearningServices/workspaces/<workspace-name>
+AGENT_PROJECT_NAME=
+
+# GitHub Token for MCP samples (mcp_simple, mcp_apikey)
+# Get from: https://github.com/settings/tokens
+GITHUB_TOKEN=your-github-token-here
+
+# ===== Legacy Azure OpenAI Configuration (for backward compatibility) =====
+AZURE_OPENAI_API_KEY=your-api-key-here
+AZURE_OPENAI_ENDPOINT=https://your-endpoint.openai.azure.com/
+AZURE_OPENAI_CHAT_DEPLOYMENT_NAME=gpt-4o
+OPENAI_API_VERSION=2025-03-01-preview
+
+# Azure OpenAI Embeddings Configuration (for RAG tests)
+# If not set, these fall back to the Chat API values above
+AZURE_OPENAI_EMBEDDINGS_API_KEY=your-embeddings-api-key-here
+AZURE_OPENAI_EMBEDDINGS_ENDPOINT=https://your-endpoint.openai.azure.com/
+AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME=text-embedding-ada-002
+AZURE_OPENAI_EMBEDDINGS_API_VERSION=2025-03-01-preview
+
+# Note:
+# - Copy this file to .env and fill in your actual values
+# - Never commit the .env file to git (it's in .gitignore)
+# - In CI/CD, these values are loaded from GitHub Secrets
+
diff --git a/sdk/ai/azure-ai-agentserver-core/tests/test_custom.py b/sdk/ai/azure-ai-agentserver-core/tests/test_custom.py
new file mode 100644
index 000000000000..f8f2075e22e5
--- /dev/null
+++ b/sdk/ai/azure-ai-agentserver-core/tests/test_custom.py
@@ -0,0 +1,298 @@
+#!/usr/bin/env python3
+"""
+Custom agents samples gated test.
+
+This module tests all Custom agent samples with parametrized test cases.
+Each sample gets its own test class with multiple test scenarios.
+"""
+
+import os
+import socket
+import subprocess
+import sys
+import time
+from pathlib import Path
+from typing import Any
+
+import pytest
+import requests
+
+# Add the project root to the path
+project_root = Path(__file__).parent.parent
+sys.path.insert(0, str(project_root))
+
+
+class BaseCustomAgentTest:
+    """Base class for Custom agent sample tests with common utilities."""
+
+    def __init__(self, sample_name: str, script_name: str):
+        """
+        Initialize test configuration.
+
+        Args:
+            sample_name: Name of the sample directory (e.g., 'simple_mock_agent')
+            script_name: Name of the Python script to run (e.g., 'custom_mock_agent_test.py')
+        """
+        self.sample_name = sample_name
+        self.script_name = script_name
+        self.sample_dir = project_root / "samples" / sample_name
+        self.port = self._find_free_port()
+        self.base_url = f"http://127.0.0.1:{self.port}"
+        self.responses_endpoint = f"{self.base_url}/responses"
+        self.process = None
+        self.original_dir = os.getcwd()
+
+    def setup(self):
+        """Set up environment (dependencies are pre-installed in CI/CD)."""
+        os.chdir(self.sample_dir)
+
+    def start_server(self):
+        """Start the agent server in background."""
+        # Prepare environment with UTF-8 encoding to handle emoji in agent output
+        env = os.environ.copy()
+        env["PYTHONIOENCODING"] = "utf-8"
+        env["DEFAULT_AD_PORT"] = str(self.port)
+        env.setdefault("AGENT_BASE_URL", self.base_url)
+
+        # Use subprocess.DEVNULL to avoid buffering issues; rerun the script manually to inspect its output
+        self.process = subprocess.Popen(
+            [sys.executable, self.script_name],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+            env=env,
+        )
+
+    def wait_for_ready(self, max_attempts: int = 30, delay: float = 1.0) -> bool:
+        """Wait for the server to be ready."""
+        for _i in range(max_attempts):
+            # Check if process is still running
+            if self.process and self.process.poll() is not None:
+                # Process has terminated
+                print(f"Server process terminated unexpectedly with exit code {self.process.returncode}")
+                return False
+
+            try:
+                response = requests.get(f"{self.base_url}/readiness", timeout=1)
+                if response.status_code == 200:
+                    return True
+            except requests.exceptions.RequestException:
+                pass
+
+            try:
+                response = requests.get(self.base_url, timeout=1)
+                if response.status_code in [200, 404]:
+                    return True
+            except requests.exceptions.RequestException:
+                pass
+
+            time.sleep(delay)
+
+        # Server didn't start; stop it. Output was sent to DEVNULL, so there are no captured logs to print.
+        if self.process:
+            self.process.terminate()
+            self.process.wait(timeout=5)
+            print(f"Server failed to start after {max_attempts} attempts (exit code: {self.process.returncode})")
+
+        return False
+
+    def send_request(self, input_data: Any, stream: bool = False, timeout: int = 30) -> requests.Response:
+        """
+        Send a request to the agent.
+ + Args: + input_data: Input to send (string or structured message) + stream: Whether to use streaming + timeout: Request timeout in seconds + + Returns: + Response object + """ + payload = { + "agent": {"name": "mock_agent", "type": "agent_reference"}, + "input": input_data, + "stream": stream, + } + + # Note: Only set stream parameter for requests.post if streaming is requested + # Otherwise, let requests handle response body reading with timeout + if stream: + return requests.post(self.responses_endpoint, json=payload, timeout=timeout, stream=True) + else: + return requests.post(self.responses_endpoint, json=payload, timeout=timeout) + + def cleanup(self): + """Clean up resources and restore directory.""" + if self.process: + try: + self.process.terminate() + self.process.wait(timeout=5) + except Exception: + self.process.kill() + + os.chdir(self.original_dir) + + @staticmethod + def _find_free_port() -> int: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.bind(("127.0.0.1", 0)) + return sock.getsockname()[1] + + +class TestSimpleMockAgent: + """Test suite for Simple Mock Agent - uses shared server.""" + + @pytest.fixture(scope="class") + def mock_server(self): + """Shared server instance for all mock agent tests.""" + tester = BaseCustomAgentTest("simple_mock_agent", "custom_mock_agent_test.py") + tester.setup() + tester.start_server() + + if not tester.wait_for_ready(): + tester.cleanup() + pytest.fail("Simple Mock Agent server failed to start") + + yield tester + tester.cleanup() + + @pytest.mark.parametrize( + "input_text,expected_keywords,description", + [ + ("Hello, mock agent!", ["mock"], "simple_greeting"), + ("Test message", ["mock"], "test_message"), + ("What can you do?", ["mock"], "capability_query"), + ], + ) + def test_mock_agent_queries(self, mock_server, input_text: str, expected_keywords: list, description: str): + """Test mock agent with various queries.""" + response = mock_server.send_request(input_text, stream=False) + + assert response.status_code == 200, f"Expected 200, got {response.status_code}" + + response_text = response.text.lower() + found_keyword = any(kw.lower() in response_text for kw in expected_keywords) + assert found_keyword, f"Expected one of {expected_keywords} in response" + + def test_streaming_response(self, mock_server): + """Test mock agent with streaming response.""" + response = mock_server.send_request("Hello, streaming test!", stream=True) + + assert response.status_code == 200, f"Expected 200, got {response.status_code}" + + # Verify we can read streaming data + lines_read = 0 + for line in response.iter_lines(): + if line: + lines_read += 1 + if lines_read >= 3: + break + + assert lines_read > 0, "Expected to read at least one line from streaming response" + + +@pytest.mark.skip +class TestMcpSimple: + """Test suite for Custom MCP Simple - uses Microsoft Learn MCP.""" + + @pytest.fixture(scope="class") + def mcp_server(self): + """Shared server instance for all MCP Simple tests.""" + tester = BaseCustomAgentTest("mcp_simple", "mcp_simple.py") + tester.setup() + tester.start_server() + + if not tester.wait_for_ready(): + tester.cleanup() + pytest.fail("MCP Simple server failed to start") + + yield tester + tester.cleanup() + + @pytest.mark.parametrize( + "input_text,expected_keywords,description", + [ + ( + "What Azure services can I use for image generation?", + ["image", "generation", "azure"], + "image_generation", + ), + ( + "Show me documentation about Azure App Service", + ["app", "service", "azure"], + 
"app_service_docs", + ), + ], + ) + def test_mcp_operations(self, mcp_server, input_text: str, expected_keywords: list, description: str): + """Test MCP Simple with Microsoft Learn queries.""" + response = mcp_server.send_request(input_text, stream=False, timeout=60) + + assert response.status_code == 200, f"Expected 200, got {response.status_code}" + + response_text = response.text.lower() + found_keyword = any(kw.lower() in response_text for kw in expected_keywords) + assert found_keyword, f"Expected one of {expected_keywords} in response" + + +@pytest.mark.skip +class TestBilingualWeekendPlanner: + """Test suite for the bilingual weekend planner custom sample.""" + + @pytest.fixture(scope="class") + def weekend_planner_server(self): + """Shared server fixture for bilingual weekend planner tests.""" + pytest.importorskip("azure.identity") + pytest.importorskip("agents") + pytest.importorskip("openai") + + tester = BaseCustomAgentTest("bilingual_weekend_planner", "main.py") + tester.setup() + + env_overrides = { + "API_HOST": "github", + "GITHUB_TOKEN": os.environ.get("GITHUB_TOKEN", "unit-test-token"), + "GITHUB_OPENAI_BASE_URL": os.environ.get("GITHUB_OPENAI_BASE_URL", "http://127.0.0.1:65535"), + "WEEKEND_PLANNER_MODE": "container", + } + original_env = {key: os.environ.get(key) for key in env_overrides} + os.environ.update(env_overrides) + + try: + tester.start_server() + + if not tester.wait_for_ready(max_attempts=60, delay=1.0): + tester.cleanup() + pytest.fail("Bilingual weekend planner server failed to start") + + yield tester + finally: + tester.cleanup() + for key, value in original_env.items(): + if value is None: + os.environ.pop(key, None) + else: + os.environ[key] = value + + def test_offline_planner_response(self, weekend_planner_server): + """Verify the planner responds with a graceful error when the model is unreachable.""" + response = weekend_planner_server.send_request("Plan my weekend in Seattle", stream=False, timeout=60) + + assert response.status_code == 200, f"Expected 200, got {response.status_code}" + + response_text = response.text.lower() + assert "error running agent" in response_text + + def test_streaming_offline_response(self, weekend_planner_server): + """Verify streaming responses deliver data even when the model call fails.""" + response = weekend_planner_server.send_request("Planifica mi fin de semana en Madrid", stream=True, timeout=60) + + assert response.status_code == 200, f"Expected 200, got {response.status_code}" + + lines_read = 0 + for line in response.iter_lines(): + if line: + lines_read += 1 + if lines_read >= 3: + break + + assert lines_read > 0, "Expected to read at least one line from streaming response" diff --git a/sdk/ai/ci.yml b/sdk/ai/ci.yml index 20ceb03e1897..8d23540df136 100644 --- a/sdk/ai/ci.yml +++ b/sdk/ai/ci.yml @@ -63,6 +63,8 @@ extends: safeName: azureaiagents - name: azure-ai-voicelive safeName: azureaivoicelive + - name: azure-ai-agentserver-core + safeName: azureaiagentservercore # These packages are deprecated: # - name: azure-ai-generative # safeName: azureaigenerative diff --git a/shared_requirements.txt b/shared_requirements.txt index f6f6bab34b2e..678a161b3a06 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -83,4 +83,8 @@ prompty Jinja2 azure-ai-language-conversations azure-ai-textanalytics -azure-confidentialledger-certificate \ No newline at end of file +azure-confidentialledger-certificate +azure-ai-projects +starlette +uvicorn +opentelemetry-exporter-otlp-proto-http \ No newline at end of 
file