diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/_meta.json b/sdk/documentintelligence/azure-ai-documentintelligence/_meta.json new file mode 100644 index 000000000000..4c68700f83a8 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/_meta.json @@ -0,0 +1,6 @@ +{ + "commit": "ad3000cb1377aaf2556700bc5a40dd771ac1ce09", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "typespec_src": "specification/ai/DocumentIntelligence", + "@azure-tools/typespec-python": "0.36.1" +} \ No newline at end of file diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/__init__.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/__init__.py index 5d62b4bb7381..c474f6a4267e 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/__init__.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/__init__.py @@ -5,21 +5,30 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._patch import DocumentIntelligenceClient -from ._patch import DocumentIntelligenceAdministrationClient +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import DocumentIntelligenceClient # type: ignore +from ._client import DocumentIntelligenceAdministrationClient # type: ignore from ._version import VERSION __version__ = VERSION - -from ._patch import AnalyzeDocumentLROPoller +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] from ._patch import patch_sdk as _patch_sdk __all__ = [ - "AnalyzeDocumentLROPoller", "DocumentIntelligenceClient", "DocumentIntelligenceAdministrationClient", ] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_model_base.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_model_base.py index 9d401b0cf012..e6a2730f9276 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_model_base.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_model_base.py @@ -5,7 +5,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=protected-access, arguments-differ, signature-differs, broad-except, too-many-lines +# pylint: disable=protected-access, broad-except import copy import calendar @@ -574,7 +574,7 @@ def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: def copy(self) -> "Model": return Model(self.__dict__) - def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: # pylint: disable=unused-argument + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' @@ -585,8 +585,8 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: # pylint: di annotations = { k: v for mro_class in mros - if hasattr(mro_class, "__annotations__") # pylint: disable=no-member - for k, v in mro_class.__annotations__.items() # pylint: disable=no-member + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() } for attr, rf in attr_to_rest_field.items(): rf._module = cls.__module__ @@ -601,8 +601,8 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: # pylint: di def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: for base in cls.__bases__: - if hasattr(base, "__mapping__"): # pylint: disable=no-member - base.__mapping__[discriminator or cls.__name__] = cls # type: ignore # pylint: disable=no-member + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore @classmethod def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: @@ -613,7 +613,7 @@ def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField @classmethod def _deserialize(cls, data, exist_discriminators): - if not hasattr(cls, "__mapping__"): # pylint: disable=no-member + if not hasattr(cls, "__mapping__"): return cls(data) discriminator = cls._get_discriminator(exist_discriminators) if discriminator is None: @@ -633,7 +633,7 @@ def _deserialize(cls, data, exist_discriminators): discriminator_value = data.find(xml_name).text # pyright: ignore else: discriminator_value = data.get(discriminator._rest_name) - mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore return mapped_cls._deserialize(data, exist_discriminators) def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/__init__.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/__init__.py index 98b551bb5d27..63b6bbb81ed8 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/__init__.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/__init__.py @@ -5,15 +5,23 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._patch import DocumentIntelligenceClientOperationsMixin -from ._patch import DocumentIntelligenceAdministrationClientOperationsMixin +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import +from ._operations import DocumentIntelligenceClientOperationsMixin # type: ignore +from ._operations import DocumentIntelligenceAdministrationClientOperationsMixin # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ "DocumentIntelligenceClientOperationsMixin", "DocumentIntelligenceAdministrationClientOperationsMixin", ] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_operations.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_operations.py index 4e00f61ddfd2..d08e43c4004a 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_operations.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_operations.py @@ -747,7 +747,7 @@ def begin_analyze_document( :type model_id: str :param analyze_request: Analyze request parameters. Default value is None. :type analyze_request: ~azure.ai.documentintelligence.models.AnalyzeDocumentRequest - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -799,7 +799,7 @@ def begin_analyze_document( :type model_id: str :param analyze_request: Analyze request parameters. Default value is None. :type analyze_request: JSON - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -851,7 +851,7 @@ def begin_analyze_document( :type model_id: str :param analyze_request: Analyze request parameters. Default value is None. :type analyze_request: IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -904,7 +904,7 @@ def begin_analyze_document( AnalyzeDocumentRequest, JSON, IO[bytes] Default value is None. :type analyze_request: ~azure.ai.documentintelligence.models.AnalyzeDocumentRequest or JSON or IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -1101,7 +1101,7 @@ def begin_analyze_batch_documents( :type model_id: str :param analyze_batch_request: Analyze batch request parameters. 
Default value is None. :type analyze_batch_request: ~azure.ai.documentintelligence.models.AnalyzeBatchDocumentsRequest - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -1153,7 +1153,7 @@ def begin_analyze_batch_documents( :type model_id: str :param analyze_batch_request: Analyze batch request parameters. Default value is None. :type analyze_batch_request: JSON - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -1205,7 +1205,7 @@ def begin_analyze_batch_documents( :type model_id: str :param analyze_batch_request: Analyze batch request parameters. Default value is None. :type analyze_batch_request: IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -1258,7 +1258,7 @@ def begin_analyze_batch_documents( AnalyzeBatchDocumentsRequest, JSON, IO[bytes] Default value is None. :type analyze_batch_request: ~azure.ai.documentintelligence.models.AnalyzeBatchDocumentsRequest or JSON or IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -1579,7 +1579,7 @@ def begin_classify_document( :keyword split: Document splitting mode. Known values are: "auto", "none", and "perPage". Default value is None. :paramtype split: str or ~azure.ai.documentintelligence.models.SplitMode - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. @@ -1615,7 +1615,7 @@ def begin_classify_document( :keyword split: Document splitting mode. Known values are: "auto", "none", and "perPage". Default value is None. :paramtype split: str or ~azure.ai.documentintelligence.models.SplitMode - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. @@ -1651,7 +1651,7 @@ def begin_classify_document( :keyword split: Document splitting mode. Known values are: "auto", "none", and "perPage". Default value is None. :paramtype split: str or ~azure.ai.documentintelligence.models.SplitMode - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. 
:paramtype pages: str :keyword content_type: Body Parameter content-type. Content type parameter for binary body. @@ -1688,7 +1688,7 @@ def begin_classify_document( :keyword split: Document splitting mode. Known values are: "auto", "none", and "perPage". Default value is None. :paramtype split: str or ~azure.ai.documentintelligence.models.SplitMode - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :return: An instance of LROPoller that returns AnalyzeResult. The AnalyzeResult is compatible diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_patch.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_patch.py index 63877a8c9830..f7dd32510333 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_patch.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_operations/_patch.py @@ -6,632 +6,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -import sys -import re -from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, Mapping, cast, overload +from typing import List -from azure.core.pipeline import PipelineResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.polling.base_polling import LROBasePolling -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from ._operations import ( - DocumentIntelligenceClientOperationsMixin as GeneratedDIClientOps, - DocumentIntelligenceAdministrationClientOperationsMixin as GeneratedDIAdminClientOps, -) -from .. import models as _models -from .._model_base import _deserialize - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -PollingReturnType_co = TypeVar("PollingReturnType_co", covariant=True) -_FINISHED = frozenset(["succeeded", "canceled", "failed", "completed"]) - - -def _parse_operation_id(operation_location_header): - regex = "[^:]+://[^/]+/documentintelligence/.+/([^?/]+)" - return re.match(regex, operation_location_header).group(1) - -def _finished(status) -> bool: - if hasattr(status, "value"): - status = status.value - return str(status).lower() in _FINISHED - - -class AnalyzeDocumentLROPoller(LROPoller[PollingReturnType_co]): - @property - def details(self) -> Mapping[str, Any]: - """Returns metadata associated with the long-running operation. - - :return: Returns metadata associated with the long-running operation. 
- :rtype: Mapping[str, Any] - """ - return { - "operation_id": _parse_operation_id( - self.polling_method()._initial_response.http_response.headers["Operation-Location"] # type: ignore # pylint: disable=protected-access - ), - } - - @classmethod - def from_continuation_token( - cls, polling_method: PollingMethod[PollingReturnType_co], continuation_token: str, **kwargs: Any - ) -> "AnalyzeDocumentLROPoller": - ( - client, - initial_response, - deserialization_callback, - ) = polling_method.from_continuation_token(continuation_token, **kwargs) - - return cls(client, initial_response, deserialization_callback, polling_method) - - -class AnalyzeBatchDocumentsLROPollingMethod(LROBasePolling): - def finished(self) -> bool: - """Is this polling finished? - - :return: Whether polling is finished or not. - :rtype: bool - """ - return _finished(self.status()) - - -class DocumentIntelligenceAdministrationClientOperationsMixin( - GeneratedDIAdminClientOps -): # pylint: disable=name-too-long - @distributed_trace - def begin_build_classifier( - self, build_request: Union[_models.BuildDocumentClassifierRequest, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.DocumentClassifierDetails]: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DocumentClassifierDetails] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._build_classifier_initial( - build_request=build_request, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs, - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.DocumentClassifierDetails, response.json()) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[_models.DocumentClassifierDetails].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[_models.DocumentClassifierDetails]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace - def begin_build_document_model( - self, build_request: Union[_models.BuildDocumentModelRequest, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.DocumentModelDetails]: - _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DocumentModelDetails] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._build_document_model_initial( - build_request=build_request, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs, - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.DocumentModelDetails, response.json()) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[_models.DocumentModelDetails].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[_models.DocumentModelDetails]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace - def begin_compose_model( - self, compose_request: Union[_models.ComposeDocumentModelRequest, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.DocumentModelDetails]: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DocumentModelDetails] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._compose_model_initial( - compose_request=compose_request, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs, - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.DocumentModelDetails, response.json()) - if cls: - return cls(pipeline_response, deserialized, 
response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[_models.DocumentModelDetails].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[_models.DocumentModelDetails]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace - def begin_copy_model_to( - self, model_id: str, copy_to_request: Union[_models.CopyAuthorization, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.DocumentModelDetails]: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DocumentModelDetails] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._copy_model_to_initial( - model_id=model_id, - copy_to_request=copy_to_request, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs, - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.DocumentModelDetails, response.json()) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller[_models.DocumentModelDetails].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller[_models.DocumentModelDetails]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - -class DocumentIntelligenceClientOperationsMixin(GeneratedDIClientOps): # pylint: disable=name-too-long - @overload - def begin_analyze_document( - self, - model_id: str, - analyze_request: Optional[_models.AnalyzeDocumentRequest] = None, - *, - pages: Optional[str] = None, - locale: Optional[str] = None, - string_index_type: Optional[Union[str, _models.StringIndexType]] = 
None, - features: Optional[List[Union[str, _models.DocumentAnalysisFeature]]] = None, - query_fields: Optional[List[str]] = None, - output_content_format: Optional[Union[str, _models.ContentFormat]] = None, - output: Optional[List[Union[str, _models.AnalyzeOutputOption]]] = None, - content_type: str = "application/json", - **kwargs: Any, - ) -> AnalyzeDocumentLROPoller[_models.AnalyzeResult]: - """Analyzes document with document model. - - :param model_id: Unique document model name. Required. - :type model_id: str - :param analyze_request: Analyze request parameters. Default value is None. - :type analyze_request: ~azure.ai.documentintelligence.models.AnalyzeDocumentRequest - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is - None. - :paramtype pages: str - :keyword locale: Locale hint for text recognition and document analysis. Value may contain - only - the language code (ex. "en", "fr") or BCP 47 language tag (ex. "en-US"). Default value is - None. - :paramtype locale: str - :keyword string_index_type: Method used to compute string offset and length. Known values are: - "textElements", "unicodeCodePoint", and "utf16CodeUnit". Default value is None. - :paramtype string_index_type: str or ~azure.ai.documentintelligence.models.StringIndexType - :keyword features: List of optional analysis features. Default value is None. - :paramtype features: list[str or ~azure.ai.documentintelligence.models.DocumentAnalysisFeature] - :keyword query_fields: List of additional fields to extract. Ex. "NumberOfGuests,StoreNumber". - Default value is None. - :paramtype query_fields: list[str] - :keyword output_content_format: Format of the analyze result top-level content. Known values - are: "text" and "markdown". Default value is None. - :paramtype output_content_format: str or ~azure.ai.documentintelligence.models.ContentFormat - :keyword output: Additional outputs to generate during analysis. Default value is None. - :paramtype output: list[str or ~azure.ai.documentintelligence.models.AnalyzeOutputOption] - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AnalyzeDocumentLROPoller that returns AnalyzeResult. The AnalyzeResult is compatible - with MutableMapping - :rtype: AnalyzeDocumentLROPoller[~azure.ai.documentintelligence.models.AnalyzeResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_analyze_document( - self, - model_id: str, - analyze_request: Optional[JSON] = None, - *, - pages: Optional[str] = None, - locale: Optional[str] = None, - string_index_type: Optional[Union[str, _models.StringIndexType]] = None, - features: Optional[List[Union[str, _models.DocumentAnalysisFeature]]] = None, - query_fields: Optional[List[str]] = None, - output_content_format: Optional[Union[str, _models.ContentFormat]] = None, - output: Optional[List[Union[str, _models.AnalyzeOutputOption]]] = None, - content_type: str = "application/json", - **kwargs: Any, - ) -> AnalyzeDocumentLROPoller[_models.AnalyzeResult]: - """Analyzes document with document model. - - :param model_id: Unique document model name. Required. - :type model_id: str - :param analyze_request: Analyze request parameters. Default value is None. - :type analyze_request: JSON - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is - None. 
- :paramtype pages: str - :keyword locale: Locale hint for text recognition and document analysis. Value may contain - only - the language code (ex. "en", "fr") or BCP 47 language tag (ex. "en-US"). Default value is - None. - :paramtype locale: str - :keyword string_index_type: Method used to compute string offset and length. Known values are: - "textElements", "unicodeCodePoint", and "utf16CodeUnit". Default value is None. - :paramtype string_index_type: str or ~azure.ai.documentintelligence.models.StringIndexType - :keyword features: List of optional analysis features. Default value is None. - :paramtype features: list[str or ~azure.ai.documentintelligence.models.DocumentAnalysisFeature] - :keyword query_fields: List of additional fields to extract. Ex. "NumberOfGuests,StoreNumber". - Default value is None. - :paramtype query_fields: list[str] - :keyword output_content_format: Format of the analyze result top-level content. Known values - are: "text" and "markdown". Default value is None. - :paramtype output_content_format: str or ~azure.ai.documentintelligence.models.ContentFormat - :keyword output: Additional outputs to generate during analysis. Default value is None. - :paramtype output: list[str or ~azure.ai.documentintelligence.models.AnalyzeOutputOption] - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AnalyzeDocumentLROPoller that returns AnalyzeResult. The AnalyzeResult is compatible - with MutableMapping - :rtype: AnalyzeDocumentLROPoller[~azure.ai.documentintelligence.models.AnalyzeResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_analyze_document( - self, - model_id: str, - analyze_request: Optional[IO[bytes]] = None, - *, - pages: Optional[str] = None, - locale: Optional[str] = None, - string_index_type: Optional[Union[str, _models.StringIndexType]] = None, - features: Optional[List[Union[str, _models.DocumentAnalysisFeature]]] = None, - query_fields: Optional[List[str]] = None, - output_content_format: Optional[Union[str, _models.ContentFormat]] = None, - output: Optional[List[Union[str, _models.AnalyzeOutputOption]]] = None, - content_type: str = "application/json", - **kwargs: Any, - ) -> AnalyzeDocumentLROPoller[_models.AnalyzeResult]: - """Analyzes document with document model. - - :param model_id: Unique document model name. Required. - :type model_id: str - :param analyze_request: Analyze request parameters. Default value is None. - :type analyze_request: IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is - None. - :paramtype pages: str - :keyword locale: Locale hint for text recognition and document analysis. Value may contain - only - the language code (ex. "en", "fr") or BCP 47 language tag (ex. "en-US"). Default value is - None. - :paramtype locale: str - :keyword string_index_type: Method used to compute string offset and length. Known values are: - "textElements", "unicodeCodePoint", and "utf16CodeUnit". Default value is None. - :paramtype string_index_type: str or ~azure.ai.documentintelligence.models.StringIndexType - :keyword features: List of optional analysis features. Default value is None. - :paramtype features: list[str or ~azure.ai.documentintelligence.models.DocumentAnalysisFeature] - :keyword query_fields: List of additional fields to extract. Ex. "NumberOfGuests,StoreNumber". - Default value is None. 
- :paramtype query_fields: list[str] - :keyword output_content_format: Format of the analyze result top-level content. Known values - are: "text" and "markdown". Default value is None. - :paramtype output_content_format: str or ~azure.ai.documentintelligence.models.ContentFormat - :keyword output: Additional outputs to generate during analysis. Default value is None. - :paramtype output: list[str or ~azure.ai.documentintelligence.models.AnalyzeOutputOption] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AnalyzeDocumentLROPoller that returns AnalyzeResult. The AnalyzeResult is compatible - with MutableMapping - :rtype: AnalyzeDocumentLROPoller[~azure.ai.documentintelligence.models.AnalyzeResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_analyze_document( - self, - model_id: str, - analyze_request: Optional[Union[_models.AnalyzeDocumentRequest, JSON, IO[bytes]]] = None, - *, - pages: Optional[str] = None, - locale: Optional[str] = None, - string_index_type: Optional[Union[str, _models.StringIndexType]] = None, - features: Optional[List[Union[str, _models.DocumentAnalysisFeature]]] = None, - query_fields: Optional[List[str]] = None, - output_content_format: Optional[Union[str, _models.ContentFormat]] = None, - output: Optional[List[Union[str, _models.AnalyzeOutputOption]]] = None, - **kwargs: Any, - ) -> AnalyzeDocumentLROPoller[_models.AnalyzeResult]: - """Analyzes document with document model. - - :param model_id: Unique document model name. Required. - :type model_id: str - :param analyze_request: Analyze request parameters. Is one of the following types: - AnalyzeDocumentRequest, JSON, IO[bytes] Default value is None. - :type analyze_request: ~azure.ai.documentintelligence.models.AnalyzeDocumentRequest or JSON or - IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is - None. - :paramtype pages: str - :keyword locale: Locale hint for text recognition and document analysis. Value may contain - only - the language code (ex. "en", "fr") or BCP 47 language tag (ex. "en-US"). Default value is - None. - :paramtype locale: str - :keyword string_index_type: Method used to compute string offset and length. Known values are: - "textElements", "unicodeCodePoint", and "utf16CodeUnit". Default value is None. - :paramtype string_index_type: str or ~azure.ai.documentintelligence.models.StringIndexType - :keyword features: List of optional analysis features. Default value is None. - :paramtype features: list[str or ~azure.ai.documentintelligence.models.DocumentAnalysisFeature] - :keyword query_fields: List of additional fields to extract. Ex. "NumberOfGuests,StoreNumber". - Default value is None. - :paramtype query_fields: list[str] - :keyword output_content_format: Format of the analyze result top-level content. Known values - are: "text" and "markdown". Default value is None. - :paramtype output_content_format: str or ~azure.ai.documentintelligence.models.ContentFormat - :keyword output: Additional outputs to generate during analysis. Default value is None. - :paramtype output: list[str or ~azure.ai.documentintelligence.models.AnalyzeOutputOption] - :return: An instance of AnalyzeDocumentLROPoller that returns AnalyzeResult. 
The AnalyzeResult is compatible - with MutableMapping - :rtype: AnalyzeDocumentLROPoller[~azure.ai.documentintelligence.models.AnalyzeResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) - cls: ClsType[_models.AnalyzeResult] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._analyze_document_initial( - model_id=model_id, - analyze_request=analyze_request, - pages=pages, - locale=locale, - string_index_type=string_index_type, - features=features, - query_fields=query_fields, - output_content_format=output_content_format, - output=output, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs, - ) - raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.AnalyzeResult, response.json().get("analyzeResult")) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return AnalyzeDocumentLROPoller[_models.AnalyzeResult].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AnalyzeDocumentLROPoller[_models.AnalyzeResult]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace - def begin_analyze_batch_documents( - self, - model_id: str, - analyze_batch_request: Optional[Union[_models.AnalyzeBatchDocumentsRequest, JSON, IO[bytes]]] = None, - *, - pages: Optional[str] = None, - locale: Optional[str] = None, - string_index_type: Optional[Union[str, _models.StringIndexType]] = None, - features: Optional[List[Union[str, _models.DocumentAnalysisFeature]]] = None, - query_fields: Optional[List[str]] = None, - output_content_format: Optional[Union[str, _models.ContentFormat]] = None, - output: Optional[List[Union[str, _models.AnalyzeOutputOption]]] = None, - **kwargs: Any, - ) -> LROPoller[_models.AnalyzeBatchResult]: - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - return super().begin_analyze_batch_documents( - model_id=model_id, - analyze_batch_request=analyze_batch_request, - pages=pages, - locale=locale, - string_index_type=string_index_type, - features=features, - query_fields=query_fields, - 
output_content_format=output_content_format, - output=output, - polling=AnalyzeBatchDocumentsLROPollingMethod(timeout=lro_delay), - **kwargs, - ) - - -__all__: List[str] = [ - "DocumentIntelligenceClientOperationsMixin", - "DocumentIntelligenceAdministrationClientOperationsMixin", -] # Add all objects you want publicly available to users at this package level +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_patch.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_patch.py index 7ec98325b614..f7dd32510333 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_patch.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_patch.py @@ -6,86 +6,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import Any, List, Union -from azure.core.credentials import AzureKeyCredential, TokenCredential -from ._client import ( - DocumentIntelligenceClient as DIClientGenerated, - DocumentIntelligenceAdministrationClient as DIAClientGenerated, -) -from ._operations._patch import AnalyzeDocumentLROPoller +from typing import List - -class DocumentIntelligenceClient(DIClientGenerated): - """DocumentIntelligenceClient. - - :param endpoint: The Document Intelligence service endpoint. Required. - :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. Is either a - AzureKeyCredential type or a TokenCredential type. Required. - :type credential: ~azure.core.credentials.AzureKeyCredential or - ~azure.core.credentials.TokenCredential - :keyword api_version: The API version to use for this operation. Default value is - "2024-07-31-preview". Note that overriding this default value may result in unsupported - behavior. - :paramtype api_version: str - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - """ - - def __init__( - self, - endpoint: str, - credential: Union[AzureKeyCredential, TokenCredential], - **kwargs: Any, - ) -> None: - # Patch the default polling interval to be 1s. - polling_interval = kwargs.pop("polling_interval", 1) - super().__init__( - endpoint=endpoint, - credential=credential, - polling_interval=polling_interval, - **kwargs, - ) - - -class DocumentIntelligenceAdministrationClient(DIAClientGenerated): - """DocumentIntelligenceAdministrationClient. - - :param endpoint: The Document Intelligence service endpoint. Required. - :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. Is either a - AzureKeyCredential type or a TokenCredential type. Required. - :type credential: ~azure.core.credentials.AzureKeyCredential or - ~azure.core.credentials.TokenCredential - :keyword api_version: The API version to use for this operation. Default value is - "2024-07-31-preview". Note that overriding this default value may result in unsupported - behavior. - :paramtype api_version: str - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - """ - - def __init__( - self, - endpoint: str, - credential: Union[AzureKeyCredential, TokenCredential], - **kwargs: Any, - ) -> None: - # Patch the default polling interval to be 1s. 
- polling_interval = kwargs.pop("polling_interval", 1) - super().__init__( - endpoint=endpoint, - credential=credential, - polling_interval=polling_interval, - **kwargs, - ) - - -__all__: List[str] = [ - "DocumentIntelligenceClient", - "DocumentIntelligenceAdministrationClient", - "AnalyzeDocumentLROPoller", -] # Add all objects you want publicly available to users at this package level +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_serialization.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_serialization.py index 480e941d758f..ce17d1798ce7 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_serialization.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_serialization.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. @@ -506,7 +507,6 @@ def _flatten_subtype(cls, key, objects): def _classify(cls, response, objects): """Check the class _subtype_map for any child classes. We want to ignore any inherited _subtype_maps. - Remove the polymorphic key from the initial data. :param dict response: The initial data :param dict objects: The class objects @@ -518,7 +518,7 @@ def _classify(cls, response, objects): if not isinstance(response, ET.Element): rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] - subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) else: subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) if subtype_value: diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_validation.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_validation.py deleted file mode 100644 index 752b2822f9d3..000000000000 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_validation.py +++ /dev/null @@ -1,50 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import functools - - -def api_version_validation(**kwargs): - params_added_on = kwargs.pop("params_added_on", {}) - method_added_on = kwargs.pop("method_added_on", "") - - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - try: - # this assumes the client has an _api_version attribute - client = args[0] - client_api_version = client._config.api_version # pylint: disable=protected-access - except AttributeError: - return func(*args, **kwargs) - - if method_added_on > client_api_version: - raise ValueError( - f"'{func.__name__}' is not available in API version " - f"{client_api_version}. Pass service API version {method_added_on} or newer to your client." 
- ) - - unsupported = { - parameter: api_version - for api_version, parameters in params_added_on.items() - for parameter in parameters - if parameter in kwargs and api_version > client_api_version - } - if unsupported: - raise ValueError( - "".join( - [ - f"'{param}' is not available in API version {client_api_version}. " - f"Use service API version {version} or newer.\n" - for param, version in unsupported.items() - ] - ) - ) - return func(*args, **kwargs) - - return wrapper - - return decorator diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_version.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_version.py index c7d155d924dd..bbcd28b4aa67 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_version.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.0b5" +VERSION = "1.0.0b2" diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/__init__.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/__init__.py index d42be0aa8a75..02790b905f25 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/__init__.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/__init__.py @@ -5,18 +5,27 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._patch import DocumentIntelligenceClient -from ._patch import DocumentIntelligenceAdministrationClient +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import -from ._patch import AsyncAnalyzeDocumentLROPoller +from ._client import DocumentIntelligenceClient # type: ignore +from ._client import DocumentIntelligenceAdministrationClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] from ._patch import patch_sdk as _patch_sdk __all__ = [ - "AsyncAnalyzeDocumentLROPoller", "DocumentIntelligenceClient", "DocumentIntelligenceAdministrationClient", ] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/__init__.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/__init__.py index 98b551bb5d27..63b6bbb81ed8 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/__init__.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/__init__.py @@ -5,15 +5,23 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._patch import DocumentIntelligenceClientOperationsMixin -from ._patch import DocumentIntelligenceAdministrationClientOperationsMixin +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import +from ._operations import DocumentIntelligenceClientOperationsMixin # type: ignore +from ._operations import DocumentIntelligenceAdministrationClientOperationsMixin # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ "DocumentIntelligenceClientOperationsMixin", "DocumentIntelligenceAdministrationClientOperationsMixin", ] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_operations.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_operations.py index aa9dc521495f..763bb83de9ec 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_operations.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_operations.py @@ -176,7 +176,7 @@ async def begin_analyze_document( :type model_id: str :param analyze_request: Analyze request parameters. Default value is None. :type analyze_request: ~azure.ai.documentintelligence.models.AnalyzeDocumentRequest - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -228,7 +228,7 @@ async def begin_analyze_document( :type model_id: str :param analyze_request: Analyze request parameters. Default value is None. :type analyze_request: JSON - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -280,7 +280,7 @@ async def begin_analyze_document( :type model_id: str :param analyze_request: Analyze request parameters. Default value is None. :type analyze_request: IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -333,7 +333,7 @@ async def begin_analyze_document( AnalyzeDocumentRequest, JSON, IO[bytes] Default value is None. :type analyze_request: ~azure.ai.documentintelligence.models.AnalyzeDocumentRequest or JSON or IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. 
Value may contain @@ -531,7 +531,7 @@ async def begin_analyze_batch_documents( :type model_id: str :param analyze_batch_request: Analyze batch request parameters. Default value is None. :type analyze_batch_request: ~azure.ai.documentintelligence.models.AnalyzeBatchDocumentsRequest - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -584,7 +584,7 @@ async def begin_analyze_batch_documents( :type model_id: str :param analyze_batch_request: Analyze batch request parameters. Default value is None. :type analyze_batch_request: JSON - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -637,7 +637,7 @@ async def begin_analyze_batch_documents( :type model_id: str :param analyze_batch_request: Analyze batch request parameters. Default value is None. :type analyze_batch_request: IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -691,7 +691,7 @@ async def begin_analyze_batch_documents( AnalyzeBatchDocumentsRequest, JSON, IO[bytes] Default value is None. :type analyze_batch_request: ~azure.ai.documentintelligence.models.AnalyzeBatchDocumentsRequest or JSON or IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword locale: Locale hint for text recognition and document analysis. Value may contain @@ -1014,7 +1014,7 @@ async def begin_classify_document( :keyword split: Document splitting mode. Known values are: "auto", "none", and "perPage". Default value is None. :paramtype split: str or ~azure.ai.documentintelligence.models.SplitMode - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. @@ -1050,7 +1050,7 @@ async def begin_classify_document( :keyword split: Document splitting mode. Known values are: "auto", "none", and "perPage". Default value is None. :paramtype split: str or ~azure.ai.documentintelligence.models.SplitMode - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. @@ -1086,7 +1086,7 @@ async def begin_classify_document( :keyword split: Document splitting mode. Known values are: "auto", "none", and "perPage". Default value is None. 
:paramtype split: str or ~azure.ai.documentintelligence.models.SplitMode - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :keyword content_type: Body Parameter content-type. Content type parameter for binary body. @@ -1123,7 +1123,7 @@ async def begin_classify_document( :keyword split: Document splitting mode. Known values are: "auto", "none", and "perPage". Default value is None. :paramtype split: str or ~azure.ai.documentintelligence.models.SplitMode - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is + :keyword pages: List of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is None. :paramtype pages: str :return: An instance of AsyncLROPoller that returns AnalyzeResult. The AnalyzeResult is diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_patch.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_patch.py index da3223c795b7..f7dd32510333 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_patch.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_operations/_patch.py @@ -6,625 +6,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -import sys -from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, Mapping, cast, overload +from typing import List -from azure.core.pipeline import PipelineResponse -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.polling.async_base_polling import AsyncLROBasePolling -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict - -from ._operations import ( - DocumentIntelligenceClientOperationsMixin as GeneratedDIClientOps, - DocumentIntelligenceAdministrationClientOperationsMixin as GeneratedDIAdminClientOps, -) -from ... import models as _models -from ..._model_base import _deserialize -from ..._operations._patch import PollingReturnType_co, _parse_operation_id, _finished - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class AsyncAnalyzeDocumentLROPoller(AsyncLROPoller[PollingReturnType_co]): - @property - def details(self) -> Mapping[str, Any]: - """Returns metadata associated with the long-running operation. - - :return: Returns metadata associated with the long-running operation. 
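The details property above surfaces the operation id parsed out of the Operation-Location header. A standalone sketch of that idea, runnable without a service call; the URL below is only an example of the usual header shape, not output from the SDK.

from urllib.parse import urlparse

def parse_operation_id(operation_location: str) -> str:
    # Take the last path segment of the Operation-Location URL, ignoring the query string.
    return urlparse(operation_location).path.rstrip("/").split("/")[-1]

header = (
    "https://myendpoint.cognitiveservices.azure.com/documentintelligence/"
    "documentModels/prebuilt-layout/analyzeResults/abc123?api-version=2024-07-31-preview"
)
print(parse_operation_id(header))  # abc123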
- :rtype: Mapping[str, Any] - """ - return { - "operation_id": _parse_operation_id( - self.polling_method()._initial_response.http_response.headers["Operation-Location"] # type: ignore # pylint: disable=protected-access - ), - } - - @classmethod - def from_continuation_token( - cls, polling_method: AsyncPollingMethod[PollingReturnType_co], continuation_token: str, **kwargs: Any - ) -> "AsyncAnalyzeDocumentLROPoller": - ( - client, - initial_response, - deserialization_callback, - ) = polling_method.from_continuation_token(continuation_token, **kwargs) - - return cls(client, initial_response, deserialization_callback, polling_method) - - -class AsyncAnalyzeBatchDocumentsLROPollingMethod(AsyncLROBasePolling): # pylint: disable=name-too-long - def finished(self) -> bool: - """Is this polling finished? - - :return: Whether the polling finished or not. - :rtype: bool - """ - return _finished(self.status()) - - -class DocumentIntelligenceAdministrationClientOperationsMixin( - GeneratedDIAdminClientOps -): # pylint: disable=name-too-long - @distributed_trace_async - async def begin_build_classifier( # type: ignore[override] - self, build_request: Union[_models.BuildDocumentClassifierRequest, JSON, IO[bytes]], **kwargs: Any - ) -> AsyncLROPoller[_models.DocumentClassifierDetails]: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DocumentClassifierDetails] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._build_classifier_initial( - build_request=build_request, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.DocumentClassifierDetails, response.json()) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, - AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[_models.DocumentClassifierDetails].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[_models.DocumentClassifierDetails]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace_async - async def begin_build_document_model( # type: 
ignore[override] - self, build_request: Union[_models.BuildDocumentModelRequest, JSON, IO[bytes]], **kwargs: Any - ) -> AsyncLROPoller[_models.DocumentModelDetails]: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DocumentModelDetails] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._build_document_model_initial( - build_request=build_request, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.DocumentModelDetails, response.json()) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, - AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[_models.DocumentModelDetails].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[_models.DocumentModelDetails]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace_async - async def begin_compose_model( # type: ignore[override] - self, compose_request: Union[_models.ComposeDocumentModelRequest, JSON, IO[bytes]], **kwargs: Any - ) -> AsyncLROPoller[_models.DocumentModelDetails]: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DocumentModelDetails] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._compose_model_initial( - compose_request=compose_request, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Retry-After"] = self._deserialize("int", 
response.headers.get("Retry-After")) - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.DocumentModelDetails, response.json()) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, - AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[_models.DocumentModelDetails].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[_models.DocumentModelDetails]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace_async - async def begin_copy_model_to( # type: ignore[override] - self, model_id: str, copy_to_request: Union[_models.CopyAuthorization, JSON, IO[bytes]], **kwargs: Any - ) -> AsyncLROPoller[_models.DocumentModelDetails]: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DocumentModelDetails] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._copy_model_to_initial( - model_id=model_id, - copy_to_request=copy_to_request, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.DocumentModelDetails, response.json()) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, - AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller[_models.DocumentModelDetails].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller[_models.DocumentModelDetails]( - self._client, raw_result, 
get_long_running_output, polling_method # type: ignore - ) - - -class DocumentIntelligenceClientOperationsMixin(GeneratedDIClientOps): # pylint: disable=name-too-long - @overload - async def begin_analyze_document( - self, - model_id: str, - analyze_request: Optional[_models.AnalyzeDocumentRequest] = None, - *, - pages: Optional[str] = None, - locale: Optional[str] = None, - string_index_type: Optional[Union[str, _models.StringIndexType]] = None, - features: Optional[List[Union[str, _models.DocumentAnalysisFeature]]] = None, - query_fields: Optional[List[str]] = None, - output_content_format: Optional[Union[str, _models.ContentFormat]] = None, - output: Optional[List[Union[str, _models.AnalyzeOutputOption]]] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncAnalyzeDocumentLROPoller[_models.AnalyzeResult]: - """Analyzes document with document model. - - :param model_id: Unique document model name. Required. - :type model_id: str - :param analyze_request: Analyze request parameters. Default value is None. - :type analyze_request: ~azure.ai.documentintelligence.models.AnalyzeDocumentRequest - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is - None. - :paramtype pages: str - :keyword locale: Locale hint for text recognition and document analysis. Value may contain - only - the language code (ex. "en", "fr") or BCP 47 language tag (ex. "en-US"). Default value is - None. - :paramtype locale: str - :keyword string_index_type: Method used to compute string offset and length. Known values are: - "textElements", "unicodeCodePoint", and "utf16CodeUnit". Default value is None. - :paramtype string_index_type: str or ~azure.ai.documentintelligence.models.StringIndexType - :keyword features: List of optional analysis features. Default value is None. - :paramtype features: list[str or ~azure.ai.documentintelligence.models.DocumentAnalysisFeature] - :keyword query_fields: List of additional fields to extract. Ex. "NumberOfGuests,StoreNumber". - Default value is None. - :paramtype query_fields: list[str] - :keyword output_content_format: Format of the analyze result top-level content. Known values - are: "text" and "markdown". Default value is None. - :paramtype output_content_format: str or ~azure.ai.documentintelligence.models.ContentFormat - :keyword output: Additional outputs to generate during analysis. Default value is None. - :paramtype output: list[str or ~azure.ai.documentintelligence.models.AnalyzeOutputOption] - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AsyncAnalyzeDocumentLROPoller that returns AnalyzeResult. 
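An illustrative sketch of the optional add-ons listed above (features, query_fields, markdown output); availability of individual add-ons depends on the model and API version, and the endpoint, key and URL are placeholders.

import asyncio
from azure.core.credentials import AzureKeyCredential
from azure.ai.documentintelligence.aio import DocumentIntelligenceClient
from azure.ai.documentintelligence.models import (
    AnalyzeDocumentRequest,
    ContentFormat,
    DocumentAnalysisFeature,
)

async def analyze_with_addons():
    async with DocumentIntelligenceClient(
        endpoint="https://<resource>.cognitiveservices.azure.com",
        credential=AzureKeyCredential("<api-key>"),
    ) as client:
        poller = await client.begin_analyze_document(
            "prebuilt-layout",
            AnalyzeDocumentRequest(url_source="https://example.com/receipt.png"),
            features=[DocumentAnalysisFeature.QUERY_FIELDS],
            query_fields=["NumberOfGuests", "StoreNumber"],
            output_content_format=ContentFormat.MARKDOWN,
        )
        result = await poller.result()
        print(result.content)  # markdown rather than plain text

asyncio.run(analyze_with_addons())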
The AnalyzeResult is - compatible with MutableMapping - :rtype: AsyncAnalyzeDocumentLROPoller[~azure.ai.documentintelligence.models.AnalyzeResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_analyze_document( - self, - model_id: str, - analyze_request: Optional[JSON] = None, - *, - pages: Optional[str] = None, - locale: Optional[str] = None, - string_index_type: Optional[Union[str, _models.StringIndexType]] = None, - features: Optional[List[Union[str, _models.DocumentAnalysisFeature]]] = None, - query_fields: Optional[List[str]] = None, - output_content_format: Optional[Union[str, _models.ContentFormat]] = None, - output: Optional[List[Union[str, _models.AnalyzeOutputOption]]] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncAnalyzeDocumentLROPoller[_models.AnalyzeResult]: - """Analyzes document with document model. - - :param model_id: Unique document model name. Required. - :type model_id: str - :param analyze_request: Analyze request parameters. Default value is None. - :type analyze_request: JSON - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is - None. - :paramtype pages: str - :keyword locale: Locale hint for text recognition and document analysis. Value may contain - only - the language code (ex. "en", "fr") or BCP 47 language tag (ex. "en-US"). Default value is - None. - :paramtype locale: str - :keyword string_index_type: Method used to compute string offset and length. Known values are: - "textElements", "unicodeCodePoint", and "utf16CodeUnit". Default value is None. - :paramtype string_index_type: str or ~azure.ai.documentintelligence.models.StringIndexType - :keyword features: List of optional analysis features. Default value is None. - :paramtype features: list[str or ~azure.ai.documentintelligence.models.DocumentAnalysisFeature] - :keyword query_fields: List of additional fields to extract. Ex. "NumberOfGuests,StoreNumber". - Default value is None. - :paramtype query_fields: list[str] - :keyword output_content_format: Format of the analyze result top-level content. Known values - are: "text" and "markdown". Default value is None. - :paramtype output_content_format: str or ~azure.ai.documentintelligence.models.ContentFormat - :keyword output: Additional outputs to generate during analysis. Default value is None. - :paramtype output: list[str or ~azure.ai.documentintelligence.models.AnalyzeOutputOption] - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AsyncAnalyzeDocumentLROPoller that returns AnalyzeResult. 
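The JSON overload above also accepts a plain dict using the REST wire names; a short hedged sketch, with the same placeholder endpoint, key and URL as in the other examples.

import asyncio
from azure.core.credentials import AzureKeyCredential
from azure.ai.documentintelligence.aio import DocumentIntelligenceClient

async def analyze_from_dict():
    async with DocumentIntelligenceClient(
        endpoint="https://<resource>.cognitiveservices.azure.com",
        credential=AzureKeyCredential("<api-key>"),
    ) as client:
        # "urlSource" is the wire name that AnalyzeDocumentRequest.url_source maps to.
        poller = await client.begin_analyze_document(
            "prebuilt-read",
            {"urlSource": "https://example.com/letter.pdf"},
        )
        print((await poller.result()).content)

asyncio.run(analyze_from_dict())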
The AnalyzeResult is - compatible with MutableMapping - :rtype: AsyncAnalyzeDocumentLROPoller[~azure.ai.documentintelligence.models.AnalyzeResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_analyze_document( - self, - model_id: str, - analyze_request: Optional[IO[bytes]] = None, - *, - pages: Optional[str] = None, - locale: Optional[str] = None, - string_index_type: Optional[Union[str, _models.StringIndexType]] = None, - features: Optional[List[Union[str, _models.DocumentAnalysisFeature]]] = None, - query_fields: Optional[List[str]] = None, - output_content_format: Optional[Union[str, _models.ContentFormat]] = None, - output: Optional[List[Union[str, _models.AnalyzeOutputOption]]] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> AsyncAnalyzeDocumentLROPoller[_models.AnalyzeResult]: - """Analyzes document with document model. - - :param model_id: Unique document model name. Required. - :type model_id: str - :param analyze_request: Analyze request parameters. Default value is None. - :type analyze_request: IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is - None. - :paramtype pages: str - :keyword locale: Locale hint for text recognition and document analysis. Value may contain - only - the language code (ex. "en", "fr") or BCP 47 language tag (ex. "en-US"). Default value is - None. - :paramtype locale: str - :keyword string_index_type: Method used to compute string offset and length. Known values are: - "textElements", "unicodeCodePoint", and "utf16CodeUnit". Default value is None. - :paramtype string_index_type: str or ~azure.ai.documentintelligence.models.StringIndexType - :keyword features: List of optional analysis features. Default value is None. - :paramtype features: list[str or ~azure.ai.documentintelligence.models.DocumentAnalysisFeature] - :keyword query_fields: List of additional fields to extract. Ex. "NumberOfGuests,StoreNumber". - Default value is None. - :paramtype query_fields: list[str] - :keyword output_content_format: Format of the analyze result top-level content. Known values - are: "text" and "markdown". Default value is None. - :paramtype output_content_format: str or ~azure.ai.documentintelligence.models.ContentFormat - :keyword output: Additional outputs to generate during analysis. Default value is None. - :paramtype output: list[str or ~azure.ai.documentintelligence.models.AnalyzeOutputOption] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: An instance of AsyncAnalyzeDocumentLROPoller that returns AnalyzeResult. 
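For the IO[bytes] overload above, a sketch that streams a local file and sets the binary content type explicitly; the file path is a placeholder.

import asyncio
from azure.core.credentials import AzureKeyCredential
from azure.ai.documentintelligence.aio import DocumentIntelligenceClient

async def analyze_local_file():
    async with DocumentIntelligenceClient(
        endpoint="https://<resource>.cognitiveservices.azure.com",
        credential=AzureKeyCredential("<api-key>"),
    ) as client:
        with open("sample.pdf", "rb") as f:
            # The initial request is sent (and the stream consumed) before begin_* returns.
            poller = await client.begin_analyze_document(
                "prebuilt-layout",
                f,
                content_type="application/octet-stream",
            )
        result = await poller.result()
        print(len(result.pages or []), "pages analyzed")

asyncio.run(analyze_local_file())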
The AnalyzeResult is - compatible with MutableMapping - :rtype: AsyncAnalyzeDocumentLROPoller[~azure.ai.documentintelligence.models.AnalyzeResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_analyze_document( # type: ignore[override] - self, - model_id: str, - analyze_request: Optional[Union[_models.AnalyzeDocumentRequest, JSON, IO[bytes]]] = None, - *, - pages: Optional[str] = None, - locale: Optional[str] = None, - string_index_type: Optional[Union[str, _models.StringIndexType]] = None, - features: Optional[List[Union[str, _models.DocumentAnalysisFeature]]] = None, - query_fields: Optional[List[str]] = None, - output_content_format: Optional[Union[str, _models.ContentFormat]] = None, - output: Optional[List[Union[str, _models.AnalyzeOutputOption]]] = None, - **kwargs: Any - ) -> AsyncAnalyzeDocumentLROPoller[_models.AnalyzeResult]: - """Analyzes document with document model. - - :param model_id: Unique document model name. Required. - :type model_id: str - :param analyze_request: Analyze request parameters. Is one of the following types: - AnalyzeDocumentRequest, JSON, IO[bytes] Default value is None. - :type analyze_request: ~azure.ai.documentintelligence.models.AnalyzeDocumentRequest or JSON or - IO[bytes] - :keyword pages: Range of 1-based page numbers to analyze. Ex. "1-3,5,7-9". Default value is - None. - :paramtype pages: str - :keyword locale: Locale hint for text recognition and document analysis. Value may contain - only - the language code (ex. "en", "fr") or BCP 47 language tag (ex. "en-US"). Default value is - None. - :paramtype locale: str - :keyword string_index_type: Method used to compute string offset and length. Known values are: - "textElements", "unicodeCodePoint", and "utf16CodeUnit". Default value is None. - :paramtype string_index_type: str or ~azure.ai.documentintelligence.models.StringIndexType - :keyword features: List of optional analysis features. Default value is None. - :paramtype features: list[str or ~azure.ai.documentintelligence.models.DocumentAnalysisFeature] - :keyword query_fields: List of additional fields to extract. Ex. "NumberOfGuests,StoreNumber". - Default value is None. - :paramtype query_fields: list[str] - :keyword output_content_format: Format of the analyze result top-level content. Known values - are: "text" and "markdown". Default value is None. - :paramtype output_content_format: str or ~azure.ai.documentintelligence.models.ContentFormat - :keyword output: Additional outputs to generate during analysis. Default value is None. - :paramtype output: list[str or ~azure.ai.documentintelligence.models.AnalyzeOutputOption] - :return: An instance of AsyncAnalyzeDocumentLROPoller that returns AnalyzeResult. 
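The implementation that follows pops a continuation_token keyword, so an in-flight analyze operation can be resumed later; a hedged sketch of that round trip (the token is only useful while the service-side operation still exists, and all other names are placeholders).

import asyncio
from azure.core.credentials import AzureKeyCredential
from azure.ai.documentintelligence.aio import DocumentIntelligenceClient
from azure.ai.documentintelligence.models import AnalyzeDocumentRequest

async def start_then_resume():
    async with DocumentIntelligenceClient(
        endpoint="https://<resource>.cognitiveservices.azure.com",
        credential=AzureKeyCredential("<api-key>"),
    ) as client:
        poller = await client.begin_analyze_document(
            "prebuilt-read",
            AnalyzeDocumentRequest(url_source="https://example.com/letter.pdf"),
        )
        token = poller.continuation_token()  # persist this string somewhere durable

        # Later, possibly from another process, resume instead of starting over.
        resumed = await client.begin_analyze_document(
            "prebuilt-read", None, continuation_token=token
        )
        print((await resumed.result()).content)

asyncio.run(start_then_resume())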
The AnalyzeResult is - compatible with MutableMapping - :rtype: AsyncAnalyzeDocumentLROPoller[~azure.ai.documentintelligence.models.AnalyzeResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) - cls: ClsType[_models.AnalyzeResult] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._analyze_document_initial( - model_id=model_id, - analyze_request=analyze_request, - pages=pages, - locale=locale, - string_index_type=string_index_type, - features=features, - query_fields=query_fields, - output_content_format=output_content_format, - output=output, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - await raw_result.http_response.read() # type: ignore - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response_headers = {} - response = pipeline_response.http_response - response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - deserialized = _deserialize(_models.AnalyzeResult, response.json().get("analyzeResult")) - if cls: - return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized - - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, - AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncAnalyzeDocumentLROPoller[_models.AnalyzeBatchResult].from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncAnalyzeDocumentLROPoller[_models.AnalyzeResult]( - self._client, raw_result, get_long_running_output, polling_method # type: ignore - ) - - @distributed_trace_async - async def begin_analyze_batch_documents( # type: ignore[override] - self, - model_id: str, - analyze_batch_request: Optional[Union[_models.AnalyzeBatchDocumentsRequest, JSON, IO[bytes]]] = None, - *, - pages: Optional[str] = None, - locale: Optional[str] = None, - string_index_type: Optional[Union[str, _models.StringIndexType]] = None, - features: Optional[List[Union[str, _models.DocumentAnalysisFeature]]] = None, - query_fields: Optional[List[str]] = None, - output_content_format: Optional[Union[str, _models.ContentFormat]] = None, - output: Optional[List[Union[str, _models.AnalyzeOutputOption]]] = None, - **kwargs: Any - ) -> AsyncLROPoller[_models.AnalyzeBatchResult]: - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - return await super().begin_analyze_batch_documents( - model_id=model_id, - analyze_batch_request=analyze_batch_request, - pages=pages, - locale=locale, - 
string_index_type=string_index_type, - features=features, - query_fields=query_fields, - output_content_format=output_content_format, - output=output, - polling=AsyncAnalyzeBatchDocumentsLROPollingMethod(timeout=lro_delay), - **kwargs - ) - - -__all__: List[str] = [ - "DocumentIntelligenceClientOperationsMixin", - "DocumentIntelligenceAdministrationClientOperationsMixin", -] # Add all objects you want publicly available to users at this package level +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_patch.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_patch.py index 821d56fb6891..f7dd32510333 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_patch.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/aio/_patch.py @@ -6,85 +6,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import Any, List, Union +from typing import List -from azure.core.credentials import AzureKeyCredential -from azure.core.credentials_async import AsyncTokenCredential - -from ._client import ( - DocumentIntelligenceClient as DIClientGenerated, - DocumentIntelligenceAdministrationClient as DIAClientGenerated, -) -from ..aio._operations._patch import AsyncAnalyzeDocumentLROPoller - - -class DocumentIntelligenceClient(DIClientGenerated): - """DocumentIntelligenceClient. - - :param endpoint: The Document Intelligence service endpoint. Required. - :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. Is either a - AzureKeyCredential type or a TokenCredential type. Required. - :type credential: ~azure.core.credentials.AzureKeyCredential or - ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: The API version to use for this operation. Default value is - "2024-07-31-preview". Note that overriding this default value may result in unsupported - behavior. - :paramtype api_version: str - """ - - def __init__( - self, - endpoint: str, - credential: Union[AzureKeyCredential, AsyncTokenCredential], - **kwargs: Any, - ) -> None: - # Patch the default polling interval to be 1s. - polling_interval = kwargs.pop("polling_interval", 1) - super().__init__( - endpoint=endpoint, - credential=credential, - polling_interval=polling_interval, - **kwargs, - ) - - -class DocumentIntelligenceAdministrationClient(DIAClientGenerated): - """DocumentIntelligenceAdministrationClient. - - :param endpoint: The Document Intelligence service endpoint. Required. - :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. Is either a - AzureKeyCredential type or a TokenCredential type. Required. - :type credential: ~azure.core.credentials.AzureKeyCredential or - ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: The API version to use for this operation. Default value is - "2024-07-31-preview". Note that overriding this default value may result in unsupported - behavior. - :paramtype api_version: str - """ - - def __init__( - self, - endpoint: str, - credential: Union[AzureKeyCredential, AsyncTokenCredential], - **kwargs: Any, - ) -> None: - # Patch the default polling interval to be 1s. 
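If the customization removed here (the one-second default for polling_interval) is not reinstated elsewhere, a caller who wants the old cadence can pass it explicitly at construction time; a minimal sketch with placeholder endpoint and key.

from azure.core.credentials import AzureKeyCredential
from azure.ai.documentintelligence.aio import DocumentIntelligenceClient

client = DocumentIntelligenceClient(
    endpoint="https://<resource>.cognitiveservices.azure.com",
    credential=AzureKeyCredential("<api-key>"),
    polling_interval=1,  # seconds between LRO status polls, mirroring the removed default
)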
- polling_interval = kwargs.pop("polling_interval", 1) - super().__init__( - endpoint=endpoint, - credential=credential, - polling_interval=polling_interval, - **kwargs, - ) - - -__all__: List[str] = [ - "DocumentIntelligenceClient", - "DocumentIntelligenceAdministrationClient", - "AsyncAnalyzeDocumentLROPoller", -] # Add all objects you want publicly available to users at this package level +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/__init__.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/__init__.py index 2f7221ee110a..f82d490b0a79 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/__init__.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/__init__.py @@ -5,85 +5,97 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._models import AddressValue -from ._models import AnalyzeBatchDocumentsRequest -from ._models import AnalyzeBatchOperationDetail -from ._models import AnalyzeBatchResult -from ._models import AnalyzeBatchResultOperation -from ._patch import AnalyzeDocumentRequest -from ._models import AnalyzeResult -from ._models import AnalyzeResultOperation -from ._models import AuthorizeClassifierCopyRequest -from ._models import AuthorizeCopyRequest -from ._models import AzureBlobContentSource -from ._models import AzureBlobFileListContentSource -from ._models import BoundingRegion -from ._models import BuildDocumentClassifierRequest -from ._models import BuildDocumentModelRequest -from ._models import ClassifierCopyAuthorization -from ._models import ClassifierDocumentTypeDetails -from ._patch import ClassifyDocumentRequest -from ._models import ComposeDocumentModelRequest -from ._models import CopyAuthorization -from ._models import CurrencyValue -from ._models import CustomDocumentModelsDetails -from ._models import Document -from ._models import DocumentBarcode -from ._models import DocumentCaption -from ._models import DocumentClassifierBuildOperationDetails -from ._models import DocumentClassifierCopyToOperationDetails -from ._models import DocumentClassifierDetails -from ._models import DocumentField -from ._models import DocumentFieldSchema -from ._models import DocumentFigure -from ._models import DocumentFootnote -from ._models import DocumentFormula -from ._models import DocumentKeyValueElement -from ._models import DocumentKeyValuePair -from ._models import DocumentLanguage -from ._models import DocumentLine -from ._models import DocumentModelBuildOperationDetails -from ._models import DocumentModelComposeOperationDetails -from ._models import DocumentModelCopyToOperationDetails -from ._models import DocumentModelDetails -from ._models import DocumentPage -from ._models import DocumentParagraph -from ._models import DocumentSection -from ._models import DocumentSelectionMark -from ._models import DocumentSpan -from ._models import DocumentStyle -from ._models import DocumentTable -from ._models import DocumentTableCell -from ._models import DocumentTypeDetails -from ._models import DocumentWord -from ._models import Error -from 
._models import ErrorResponse -from ._models import InnerError -from ._models import OperationDetails -from ._models import ResourceDetails -from ._models import Warning +from typing import TYPE_CHECKING -from ._enums import AnalyzeOutputOption -from ._enums import ContentFormat -from ._enums import ContentSourceKind -from ._enums import DocumentAnalysisFeature -from ._enums import DocumentBarcodeKind -from ._enums import DocumentBuildMode -from ._enums import DocumentFieldType -from ._enums import DocumentFormulaKind -from ._enums import DocumentSelectionMarkState -from ._enums import DocumentSignatureType -from ._enums import DocumentTableCellKind -from ._enums import FontStyle -from ._enums import FontWeight -from ._enums import LengthUnit -from ._enums import OperationKind -from ._enums import OperationStatus -from ._enums import ParagraphRole -from ._enums import SplitMode -from ._enums import StringIndexType +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._models import ( # type: ignore + AddressValue, + AnalyzeBatchDocumentsRequest, + AnalyzeBatchOperationDetail, + AnalyzeBatchResult, + AnalyzeBatchResultOperation, + AnalyzeDocumentRequest, + AnalyzeResult, + AnalyzeResultOperation, + AuthorizeClassifierCopyRequest, + AuthorizeCopyRequest, + AzureBlobContentSource, + AzureBlobFileListContentSource, + BoundingRegion, + BuildDocumentClassifierRequest, + BuildDocumentModelRequest, + ClassifierCopyAuthorization, + ClassifierDocumentTypeDetails, + ClassifyDocumentRequest, + ComposeDocumentModelRequest, + CopyAuthorization, + CurrencyValue, + CustomDocumentModelsDetails, + Document, + DocumentBarcode, + DocumentCaption, + DocumentClassifierBuildOperationDetails, + DocumentClassifierCopyToOperationDetails, + DocumentClassifierDetails, + DocumentField, + DocumentFieldSchema, + DocumentFigure, + DocumentFootnote, + DocumentFormula, + DocumentKeyValueElement, + DocumentKeyValuePair, + DocumentLanguage, + DocumentLine, + DocumentModelBuildOperationDetails, + DocumentModelComposeOperationDetails, + DocumentModelCopyToOperationDetails, + DocumentModelDetails, + DocumentPage, + DocumentParagraph, + DocumentSection, + DocumentSelectionMark, + DocumentSpan, + DocumentStyle, + DocumentTable, + DocumentTableCell, + DocumentTypeDetails, + DocumentWord, + Error, + ErrorResponse, + InnerError, + OperationDetails, + ResourceDetails, + Warning, +) + +from ._enums import ( # type: ignore + AnalyzeOutputOption, + ContentFormat, + ContentSourceKind, + DocumentAnalysisFeature, + DocumentBarcodeKind, + DocumentBuildMode, + DocumentFieldType, + DocumentFormulaKind, + DocumentSelectionMarkState, + DocumentSignatureType, + DocumentTableCellKind, + FontStyle, + FontWeight, + LengthUnit, + OperationKind, + OperationStatus, + ParagraphRole, + SplitMode, + StringIndexType, +) +from ._patch import __all__ as _patch_all +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ @@ -164,4 +176,5 @@ "SplitMode", "StringIndexType", ] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_models.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_models.py index 95af408671b0..0d068edfb250 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_models.py +++ 
b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_models.py @@ -6,6 +6,7 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation import datetime from typing import Any, Dict, List, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload @@ -18,7 +19,7 @@ from .. import models as _models -class AddressValue(_model_base.Model): # pylint: disable=too-many-instance-attributes +class AddressValue(_model_base.Model): """Address field value. :ivar house_number: House or building number. @@ -111,7 +112,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -170,7 +171,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -216,7 +217,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -260,7 +261,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -316,7 +317,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -352,11 +353,11 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class AnalyzeResult(_model_base.Model): # pylint: disable=too-many-instance-attributes +class AnalyzeResult(_model_base.Model): """Document analysis result. 
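These generated models derive from a MutableMapping-based Model type, so instances can be read as attributes or as dict entries and serialized with as_dict; a small offline sketch using a hand-built payload rather than a service response.

from azure.ai.documentintelligence.models import DocumentSpan

span = DocumentSpan({"offset": 5, "length": 12})  # models also accept a raw mapping
print(span.offset, span["length"])  # attribute access and mapping access
print(span.as_dict())               # plain JSON-serializable dict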
@@ -456,7 +457,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -508,7 +509,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -548,7 +549,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -588,7 +589,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -622,7 +623,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -657,7 +658,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -697,7 +698,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -748,7 +749,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -824,7 +825,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -884,7 +885,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -937,7 +938,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -974,7 +975,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: 
disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -1030,7 +1031,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -1089,7 +1090,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -1128,7 +1129,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -1163,7 +1164,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -1212,7 +1213,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -1271,7 +1272,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -1315,7 +1316,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -1401,13 +1402,11 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DocumentClassifierBuildOperationDetails( - OperationDetails, discriminator="documentClassifierBuild" -): # pylint: disable=too-many-instance-attributes +class DocumentClassifierBuildOperationDetails(OperationDetails, discriminator="documentClassifierBuild"): """Get Operation response object. @@ -1464,13 +1463,11 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, kind=OperationKind.DOCUMENT_CLASSIFIER_BUILD, **kwargs) -class DocumentClassifierCopyToOperationDetails( - OperationDetails, discriminator="documentClassifierCopyTo" -): # pylint: disable=too-many-instance-attributes +class DocumentClassifierCopyToOperationDetails(OperationDetails, discriminator="documentClassifierCopyTo"): """Get Operation response object. 
@@ -1531,7 +1528,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, kind=OperationKind.DOCUMENT_CLASSIFIER_COPY_TO, **kwargs)
@@ -1597,11 +1594,11 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DocumentField(_model_base.Model): # pylint: disable=too-many-instance-attributes +class DocumentField(_model_base.Model): """An object representing the content and location of a field value.
@@ -1729,7 +1726,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs)
@@ -1783,7 +1780,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs)
@@ -1837,7 +1834,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs)
@@ -1881,7 +1878,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs)
@@ -1936,7 +1933,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs)
@@ -1976,7 +1973,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs)
@@ -2016,7 +2013,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs)
@@ -2059,7 +2056,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs)
@@ -2105,13 +2102,11 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DocumentModelBuildOperationDetails( - OperationDetails, discriminator="documentModelBuild" -): # pylint: disable=too-many-instance-attributes +class DocumentModelBuildOperationDetails(OperationDetails, discriminator="documentModelBuild"): """Get Operation response object.
@@ -2168,13 +2163,11 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, kind=OperationKind.DOCUMENT_MODEL_BUILD, **kwargs) -class DocumentModelComposeOperationDetails( - OperationDetails, discriminator="documentModelCompose" -): # pylint: disable=too-many-instance-attributes +class DocumentModelComposeOperationDetails(OperationDetails, discriminator="documentModelCompose"): """Get Operation response object.
@@ -2232,13 +2225,11 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, kind=OperationKind.DOCUMENT_MODEL_COMPOSE, **kwargs) -class DocumentModelCopyToOperationDetails( - OperationDetails, discriminator="documentModelCopyTo" -): # pylint: disable=too-many-instance-attributes +class DocumentModelCopyToOperationDetails(OperationDetails, discriminator="documentModelCopyTo"): """Get Operation response object.
@@ -2299,11 +2290,11 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, kind=OperationKind.DOCUMENT_MODEL_COPY_TO, **kwargs) -class DocumentModelDetails(_model_base.Model): # pylint: disable=too-many-instance-attributes +class DocumentModelDetails(_model_base.Model): """Document model info. Readonly variables are only populated by the server, and will be ignored when sending a request.
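DocumentModelDetails above is the payload that the administration client's build, compose and copy pollers resolve to; a hedged sketch of building a custom model from a blob container (the model id, build mode, URLs and key are illustrative only).

from azure.core.credentials import AzureKeyCredential
from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient
from azure.ai.documentintelligence.models import (
    AzureBlobContentSource,
    BuildDocumentModelRequest,
    DocumentBuildMode,
)

admin_client = DocumentIntelligenceAdministrationClient(
    endpoint="https://<resource>.cognitiveservices.azure.com",
    credential=AzureKeyCredential("<api-key>"),
)
poller = admin_client.begin_build_document_model(
    BuildDocumentModelRequest(
        model_id="myCustomModel",
        build_mode=DocumentBuildMode.TEMPLATE,
        azure_blob_source=AzureBlobContentSource(
            container_url="https://<storage>.blob.core.windows.net/training-data"
        ),
    )
)
details = poller.result()  # DocumentModelDetails
print(details.model_id, details.created_date_time)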
@@ -2403,11 +2394,11 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DocumentPage(_model_base.Model): # pylint: disable=too-many-instance-attributes +class DocumentPage(_model_base.Model): """Content and layout elements extracted from a page from the input. @@ -2488,7 +2479,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -2535,7 +2526,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -2569,7 +2560,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -2621,7 +2612,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -2656,7 +2647,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -2724,7 +2715,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -2783,7 +2774,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -2854,7 +2845,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -2926,7 +2917,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -2978,7 +2969,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, 
**kwargs) @@ -3028,7 +3019,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -3057,7 +3048,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -3095,7 +3086,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -3125,7 +3116,7 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) @@ -3164,5 +3155,5 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: :type mapping: Mapping[str, Any] """ - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_patch.py b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_patch.py index c385980aebd5..f7dd32510333 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_patch.py +++ b/sdk/documentintelligence/azure-ai-documentintelligence/azure/ai/documentintelligence/models/_patch.py @@ -6,45 +6,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List, Optional -from ._models import ( - AnalyzeDocumentRequest as GeneratedAnalyzeDocumentRequest, - ClassifyDocumentRequest as GeneratedClassifyDocumentRequest, -) -from .._model_base import rest_field +from typing import List - -class AnalyzeDocumentRequest(GeneratedAnalyzeDocumentRequest): - """Document analysis parameters. - - :ivar url_source: Document URL to analyze. Either url_source or bytes_source must be specified. - :vartype url_source: str - :ivar bytes_source: Document bytes to analyze. Either url_source or bytes_source must be specified. - :vartype bytes_source: bytes - """ - - bytes_source: Optional[bytes] = rest_field(name="base64Source", format="base64") - """Document bytes to analyze. Either url_source or bytes_source must be specified.""" - - -class ClassifyDocumentRequest(GeneratedClassifyDocumentRequest): - """Document classification parameters. - - :ivar url_source: Document URL to classify. Either url_source or bytes_source must be - specified. - :vartype url_source: str - :ivar bytes_source: Document bytes to classify. Either url_source or bytes_source must be specified. - :vartype bytes_source: bytes - """ - - bytes_source: Optional[bytes] = rest_field(name="base64Source", format="base64") - """Document bytes to classify. 
Either url_source or bytes_source must be specified.""" - - -__all__: List[str] = [ - "AnalyzeDocumentRequest", - "ClassifyDocumentRequest", -] # Add all objects you want publicly available to users at this package level +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/analyze_batch_documents.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/analyze_batch_documents.py new file mode 100644 index 000000000000..0b6c7a0d2a2c --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/analyze_batch_documents.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python analyze_batch_documents.py +""" + + +def main(): + client = DocumentIntelligenceClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.begin_analyze_batch_documents( + model_id="customModel", + ).result() + print(response) + + +# x-ms-original-file: 2024-07-31-preview/AnalyzeBatchDocuments.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/analyze_document_base64.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/analyze_document_base64.py new file mode 100644 index 000000000000..797b208ac75f --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/analyze_document_base64.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python analyze_document_base64.py +""" + + +def main(): + client = DocumentIntelligenceClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.begin_analyze_document( + model_id="prebuilt-layout", + ).result() + print(response) + + +# x-ms-original-file: 2024-07-31-preview/AnalyzeDocument_Base64.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/analyze_document_url.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/analyze_document_url.py new file mode 100644 index 000000000000..6103f8fdad53 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/analyze_document_url.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python analyze_document_url.py +""" + + +def main(): + client = DocumentIntelligenceClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.begin_analyze_document( + model_id="customModel", + ).result() + print(response) + + +# x-ms-original-file: 2024-07-31-preview/AnalyzeDocument_Url.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/authorize_copy_document_classifier.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/authorize_copy_document_classifier.py new file mode 100644 index 000000000000..2c0ac34b09f8 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/authorize_copy_document_classifier.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python authorize_copy_document_classifier.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.authorize_classifier_copy( + authorize_copy_request={"classifierId": "targetClassifier", "description": "Target classifier description"}, + ) + print(response) + + +# x-ms-original-file: 2024-07-31-preview/AuthorizeCopyDocumentClassifier.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/authorize_copy_document_model.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/authorize_copy_document_model.py new file mode 100644 index 000000000000..e9fda86720e2 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/authorize_copy_document_model.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python authorize_copy_document_model.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.authorize_model_copy( + authorize_copy_request={"description": "Target model description", "modelId": "targetModel"}, + ) + print(response) + + +# x-ms-original-file: 2024-07-31-preview/AuthorizeCopyDocumentModel.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/build_document_classifier.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/build_document_classifier.py new file mode 100644 index 000000000000..34f36f3c9910 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/build_document_classifier.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python build_document_classifier.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.begin_build_classifier( + build_request={ + "classifierId": "myClassifier", + "description": "Classifier description", + "docTypes": { + "formA": { + "azureBlobSource": { + "containerUrl": "https://myStorageAccount.blob.core.windows.net/myContainer?mySasToken", + "prefix": "formADocs/", + } + }, + "formB": { + "azureBlobFileListSource": { + "containerUrl": "https://myStorageAccount.blob.core.windows.net/myContainer?mySasToken", + "fileList": "formB.jsonl", + } + }, + }, + }, + ).result() + print(response) + + +# x-ms-original-file: 2024-07-31-preview/BuildDocumentClassifier.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/build_document_model.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/build_document_model.py new file mode 100644 index 000000000000..8a5c7b344c72 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/build_document_model.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python build_document_model.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.begin_build_document_model( + build_request={ + "azureBlobSource": { + "containerUrl": "https://myStorageAccount.blob.core.windows.net/myContainer?mySasToken", + "prefix": "trainingDocs/", + }, + "buildMode": "template", + "description": "Custom model description", + "modelId": "myCustomModel", + "tags": {"createdBy": "myUserId"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2024-07-31-preview/BuildDocumentModel.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/classify_document_url.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/classify_document_url.py new file mode 100644 index 000000000000..2a756ea7d48f --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/classify_document_url.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python classify_document_url.py +""" + + +def main(): + client = DocumentIntelligenceClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.begin_classify_document( + classifier_id="classifierId", + classify_request={"urlSource": "http://host.com/doc.pdf"}, + ).result() + print(response) + + +# x-ms-original-file: 2024-07-31-preview/ClassifyDocument_Url.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/compose_document_model.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/compose_document_model.py new file mode 100644 index 000000000000..b2c076f67d06 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/compose_document_model.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python compose_document_model.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.begin_compose_model( + compose_request={ + "classifierId": "customClassifier", + "description": "Composed model description", + "docTypes": {"formA": {"modelId": "model1"}, "formB": {"modelId": "model2"}}, + "modelId": "composedModel", + }, + ).result() + print(response) + + +# x-ms-original-file: 2024-07-31-preview/ComposeDocumentModel.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/copy_document_classifier_to.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/copy_document_classifier_to.py new file mode 100644 index 000000000000..870f007a27ac --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/copy_document_classifier_to.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python copy_document_classifier_to.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.begin_copy_classifier_to( + classifier_id="sourceClassifier", + copy_to_request={ + "accessToken": "accessToken", + "expirationDateTime": "2021-09-23T09:12:54.552Z", + "targetClassifierId": "targetClassifier", + "targetClassifierLocation": "https://targetEndpoint.cognitiveservices.azure.com/documentintelligence/documentClassifiers/targetClassifier", + "targetResourceId": "/subscriptions/targetSub/resourceGroups/targetRG/providers/Microsoft.CognitiveServices/accounts/targetService", + "targetResourceRegion": "targetResourceRegion", + }, + ).result() + print(response) + + +# x-ms-original-file: 2024-07-31-preview/CopyDocumentClassifierTo.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/copy_document_model_to.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/copy_document_model_to.py new file mode 100644 index 000000000000..23e8bba66c98 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/copy_document_model_to.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python copy_document_model_to.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.begin_copy_model_to( + model_id="sourceModel", + copy_to_request={ + "accessToken": "accessToken", + "expirationDateTime": "2021-09-23T09:12:54.552Z", + "targetModelId": "targetModel", + "targetModelLocation": "https://targetEndpoint.cognitiveservices.azure.com/documentintelligence/documentModels/targetModel", + "targetResourceId": "/subscriptions/targetSub/resourceGroups/targetRG/providers/Microsoft.CognitiveServices/accounts/targetService", + "targetResourceRegion": "targetResourceRegion", + }, + ).result() + print(response) + + +# x-ms-original-file: 2024-07-31-preview/CopyDocumentModelTo.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/delete_document_classifier.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/delete_document_classifier.py new file mode 100644 index 000000000000..626792316492 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/delete_document_classifier.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python delete_document_classifier.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + client.delete_classifier( + classifier_id="myClassifier", + ) + + +# x-ms-original-file: 2024-07-31-preview/DeleteDocumentClassifier.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/delete_document_model.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/delete_document_model.py new file mode 100644 index 000000000000..652c5f7e96c9 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/delete_document_model.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python delete_document_model.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + client.delete_model( + model_id="myCustomModel", + ) + + +# x-ms-original-file: 2024-07-31-preview/DeleteDocumentModel.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_analyze_document_result_figure.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_analyze_document_result_figure.py new file mode 100644 index 000000000000..5120e3a4d728 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_analyze_document_result_figure.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python get_analyze_document_result_figure.py +""" + + +def main(): + client = DocumentIntelligenceClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.get_analyze_result_figure( + model_id="prebuilt-invoice", + result_id="3b31320d-8bab-4f88-b19c-2322a7f11034", + figure_id="1.0", + ) + print(response) + + +# x-ms-original-file: 2024-07-31-preview/GetAnalyzeDocumentResultFigure.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_analyze_document_result_fpdf.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_analyze_document_result_fpdf.py new file mode 100644 index 000000000000..541869693e9b --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_analyze_document_result_fpdf.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python get_analyze_document_result_fpdf.py +""" + + +def main(): + client = DocumentIntelligenceClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.get_analyze_result_pdf( + model_id="prebuilt-invoice", + result_id="3b31320d-8bab-4f88-b19c-2322a7f11034", + ) + print(response) + + +# x-ms-original-file: 2024-07-31-preview/GetAnalyzeDocumentResultFPdf.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_classifier.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_classifier.py new file mode 100644 index 000000000000..cf6ba81a1a6b --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_classifier.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python get_document_classifier.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.get_classifier( + classifier_id="myClassifier", + ) + print(response) + + +# x-ms-original-file: 2024-07-31-preview/GetDocumentClassifier.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_classifiers.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_classifiers.py new file mode 100644 index 000000000000..39a3f1bc3cd8 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_classifiers.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python get_document_classifiers.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.list_classifiers() + for item in response: + print(item) + + +# x-ms-original-file: 2024-07-31-preview/GetDocumentClassifiers.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_model_custom.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_model_custom.py new file mode 100644 index 000000000000..126348cbbcca --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_model_custom.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python get_document_model_custom.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.get_model( + model_id="myCustomModel", + ) + print(response) + + +# x-ms-original-file: 2024-07-31-preview/GetDocumentModel_Custom.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_model_prebuilt.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_model_prebuilt.py new file mode 100644 index 000000000000..0035c97e6dde --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_model_prebuilt.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python get_document_model_prebuilt.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.get_model( + model_id="prebuilt-invoice", + ) + print(response) + + +# x-ms-original-file: 2024-07-31-preview/GetDocumentModel_Prebuilt.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_models.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_models.py new file mode 100644 index 000000000000..791fdd0a399b --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_document_models.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python get_document_models.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.list_models() + for item in response: + print(item) + + +# x-ms-original-file: 2024-07-31-preview/GetDocumentModels.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_operation.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_operation.py new file mode 100644 index 000000000000..c84802eee376 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_operation.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python get_operation.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.get_operation( + operation_id="b704bb00-d130-4f3f-a1d8-ca96de3eabb4", + ) + print(response) + + +# x-ms-original-file: 2024-07-31-preview/GetOperation.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_operations.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_operations.py new file mode 100644 index 000000000000..658a265b8e73 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_operations.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python get_operations.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.list_operations() + for item in response: + print(item) + + +# x-ms-original-file: 2024-07-31-preview/GetOperations.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_resource_details.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_resource_details.py new file mode 100644 index 000000000000..8e9acb938285 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_samples/get_resource_details.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient + +""" +# PREREQUISITES + pip install azure-ai-documentintelligence +# USAGE + python get_resource_details.py +""" + + +def main(): + client = DocumentIntelligenceAdministrationClient( + endpoint="https://myendpoint.cognitiveservices.azure.com", + credential="CREDENTIAL", + ) + + response = client.get_resource_info() + print(response) + + +# x-ms-original-file: 2024-07-31-preview/GetResourceDetails.json +if __name__ == "__main__": + main() diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/conftest.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/conftest.py new file mode 100644 index 000000000000..d647c5afe1fc --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/conftest.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# For security, please avoid record sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + documentintelligence_subscription_id = os.environ.get( + "DOCUMENTINTELLIGENCE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000" + ) + documentintelligence_tenant_id = os.environ.get( + "DOCUMENTINTELLIGENCE_TENANT_ID", "00000000-0000-0000-0000-000000000000" + ) + documentintelligence_client_id = os.environ.get( + "DOCUMENTINTELLIGENCE_CLIENT_ID", "00000000-0000-0000-0000-000000000000" + ) + documentintelligence_client_secret = os.environ.get( + "DOCUMENTINTELLIGENCE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000" + ) + add_general_regex_sanitizer( + regex=documentintelligence_subscription_id, value="00000000-0000-0000-0000-000000000000" + ) + add_general_regex_sanitizer(regex=documentintelligence_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=documentintelligence_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=documentintelligence_client_secret, value="00000000-0000-0000-0000-000000000000") + + documentintelligenceadministration_subscription_id = os.environ.get( + "DOCUMENTINTELLIGENCEADMINISTRATION_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000" + ) + documentintelligenceadministration_tenant_id = os.environ.get( + "DOCUMENTINTELLIGENCEADMINISTRATION_TENANT_ID", "00000000-0000-0000-0000-000000000000" + ) + documentintelligenceadministration_client_id = os.environ.get( + "DOCUMENTINTELLIGENCEADMINISTRATION_CLIENT_ID", "00000000-0000-0000-0000-000000000000" + ) + documentintelligenceadministration_client_secret = os.environ.get( + "DOCUMENTINTELLIGENCEADMINISTRATION_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000" + ) + add_general_regex_sanitizer( + regex=documentintelligenceadministration_subscription_id, value="00000000-0000-0000-0000-000000000000" + ) + add_general_regex_sanitizer( + regex=documentintelligenceadministration_tenant_id, value="00000000-0000-0000-0000-000000000000" + ) + add_general_regex_sanitizer( + regex=documentintelligenceadministration_client_id, value="00000000-0000-0000-0000-000000000000" + ) + add_general_regex_sanitizer( + regex=documentintelligenceadministration_client_secret, value="00000000-0000-0000-0000-000000000000" + ) + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence.py new file mode 100644 index 000000000000..a2d8979b91ef --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import DocumentIntelligenceClientTestBase, DocumentIntelligencePreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDocumentIntelligence(DocumentIntelligenceClientTestBase): + @DocumentIntelligencePreparer() + @recorded_by_proxy + def test_begin_analyze_document(self, documentintelligence_endpoint): + client = self.create_client(endpoint=documentintelligence_endpoint) + response = client.begin_analyze_document( + model_id="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligencePreparer() + @recorded_by_proxy + def test_begin_analyze_batch_documents(self, documentintelligence_endpoint): + client = self.create_client(endpoint=documentintelligence_endpoint) + response = client.begin_analyze_batch_documents( + model_id="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligencePreparer() + @recorded_by_proxy + def test_get_analyze_result_pdf(self, documentintelligence_endpoint): + client = self.create_client(endpoint=documentintelligence_endpoint) + response = client.get_analyze_result_pdf( + model_id="str", + result_id="str", + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligencePreparer() + @recorded_by_proxy + def test_get_analyze_result_figure(self, documentintelligence_endpoint): + client = self.create_client(endpoint=documentintelligence_endpoint) + response = client.get_analyze_result_figure( + model_id="str", + result_id="str", + figure_id="str", + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligencePreparer() + @recorded_by_proxy + def test_begin_classify_document(self, documentintelligence_endpoint): + client = self.create_client(endpoint=documentintelligence_endpoint) + response = client.begin_classify_document( + classifier_id="str", + classify_request={"base64Source": bytes("bytes", encoding="utf-8"), "urlSource": "str"}, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence_administration.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence_administration.py new file mode 100644 index 000000000000..a149bfd573ce --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence_administration.py @@ -0,0 +1,245 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import DocumentIntelligenceAdministrationClientTestBase, DocumentIntelligenceAdministrationPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDocumentIntelligenceAdministration(DocumentIntelligenceAdministrationClientTestBase): + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_begin_build_document_model(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.begin_build_document_model( + build_request={ + "buildMode": "str", + "modelId": "str", + "allowOverwrite": bool, + "azureBlobFileListSource": {"containerUrl": "str", "fileList": "str"}, + "azureBlobSource": {"containerUrl": "str", "prefix": "str"}, + "description": "str", + "maxTrainingHours": 0.0, + "tags": {"str": "str"}, + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_begin_compose_model(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.begin_compose_model( + compose_request={ + "classifierId": "str", + "docTypes": { + "str": { + "buildMode": "str", + "confidenceThreshold": 0.0, + "description": "str", + "features": ["str"], + "fieldConfidence": {"str": 0.0}, + "fieldSchema": { + "str": { + "type": "str", + "description": "str", + "example": "str", + "items": ..., + "properties": {"str": ...}, + } + }, + "maxDocumentsToAnalyze": 0, + "modelId": "str", + "queryFields": ["str"], + } + }, + "modelId": "str", + "description": "str", + "split": "str", + "tags": {"str": "str"}, + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_authorize_model_copy(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.authorize_model_copy( + authorize_copy_request={"modelId": "str", "description": "str", "tags": {"str": "str"}}, + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_begin_copy_model_to(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.begin_copy_model_to( + model_id="str", + copy_to_request={ + "accessToken": "str", + "expirationDateTime": "2020-02-20 00:00:00", + "targetModelId": "str", + "targetModelLocation": "str", + "targetResourceId": "str", + "targetResourceRegion": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_get_model(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.get_model( + model_id="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_list_models(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.list_models() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_delete_model(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.delete_model( + model_id="str", + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_get_resource_info(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.get_resource_info() + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_get_operation(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.get_operation( + operation_id="str", + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_list_operations(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.list_operations() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_begin_build_classifier(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.begin_build_classifier( + build_request={ + "classifierId": "str", + "docTypes": { + "str": { + "azureBlobFileListSource": {"containerUrl": "str", "fileList": "str"}, + "azureBlobSource": {"containerUrl": "str", "prefix": "str"}, + "sourceKind": "str", + } + }, + "allowOverwrite": bool, + "baseClassifierId": "str", + "description": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_authorize_classifier_copy(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.authorize_classifier_copy( + authorize_copy_request={"classifierId": "str", "description": "str", "tags": {"str": "str"}}, + ) + + # please add some check logic here by yourself + # ... 
+ + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_begin_copy_classifier_to(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.begin_copy_classifier_to( + classifier_id="str", + copy_to_request={ + "accessToken": "str", + "expirationDateTime": "2020-02-20 00:00:00", + "targetClassifierId": "str", + "targetClassifierLocation": "str", + "targetResourceId": "str", + "targetResourceRegion": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_get_classifier(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.get_classifier( + classifier_id="str", + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_list_classifiers(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.list_classifiers() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy + def test_delete_classifier(self, documentintelligenceadministration_endpoint): + client = self.create_client(endpoint=documentintelligenceadministration_endpoint) + response = client.delete_classifier( + classifier_id="str", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence_administration_async.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence_administration_async.py new file mode 100644 index 000000000000..7b1980282a18 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence_administration_async.py @@ -0,0 +1,256 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import DocumentIntelligenceAdministrationPreparer +from testpreparer_async import DocumentIntelligenceAdministrationClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDocumentIntelligenceAdministrationAsync(DocumentIntelligenceAdministrationClientTestBaseAsync): + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_begin_build_document_model(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await ( + await client.begin_build_document_model( + build_request={ + "buildMode": "str", + "modelId": "str", + "allowOverwrite": bool, + "azureBlobFileListSource": {"containerUrl": "str", "fileList": "str"}, + "azureBlobSource": {"containerUrl": "str", "prefix": "str"}, + "description": "str", + "maxTrainingHours": 0.0, + "tags": {"str": "str"}, + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_begin_compose_model(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await ( + await client.begin_compose_model( + compose_request={ + "classifierId": "str", + "docTypes": { + "str": { + "buildMode": "str", + "confidenceThreshold": 0.0, + "description": "str", + "features": ["str"], + "fieldConfidence": {"str": 0.0}, + "fieldSchema": { + "str": { + "type": "str", + "description": "str", + "example": "str", + "items": ..., + "properties": {"str": ...}, + } + }, + "maxDocumentsToAnalyze": 0, + "modelId": "str", + "queryFields": ["str"], + } + }, + "modelId": "str", + "description": "str", + "split": "str", + "tags": {"str": "str"}, + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_authorize_model_copy(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await client.authorize_model_copy( + authorize_copy_request={"modelId": "str", "description": "str", "tags": {"str": "str"}}, + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_begin_copy_model_to(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await ( + await client.begin_copy_model_to( + model_id="str", + copy_to_request={ + "accessToken": "str", + "expirationDateTime": "2020-02-20 00:00:00", + "targetModelId": "str", + "targetModelLocation": "str", + "targetResourceId": "str", + "targetResourceRegion": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_model(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await client.get_model( + model_id="str", + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_list_models(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = client.list_models() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_delete_model(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await client.delete_model( + model_id="str", + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_resource_info(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await client.get_resource_info() + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_operation(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await client.get_operation( + operation_id="str", + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_list_operations(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = client.list_operations() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_begin_build_classifier(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await ( + await client.begin_build_classifier( + build_request={ + "classifierId": "str", + "docTypes": { + "str": { + "azureBlobFileListSource": {"containerUrl": "str", "fileList": "str"}, + "azureBlobSource": {"containerUrl": "str", "prefix": "str"}, + "sourceKind": "str", + } + }, + "allowOverwrite": bool, + "baseClassifierId": "str", + "description": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_authorize_classifier_copy(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await client.authorize_classifier_copy( + authorize_copy_request={"classifierId": "str", "description": "str", "tags": {"str": "str"}}, + ) + + # please add some check logic here by yourself + # ... 
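The list/get tests above only materialize the async pagers and never assert anything. Below is a small, hedged sketch of the check logic that could replace those placeholders; it is not taken from this PR, and the model_id, custom_document_models, count, limit, operation_id, and status attribute names are assumed from the corresponding REST fields rather than quoted from the generated models.

import asyncio
import os

from azure.core.credentials import AzureKeyCredential
from azure.ai.documentintelligence.aio import DocumentIntelligenceAdministrationClient


async def check_models_and_quota() -> None:
    # Hypothetical configuration; substitute your own resource values.
    endpoint = os.environ["DOCUMENTINTELLIGENCE_ENDPOINT"]
    key = os.environ["DOCUMENTINTELLIGENCE_API_KEY"]

    async with DocumentIntelligenceAdministrationClient(endpoint, AzureKeyCredential(key)) as client:
        # list_models returns an async pager, which is why the generated tests use `async for`.
        models = [model async for model in client.list_models()]
        assert models  # the listing includes prebuilt models as well as any custom ones
        assert all(model.model_id for model in models)

        resource_info = await client.get_resource_info()
        # Custom-model usage should never exceed the resource quota.
        assert resource_info.custom_document_models.count <= resource_info.custom_document_models.limit

        async for operation in client.list_operations():
            # Every operation record should at least carry an id and a status.
            assert operation.operation_id
            assert operation.status


if __name__ == "__main__":
    asyncio.run(check_models_and_quota())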
+ + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_begin_copy_classifier_to(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await ( + await client.begin_copy_classifier_to( + classifier_id="str", + copy_to_request={ + "accessToken": "str", + "expirationDateTime": "2020-02-20 00:00:00", + "targetClassifierId": "str", + "targetClassifierLocation": "str", + "targetResourceId": "str", + "targetResourceRegion": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_classifier(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await client.get_classifier( + classifier_id="str", + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_list_classifiers(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = client.list_classifiers() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @DocumentIntelligenceAdministrationPreparer() + @recorded_by_proxy_async + async def test_delete_classifier(self, documentintelligenceadministration_endpoint): + client = self.create_async_client(endpoint=documentintelligenceadministration_endpoint) + response = await client.delete_classifier( + classifier_id="str", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence_async.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence_async.py new file mode 100644 index 000000000000..e7fd894224fb --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/test_document_intelligence_async.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import DocumentIntelligencePreparer +from testpreparer_async import DocumentIntelligenceClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDocumentIntelligenceAsync(DocumentIntelligenceClientTestBaseAsync): + @DocumentIntelligencePreparer() + @recorded_by_proxy_async + async def test_begin_analyze_document(self, documentintelligence_endpoint): + client = self.create_async_client(endpoint=documentintelligence_endpoint) + response = await ( + await client.begin_analyze_document( + model_id="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligencePreparer() + @recorded_by_proxy_async + async def test_begin_analyze_batch_documents(self, documentintelligence_endpoint): + client = self.create_async_client(endpoint=documentintelligence_endpoint) + response = await ( + await client.begin_analyze_batch_documents( + model_id="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @DocumentIntelligencePreparer() + @recorded_by_proxy_async + async def test_get_analyze_result_pdf(self, documentintelligence_endpoint): + client = self.create_async_client(endpoint=documentintelligence_endpoint) + response = await client.get_analyze_result_pdf( + model_id="str", + result_id="str", + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligencePreparer() + @recorded_by_proxy_async + async def test_get_analyze_result_figure(self, documentintelligence_endpoint): + client = self.create_async_client(endpoint=documentintelligence_endpoint) + response = await client.get_analyze_result_figure( + model_id="str", + result_id="str", + figure_id="str", + ) + + # please add some check logic here by yourself + # ... + + @DocumentIntelligencePreparer() + @recorded_by_proxy_async + async def test_begin_classify_document(self, documentintelligence_endpoint): + client = self.create_async_client(endpoint=documentintelligence_endpoint) + response = await ( + await client.begin_classify_document( + classifier_id="str", + classify_request={"base64Source": bytes("bytes", encoding="utf-8"), "urlSource": "str"}, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/testpreparer.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/testpreparer.py new file mode 100644 index 000000000000..ed3b18488bf5 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/testpreparer.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from azure.ai.documentintelligence import DocumentIntelligenceAdministrationClient, DocumentIntelligenceClient +from devtools_testutils import AzureRecordedTestCase, PowerShellPreparer +import functools + + +class DocumentIntelligenceClientTestBase(AzureRecordedTestCase): + + def create_client(self, endpoint): + credential = self.get_credential(DocumentIntelligenceClient) + return self.create_client_from_credential( + DocumentIntelligenceClient, + credential=credential, + endpoint=endpoint, + ) + + +DocumentIntelligencePreparer = functools.partial( + PowerShellPreparer, + "documentintelligence", + documentintelligence_endpoint="https://fake_documentintelligence_endpoint.com", +) + + +class DocumentIntelligenceAdministrationClientTestBase(AzureRecordedTestCase): + + def create_client(self, endpoint): + credential = self.get_credential(DocumentIntelligenceAdministrationClient) + return self.create_client_from_credential( + DocumentIntelligenceAdministrationClient, + credential=credential, + endpoint=endpoint, + ) + + +DocumentIntelligenceAdministrationPreparer = functools.partial( + PowerShellPreparer, + "documentintelligenceadministration", + documentintelligenceadministration_endpoint="https://fake_documentintelligenceadministration_endpoint.com", +) diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/testpreparer_async.py b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/testpreparer_async.py new file mode 100644 index 000000000000..fece6c220e81 --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/generated_tests/testpreparer_async.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from azure.ai.documentintelligence.aio import DocumentIntelligenceAdministrationClient, DocumentIntelligenceClient +from devtools_testutils import AzureRecordedTestCase + + +class DocumentIntelligenceClientTestBaseAsync(AzureRecordedTestCase): + + def create_async_client(self, endpoint): + credential = self.get_credential(DocumentIntelligenceClient, is_async=True) + return self.create_client_from_credential( + DocumentIntelligenceClient, + credential=credential, + endpoint=endpoint, + ) + + +class DocumentIntelligenceAdministrationClientTestBaseAsync(AzureRecordedTestCase): + + def create_async_client(self, endpoint): + credential = self.get_credential(DocumentIntelligenceAdministrationClient, is_async=True) + return self.create_client_from_credential( + DocumentIntelligenceAdministrationClient, + credential=credential, + endpoint=endpoint, + ) diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/sdk_packaging.toml b/sdk/documentintelligence/azure-ai-documentintelligence/sdk_packaging.toml new file mode 100644 index 000000000000..e7687fdae93b --- /dev/null +++ b/sdk/documentintelligence/azure-ai-documentintelligence/sdk_packaging.toml @@ -0,0 +1,2 @@ +[packaging] +auto_update = false \ No newline at end of file diff --git a/sdk/documentintelligence/azure-ai-documentintelligence/tsp-location.yaml b/sdk/documentintelligence/azure-ai-documentintelligence/tsp-location.yaml index 18b1c4b0e7f4..c0180db82864 100644 --- a/sdk/documentintelligence/azure-ai-documentintelligence/tsp-location.yaml +++ b/sdk/documentintelligence/azure-ai-documentintelligence/tsp-location.yaml @@ -1,4 +1,4 @@ directory: specification/ai/DocumentIntelligence -commit: ec2a81edaecf3970e5938936e8256759905163e6 -additionalDirectories: [] +commit: ad3000cb1377aaf2556700bc5a40dd771ac1ce09 repo: Azure/azure-rest-api-specs +additionalDirectories:
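Taken together, the two testpreparer modules above supply the client factories and the PowerShellPreparer that injects documentintelligence_endpoint into each test, falling back to the fake endpoint when the real environment variable (conventionally DOCUMENTINTELLIGENCE_ENDPOINT) is not configured. As a hedged sketch of how a hand-written async test might reuse those pieces: the prebuilt-layout model id is a real prebuilt model, but the sample URL and the assumption that begin_analyze_document accepts an analyze_request body (mirroring the classify_request/build_request parameter pattern used elsewhere in these generated tests) are illustrative only.

from devtools_testutils.aio import recorded_by_proxy_async
from testpreparer import DocumentIntelligencePreparer
from testpreparer_async import DocumentIntelligenceClientTestBaseAsync


class TestAnalyzeLayoutAsync(DocumentIntelligenceClientTestBaseAsync):
    @DocumentIntelligencePreparer()
    @recorded_by_proxy_async
    async def test_analyze_layout_from_url(self, documentintelligence_endpoint):
        client = self.create_async_client(endpoint=documentintelligence_endpoint)
        poller = await client.begin_analyze_document(
            model_id="prebuilt-layout",
            # Assumed parameter name, following the *_request pattern seen above;
            # swap in a document URL your resource can actually reach.
            analyze_request={"urlSource": "https://example.com/sample.pdf"},
        )
        result = await poller.result()  # poll until the service returns the final result

        # Minimal check logic: layout analysis of a real document yields at least one page.
        assert result.pages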