diff --git a/sdk/healthdataaiservices/azure-health-deidentification/_meta.json b/sdk/healthdataaiservices/azure-health-deidentification/_meta.json
new file mode 100644
index 000000000000..70c7baffac82
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/_meta.json
@@ -0,0 +1,6 @@
+{
+ "commit": "0be55256bbf339fa9274dffcb6f053fb27898f08",
+ "repository_url": "https://github.com/Azure/azure-rest-api-specs",
+ "typespec_src": "specification/healthdataaiservices/HealthDataAIServices.DeidServices",
+ "@azure-tools/typespec-python": "0.36.4"
+}
\ No newline at end of file
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/__init__.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/__init__.py
index 01d9492ed4f6..75df74cc79f6 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/__init__.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._client import DeidentificationClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._client import DeidentificationClient # type: ignore
from ._version import VERSION
__version__ = VERSION
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -21,6 +27,6 @@
__all__ = [
"DeidentificationClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
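For reference, the _patch module imported by this regenerated __init__.py follows the standard Azure SDK customization template; the sketch below mirrors that template rather than this package's actual _patch.py, so the body is illustrative only.

# _patch.py (template sketch): names listed in __all__ are re-exported by the
# package __init__ shown above, and patch_sdk() runs once at import time.
from typing import List

__all__: List[str] = []  # add customized clients or models here to make them public


def patch_sdk():
    """Post-generation customization hook; a no-op when nothing is customized."""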
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_client.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_client.py
index 787817c317d9..213889e8d30a 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_client.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_client.py
@@ -19,13 +19,10 @@
from ._serialization import Deserializer, Serializer
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class DeidentificationClient(
- DeidentificationClientOperationsMixin
-): # pylint: disable=client-accepts-api-version-keyword
+class DeidentificationClient(DeidentificationClientOperationsMixin):
"""DeidentificationClient.
:param endpoint: Url of your De-identification Service. Required.
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_configuration.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_configuration.py
index 35237a99ba8f..f9d7f52cf423 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_configuration.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_configuration.py
@@ -13,11 +13,10 @@
from ._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
-class DeidentificationClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class DeidentificationClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for DeidentificationClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_model_base.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_model_base.py
index 43fd8c7e9b1b..e6a2730f9276 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_model_base.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_model_base.py
@@ -1,10 +1,11 @@
+# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
-# pylint: disable=protected-access, arguments-differ, signature-differs, broad-except
+# pylint: disable=protected-access, broad-except
import copy
import calendar
@@ -19,6 +20,7 @@
import email.utils
from datetime import datetime, date, time, timedelta, timezone
from json import JSONEncoder
+import xml.etree.ElementTree as ET
from typing_extensions import Self
import isodate
from azure.core.exceptions import DeserializationError
@@ -123,7 +125,7 @@ def _serialize_datetime(o, format: typing.Optional[str] = None):
def _is_readonly(p):
try:
- return p._visibility == ["read"] # pylint: disable=protected-access
+ return p._visibility == ["read"]
except AttributeError:
return False
@@ -286,6 +288,12 @@ def _deserialize_decimal(attr):
return decimal.Decimal(str(attr))
+def _deserialize_int_as_str(attr):
+ if isinstance(attr, int):
+ return attr
+ return int(attr)
+
+
_DESERIALIZE_MAPPING = {
datetime: _deserialize_datetime,
date: _deserialize_date,
@@ -307,9 +315,11 @@ def _deserialize_decimal(attr):
def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
+ if annotation is int and rf and rf._format == "str":
+ return _deserialize_int_as_str
if rf and rf._format:
return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
- return _DESERIALIZE_MAPPING.get(annotation)
+ return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore
def _get_type_alias_type(module_name: str, alias_name: str):
@@ -441,6 +451,10 @@ def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-m
return float(o)
if isinstance(o, enum.Enum):
return o.value
+ if isinstance(o, int):
+ if format == "str":
+ return str(o)
+ return o
try:
# First try datetime.datetime
return _serialize_datetime(o, format)
@@ -471,11 +485,16 @@ def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typin
return value
if rf._is_model:
return _deserialize(rf._type, value)
+ if isinstance(value, ET.Element):
+ value = _deserialize(rf._type, value)
return _serialize(value, rf._format)
class Model(_MyMutableMapping):
_is_model = True
+ # tracks whether _attr_to_rest_field has been calculated for the current class
+ # we cannot check _attr_to_rest_field directly because a subclass inherits it from its parent class
+ _calculated: typing.Set[str] = set()
def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
class_name = self.__class__.__name__
@@ -486,10 +505,58 @@ def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
for rest_field in self._attr_to_rest_field.values()
if rest_field._default is not _UNSET
}
- if args:
- dict_to_pass.update(
- {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()}
- )
+ if args: # pylint: disable=too-many-nested-blocks
+ if isinstance(args[0], ET.Element):
+ existed_attr_keys = []
+ model_meta = getattr(self, "_xml", {})
+
+ for rf in self._attr_to_rest_field.values():
+ prop_meta = getattr(rf, "_xml", {})
+ xml_name = prop_meta.get("name", rf._rest_name)
+ xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
+ if xml_ns:
+ xml_name = "{" + xml_ns + "}" + xml_name
+
+ # attribute
+ if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None:
+ existed_attr_keys.append(xml_name)
+ dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name))
+ continue
+
+ # an unwrapped element is an array
+ if prop_meta.get("unwrapped", False):
+ # an unwrapped array may take its XML name from either the items metadata or the property metadata
+ if prop_meta.get("itemsName"):
+ xml_name = prop_meta.get("itemsName")
+ xml_ns = prop_meta.get("itemNs")
+ if xml_ns:
+ xml_name = "{" + xml_ns + "}" + xml_name
+ items = args[0].findall(xml_name) # pyright: ignore
+ if len(items) > 0:
+ existed_attr_keys.append(xml_name)
+ dict_to_pass[rf._rest_name] = _deserialize(rf._type, items)
+ continue
+
+ # a text element holds a primitive type value
+ if prop_meta.get("text", False):
+ if args[0].text is not None:
+ dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text)
+ continue
+
+ # a wrapped element may be a normal property or an array; it should contain only one matching element
+ item = args[0].find(xml_name)
+ if item is not None:
+ existed_attr_keys.append(xml_name)
+ dict_to_pass[rf._rest_name] = _deserialize(rf._type, item)
+
+ # anything left over is treated as additional properties
+ for e in args[0]:
+ if e.tag not in existed_attr_keys:
+ dict_to_pass[e.tag] = _convert_element(e)
+ else:
+ dict_to_pass.update(
+ {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()}
+ )
else:
non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field]
if non_attr_kwargs:
@@ -507,55 +574,70 @@ def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
def copy(self) -> "Model":
return Model(self.__dict__)
- def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: # pylint: disable=unused-argument
- # we know the last three classes in mro are going to be 'Model', 'dict', and 'object'
- mros = cls.__mro__[:-3][::-1] # ignore model, dict, and object parents, and reverse the mro order
- attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property
- k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type")
- }
- annotations = {
- k: v
- for mro_class in mros
- if hasattr(mro_class, "__annotations__") # pylint: disable=no-member
- for k, v in mro_class.__annotations__.items() # pylint: disable=no-member
- }
- for attr, rf in attr_to_rest_field.items():
- rf._module = cls.__module__
- if not rf._type:
- rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None))
- if not rf._rest_name_input:
- rf._rest_name_input = attr
- cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items())
+ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
+ if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated:
+ # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping',
+ # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object'
+ mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order
+ attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property
+ k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type")
+ }
+ annotations = {
+ k: v
+ for mro_class in mros
+ if hasattr(mro_class, "__annotations__")
+ for k, v in mro_class.__annotations__.items()
+ }
+ for attr, rf in attr_to_rest_field.items():
+ rf._module = cls.__module__
+ if not rf._type:
+ rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None))
+ if not rf._rest_name_input:
+ rf._rest_name_input = attr
+ cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items())
+ cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")
return super().__new__(cls) # pylint: disable=no-value-for-parameter
def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None:
for base in cls.__bases__:
- if hasattr(base, "__mapping__"): # pylint: disable=no-member
- base.__mapping__[discriminator or cls.__name__] = cls # type: ignore # pylint: disable=no-member
+ if hasattr(base, "__mapping__"):
+ base.__mapping__[discriminator or cls.__name__] = cls # type: ignore
@classmethod
- def _get_discriminator(cls, exist_discriminators) -> typing.Optional[str]:
+ def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]:
for v in cls.__dict__.values():
- if (
- isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators
- ): # pylint: disable=protected-access
- return v._rest_name # pylint: disable=protected-access
+ if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators:
+ return v
return None
@classmethod
def _deserialize(cls, data, exist_discriminators):
- if not hasattr(cls, "__mapping__"): # pylint: disable=no-member
+ if not hasattr(cls, "__mapping__"):
return cls(data)
discriminator = cls._get_discriminator(exist_discriminators)
- exist_discriminators.append(discriminator)
- mapped_cls = cls.__mapping__.get(data.get(discriminator), cls) # pyright: ignore # pylint: disable=no-member
- if mapped_cls == cls:
+ if discriminator is None:
return cls(data)
- return mapped_cls._deserialize(data, exist_discriminators) # pylint: disable=protected-access
+ exist_discriminators.append(discriminator._rest_name)
+ if isinstance(data, ET.Element):
+ model_meta = getattr(cls, "_xml", {})
+ prop_meta = getattr(discriminator, "_xml", {})
+ xml_name = prop_meta.get("name", discriminator._rest_name)
+ xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
+ if xml_ns:
+ xml_name = "{" + xml_ns + "}" + xml_name
+
+ if data.get(xml_name) is not None:
+ discriminator_value = data.get(xml_name)
+ else:
+ discriminator_value = data.find(xml_name).text # pyright: ignore
+ else:
+ discriminator_value = data.get(discriminator._rest_name)
+ mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore
+ return mapped_cls._deserialize(data, exist_discriminators)
def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]:
- """Return a dict that can be JSONify using json.dump.
+ """Return a dict that can be turned into json using json.dump.
:keyword bool exclude_readonly: Whether to remove the readonly properties.
:returns: A dict JSON compatible object
@@ -563,6 +645,7 @@ def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.
"""
result = {}
+ readonly_props = []
if exclude_readonly:
readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)]
for k, v in self.items():
@@ -617,6 +700,8 @@ def _deserialize_dict(
):
if obj is None:
return obj
+ if isinstance(obj, ET.Element):
+ obj = {child.tag: child for child in obj}
return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()}
@@ -637,6 +722,8 @@ def _deserialize_sequence(
):
if obj is None:
return obj
+ if isinstance(obj, ET.Element):
+ obj = list(obj)
return type(obj)(_deserialize(deserializer, entry, module) for entry in obj)
@@ -647,12 +734,12 @@ def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.An
)
-def _get_deserialize_callable_from_annotation( # pylint: disable=R0911, R0915, R0912
+def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches
annotation: typing.Any,
module: typing.Optional[str],
rf: typing.Optional["_RestField"] = None,
) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
- if not annotation or annotation in [int, float]:
+ if not annotation:
return None
# is it a type alias?
@@ -727,7 +814,6 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=R0911, R0915,
try:
if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore
if len(annotation.__args__) > 1: # pyright: ignore
-
entry_deserializers = [
_get_deserialize_callable_from_annotation(dt, module, rf)
for dt in annotation.__args__ # pyright: ignore
@@ -762,12 +848,23 @@ def _deserialize_default(
def _deserialize_with_callable(
deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
value: typing.Any,
-):
+): # pylint: disable=too-many-return-statements
try:
if value is None or isinstance(value, _Null):
return None
+ if isinstance(value, ET.Element):
+ if deserializer is str:
+ return value.text or ""
+ if deserializer is int:
+ return int(value.text) if value.text else None
+ if deserializer is float:
+ return float(value.text) if value.text else None
+ if deserializer is bool:
+ return value.text == "true" if value.text else None
if deserializer is None:
return value
+ if deserializer in [int, float, bool]:
+ return deserializer(value)
if isinstance(deserializer, CaseInsensitiveEnumMeta):
try:
return deserializer(value)
@@ -808,6 +905,7 @@ def __init__(
default: typing.Any = _UNSET,
format: typing.Optional[str] = None,
is_multipart_file_input: bool = False,
+ xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
):
self._type = type
self._rest_name_input = name
@@ -818,6 +916,7 @@ def __init__(
self._default = default
self._format = format
self._is_multipart_file_input = is_multipart_file_input
+ self._xml = xml if xml is not None else {}
@property
def _class_type(self) -> typing.Any:
@@ -868,6 +967,7 @@ def rest_field(
default: typing.Any = _UNSET,
format: typing.Optional[str] = None,
is_multipart_file_input: bool = False,
+ xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
) -> typing.Any:
return _RestField(
name=name,
@@ -876,6 +976,7 @@ def rest_field(
default=default,
format=format,
is_multipart_file_input=is_multipart_file_input,
+ xml=xml,
)
@@ -884,5 +985,175 @@ def rest_discriminator(
name: typing.Optional[str] = None,
type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin
visibility: typing.Optional[typing.List[str]] = None,
+ xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
) -> typing.Any:
- return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility)
+ return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml)
+
+
+def serialize_xml(model: Model, exclude_readonly: bool = False) -> str:
+ """Serialize a model to XML.
+
+ :param Model model: The model to serialize.
+ :param bool exclude_readonly: Whether to exclude readonly properties.
+ :returns: The XML representation of the model.
+ :rtype: str
+ """
+ return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore
+
+
+def _get_element(
+ o: typing.Any,
+ exclude_readonly: bool = False,
+ parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None,
+ wrapped_element: typing.Optional[ET.Element] = None,
+) -> typing.Union[ET.Element, typing.List[ET.Element]]:
+ if _is_model(o):
+ model_meta = getattr(o, "_xml", {})
+
+ # if the prop is itself a model, use the prop element directly; otherwise generate a wrapper element for the model
+ if wrapped_element is None:
+ wrapped_element = _create_xml_element(
+ model_meta.get("name", o.__class__.__name__),
+ model_meta.get("prefix"),
+ model_meta.get("ns"),
+ )
+
+ readonly_props = []
+ if exclude_readonly:
+ readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
+
+ for k, v in o.items():
+ # do not serialize readonly properties
+ if exclude_readonly and k in readonly_props:
+ continue
+
+ prop_rest_field = _get_rest_field(o._attr_to_rest_field, k)
+ if prop_rest_field:
+ prop_meta = getattr(prop_rest_field, "_xml").copy()
+ # use the wire name as xml name if no specific name is set
+ if prop_meta.get("name") is None:
+ prop_meta["name"] = k
+ else:
+ # additional properties have no rest field, so use the wire name as the xml name
+ prop_meta = {"name": k}
+
+ # if no ns for prop, use model's
+ if prop_meta.get("ns") is None and model_meta.get("ns"):
+ prop_meta["ns"] = model_meta.get("ns")
+ prop_meta["prefix"] = model_meta.get("prefix")
+
+ if prop_meta.get("unwrapped", False):
+ # unwrapped can only be set on an array
+ wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta))
+ elif prop_meta.get("text", False):
+ # text can only be set on a primitive type
+ wrapped_element.text = _get_primitive_type_value(v)
+ elif prop_meta.get("attribute", False):
+ xml_name = prop_meta.get("name", k)
+ if prop_meta.get("ns"):
+ ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore
+ xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore
+ # an attribute value should be a primitive type
+ wrapped_element.set(xml_name, _get_primitive_type_value(v))
+ else:
+ # other wrapped prop element
+ wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta))
+ return wrapped_element
+ if isinstance(o, list):
+ return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore
+ if isinstance(o, dict):
+ result = []
+ for k, v in o.items():
+ result.append(
+ _get_wrapped_element(
+ v,
+ exclude_readonly,
+ {
+ "name": k,
+ "ns": parent_meta.get("ns") if parent_meta else None,
+ "prefix": parent_meta.get("prefix") if parent_meta else None,
+ },
+ )
+ )
+ return result
+
+ # primitive case: the element needs to be created from parent_meta
+ if parent_meta:
+ return _get_wrapped_element(
+ o,
+ exclude_readonly,
+ {
+ "name": parent_meta.get("itemsName", parent_meta.get("name")),
+ "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")),
+ "ns": parent_meta.get("itemsNs", parent_meta.get("ns")),
+ },
+ )
+
+ raise ValueError("Could not serialize value into xml: " + o)
+
+
+def _get_wrapped_element(
+ v: typing.Any,
+ exclude_readonly: bool,
+ meta: typing.Optional[typing.Dict[str, typing.Any]],
+) -> ET.Element:
+ wrapped_element = _create_xml_element(
+ meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None
+ )
+ if isinstance(v, (dict, list)):
+ wrapped_element.extend(_get_element(v, exclude_readonly, meta))
+ elif _is_model(v):
+ _get_element(v, exclude_readonly, meta, wrapped_element)
+ else:
+ wrapped_element.text = _get_primitive_type_value(v)
+ return wrapped_element
+
+
+def _get_primitive_type_value(v) -> str:
+ if v is True:
+ return "true"
+ if v is False:
+ return "false"
+ if isinstance(v, _Null):
+ return ""
+ return str(v)
+
+
+def _create_xml_element(tag, prefix=None, ns=None):
+ if prefix and ns:
+ ET.register_namespace(prefix, ns)
+ if ns:
+ return ET.Element("{" + ns + "}" + tag)
+ return ET.Element(tag)
+
+
+def _deserialize_xml(
+ deserializer: typing.Any,
+ value: str,
+) -> typing.Any:
+ element = ET.fromstring(value) # nosec
+ return _deserialize(deserializer, element)
+
+
+def _convert_element(e: ET.Element):
+ # dict case
+ if len(e.attrib) > 0 or len({child.tag for child in e}) > 1:
+ dict_result: typing.Dict[str, typing.Any] = {}
+ for child in e:
+ if dict_result.get(child.tag) is not None:
+ if isinstance(dict_result[child.tag], list):
+ dict_result[child.tag].append(_convert_element(child))
+ else:
+ dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)]
+ else:
+ dict_result[child.tag] = _convert_element(child)
+ dict_result.update(e.attrib)
+ return dict_result
+ # array case
+ if len(e) > 0:
+ array_result: typing.List[typing.Any] = []
+ for child in e:
+ array_result.append(_convert_element(child))
+ return array_result
+ # primitive case
+ return e.text
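As a quick illustration of the XML metadata that _model_base now understands (the xml= argument to rest_field, the class-level _xml dict, and serialize_xml), here is a hedged sketch; the model, field names, and expected output are invented, and _model_base is a private module rather than public API.

# Sketch only: exercises the private XML helpers added above with made-up names.
from azure.health.deidentification import _model_base


class Pet(_model_base.Model):
    _xml = {"name": "Pet"}  # element name used for the model itself

    name: str = _model_base.rest_field(xml={"name": "Name"})  # serialized as a <Name> child element
    kind: str = _model_base.rest_field(xml={"attribute": True})  # serialized as an attribute on <Pet>


pet = Pet(name="Rex", kind="dog")
print(_model_base.serialize_xml(pet))  # e.g. '<Pet kind="dog"><Name>Rex</Name></Pet>'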
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_operations/__init__.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_operations/__init__.py
index f30b11092e89..8a3952cdf768 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_operations/__init__.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_operations/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import DeidentificationClientOperationsMixin
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import DeidentificationClientOperationsMixin # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"DeidentificationClientOperationsMixin",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_operations/_operations.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_operations/_operations.py
index f1aea8456604..c878e855a9d0 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_operations/_operations.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -9,7 +8,7 @@
from io import IOBase
import json
import sys
-from typing import Any, Callable, Dict, IO, Iterable, Iterator, List, Optional, Type, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, IO, Iterable, Iterator, List, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
@@ -18,6 +17,8 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.paging import ItemPaged
@@ -36,7 +37,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
@@ -238,53 +239,8 @@ def get_job(self, name: str, **kwargs: Any) -> _models.DeidentificationJob:
:return: DeidentificationJob. The DeidentificationJob is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationJob
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -317,7 +273,10 @@ def get_job(self, name: str, **kwargs: Any) -> _models.DeidentificationJob:
if response.status_code not in [200]:
if _stream:
- response.read() # Load the body in memory and close the socket
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -339,7 +298,7 @@ def get_job(self, name: str, **kwargs: Any) -> _models.DeidentificationJob:
def _create_job_initial(
self, name: str, resource: Union[_models.DeidentificationJob, JSON, IO[bytes]], **kwargs: Any
) -> Iterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -381,30 +340,20 @@ def _create_job_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
- response.read() # Load the body in memory and close the socket
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
response_headers = {}
- if response.status_code == 200:
- response_headers["x-ms-client-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-client-request-id")
- )
- response_headers["Operation-Location"] = self._deserialize(
- "str", response.headers.get("Operation-Location")
- )
-
- deserialized = response.iter_bytes()
-
- if response.status_code == 201:
- response_headers["x-ms-client-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-client-request-id")
- )
- response_headers["Operation-Location"] = self._deserialize(
- "str", response.headers.get("Operation-Location")
- )
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -431,93 +380,6 @@ def begin_create_job(
:rtype:
~azure.core.polling.LROPoller[~azure.health.deidentification.models.DeidentificationJob]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # JSON input template you can fill out and use as your body input.
- resource = {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
-
- # response body for status code(s): 201, 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
@overload
@@ -540,51 +402,6 @@ def begin_create_job(
:rtype:
~azure.core.polling.LROPoller[~azure.health.deidentification.models.DeidentificationJob]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 201, 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
@overload
@@ -607,51 +424,6 @@ def begin_create_job(
:rtype:
~azure.core.polling.LROPoller[~azure.health.deidentification.models.DeidentificationJob]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 201, 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
@distributed_trace
@@ -672,93 +444,6 @@ def begin_create_job(
:rtype:
~azure.core.polling.LROPoller[~azure.health.deidentification.models.DeidentificationJob]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # JSON input template you can fill out and use as your body input.
- resource = {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
-
- # response body for status code(s): 201, 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
@@ -833,51 +518,6 @@ def list_jobs(
:return: An iterator like instance of DeidentificationJob
:rtype: ~azure.core.paging.ItemPaged[~azure.health.deidentification.models.DeidentificationJob]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
@@ -885,7 +525,7 @@ def list_jobs(
maxpagesize = kwargs.pop("maxpagesize", None)
cls: ClsType[List[_models.DeidentificationJob]] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -968,35 +608,6 @@ def list_job_documents(
:return: An iterator like instance of DocumentDetails
:rtype: ~azure.core.paging.ItemPaged[~azure.health.deidentification.models.DocumentDetails]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "id": "str",
- "input": {
- "etag": "str",
- "path": "str"
- },
- "status": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "output": {
- "etag": "str",
- "path": "str"
- }
- }
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
@@ -1004,7 +615,7 @@ def list_job_documents(
maxpagesize = kwargs.pop("maxpagesize", None)
cls: ClsType[List[_models.DocumentDetails]] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1088,53 +699,8 @@ def cancel_job(self, name: str, **kwargs: Any) -> _models.DeidentificationJob:
:return: DeidentificationJob. The DeidentificationJob is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationJob
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1167,7 +733,10 @@ def cancel_job(self, name: str, **kwargs: Any) -> _models.DeidentificationJob:
if response.status_code not in [200]:
if _stream:
- response.read() # Load the body in memory and close the socket
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -1198,7 +767,7 @@ def delete_job(self, name: str, **kwargs: Any) -> None: # pylint: disable=incon
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1257,43 +826,6 @@ def deidentify(
:return: DeidentificationResult. The DeidentificationResult is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationResult
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # JSON input template you can fill out and use as your body input.
- body = {
- "inputText": "str",
- "dataType": "str",
- "operation": "str",
- "redactionFormat": "str"
- }
-
- # response body for status code(s): 200
- response == {
- "outputText": "str",
- "taggerResult": {
- "entities": [
- {
- "category": "str",
- "length": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "offset": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "confidenceScore": 0.0,
- "text": "str"
- }
- ],
- "etag": "str",
- "path": "str"
- }
- }
"""
@overload
@@ -1312,35 +844,6 @@ def deidentify(
:return: DeidentificationResult. The DeidentificationResult is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationResult
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "outputText": "str",
- "taggerResult": {
- "entities": [
- {
- "category": "str",
- "length": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "offset": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "confidenceScore": 0.0,
- "text": "str"
- }
- ],
- "etag": "str",
- "path": "str"
- }
- }
"""
@overload
@@ -1359,35 +862,6 @@ def deidentify(
:return: DeidentificationResult. The DeidentificationResult is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationResult
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "outputText": "str",
- "taggerResult": {
- "entities": [
- {
- "category": "str",
- "length": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "offset": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "confidenceScore": 0.0,
- "text": "str"
- }
- ],
- "etag": "str",
- "path": "str"
- }
- }
"""
@distributed_trace
@@ -1404,45 +878,8 @@ def deidentify(
:return: DeidentificationResult. The DeidentificationResult is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationResult
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # JSON input template you can fill out and use as your body input.
- body = {
- "inputText": "str",
- "dataType": "str",
- "operation": "str",
- "redactionFormat": "str"
- }
-
- # response body for status code(s): 200
- response == {
- "outputText": "str",
- "taggerResult": {
- "entities": [
- {
- "category": "str",
- "length": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "offset": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "confidenceScore": 0.0,
- "text": "str"
- }
- ],
- "etag": "str",
- "path": "str"
- }
- }
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1484,7 +921,10 @@ def deidentify(
if response.status_code not in [200]:
if _stream:
- response.read() # Load the body in memory and close the socket
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
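The request and response shapes removed from the docstrings above still describe the wire format, so a hedged end-to-end sketch of the synchronous deidentify call looks like the following; the endpoint value is a placeholder and azure-identity is assumed to be available.

# Sketch only: realtime de-identification with a JSON-compatible dict body,
# matching the removed "inputText"/"dataType"/"operation"/"redactionFormat" template.
from azure.identity import DefaultAzureCredential
from azure.health.deidentification import DeidentificationClient

client = DeidentificationClient(
    endpoint="https://example.api.deid.azure.com",  # placeholder service URL
    credential=DefaultAzureCredential(),
)

result = client.deidentify({"inputText": "Patient John Smith was seen on 1/2/2024."})
print(result["outputText"])  # result models are MutableMapping-compatible, per the docstrings above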
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_serialization.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_serialization.py
index 8139854b97bb..ce17d1798ce7 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_serialization.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_serialization.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -24,7 +25,6 @@
#
# --------------------------------------------------------------------------
-# pylint: skip-file
# pyright: reportUnnecessaryTypeIgnoreComment=false
from base64 import b64decode, b64encode
@@ -52,7 +52,6 @@
MutableMapping,
Type,
List,
- Mapping,
)
try:
@@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
"""
if hasattr(data, "read"):
# Assume a stream
@@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type:
try:
return json.loads(data_as_str)
except ValueError as err:
- raise DeserializationError("JSON is invalid: {}".format(err), err)
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
elif "xml" in (content_type or []):
try:
@@ -155,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
Use bytes and headers to NOT use any requests/aiohttp or whatever
specific implementation.
Headers will tested for "content-type"
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
"""
# Try to use content-type from headers if available
content_type = None
@@ -184,15 +190,30 @@ class UTC(datetime.tzinfo):
"""Time Zone info for handling UTC"""
def utcoffset(self, dt):
- """UTF offset for UTC is 0."""
+ """UTF offset for UTC is 0.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The offset
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(0)
def tzname(self, dt):
- """Timestamp representation."""
+ """Timestamp representation.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The timestamp representation
+ :rtype: str
+ """
return "Z"
def dst(self, dt):
- """No daylight saving for UTC."""
+ """No daylight saving for UTC.
+
+ :param datetime.datetime dt: The datetime
+ :returns: The daylight saving time
+ :rtype: datetime.timedelta
+ """
return datetime.timedelta(hours=1)
@@ -206,7 +227,7 @@ class _FixedOffset(datetime.tzinfo): # type: ignore
:param datetime.timedelta offset: offset in timedelta format
"""
- def __init__(self, offset):
+ def __init__(self, offset) -> None:
self.__offset = offset
def utcoffset(self, dt):
@@ -235,24 +256,26 @@ def __getinitargs__(self):
_FLATTEN = re.compile(r"(? None:
self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs:
+ for k in kwargs: # pylint: disable=consider-using-dict-items
if k not in self._attribute_map:
_LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
elif k in self._validation and self._validation[k].get("readonly", False):
@@ -300,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
setattr(self, k, kwargs[k])
def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes."""
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
return not self.__eq__(other)
def __str__(self) -> str:
@@ -326,7 +366,11 @@ def is_xml_model(cls) -> bool:
@classmethod
def _create_xml_node(cls):
- """Create XML node."""
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
try:
xml_map = cls._xml_map # type: ignore
except AttributeError:
@@ -346,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
def as_dict(
self,
@@ -380,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
If you want XML serialization, you can pass the kwargs is_xml=True.
+ :param bool keep_readonly: If you want to serialize the readonly attributes
:param function key_transformer: A key transformer function.
:returns: A dict JSON compatible object
:rtype: dict
"""
serializer = Serializer(self._infer_class_models())
- return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
@classmethod
def _infer_class_models(cls):
@@ -395,7 +444,7 @@ def _infer_class_models(cls):
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
if cls.__name__ not in client_models:
raise ValueError("Not Autorest generated code")
- except Exception:
+ except Exception: # pylint: disable=broad-exception-caught
# Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
client_models = {cls.__name__: cls}
return client_models
@@ -408,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
@@ -426,9 +476,11 @@ def from_dict(
and last_rest_key_case_insensitive_extractor)
:param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong
+ :rtype: ModelType
"""
deserializer = Deserializer(cls._infer_class_models())
deserializer.key_extractors = ( # type: ignore
@@ -448,21 +500,25 @@ def _flatten_subtype(cls, key, objects):
return {}
result = dict(cls._subtype_map[key])
for valuetype in cls._subtype_map[key].values():
- result.update(objects[valuetype]._flatten_subtype(key, objects))
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
return result
@classmethod
def _classify(cls, response, objects):
"""Check the class _subtype_map for any child classes.
We want to ignore any inherited _subtype_maps.
- Remove the polymorphic key from the initial data.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
"""
for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
subtype_value = None
if not isinstance(response, ET.Element):
rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
- subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None)
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
else:
subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
if subtype_value:
@@ -501,11 +557,13 @@ def _decode_attribute_map_key(key):
inside the received data.
:param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
"""
return key.replace("\\.", ".")
-class Serializer(object):
+class Serializer(object): # pylint: disable=too-many-public-methods
"""Request object model serializer."""
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -540,7 +598,7 @@ class Serializer(object):
"multiple": lambda x, y: x % y != 0,
}
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.serialize_type = {
"iso-8601": Serializer.serialize_iso,
"rfc-1123": Serializer.serialize_rfc,
@@ -560,13 +618,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None):
self.key_transformer = full_restapi_key_transformer
self.client_side_validation = True
- def _serialize(self, target_obj, data_type=None, **kwargs):
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
"""Serialize data into a string according to type.
- :param target_obj: The data to be serialized.
+ :param object target_obj: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str, dict
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
"""
key_transformer = kwargs.get("key_transformer", self.key_transformer)
keep_readonly = kwargs.get("keep_readonly", False)
@@ -592,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
serialized = {}
if is_xml_model_serialization:
- serialized = target_obj._create_xml_node()
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
try:
- attributes = target_obj._attribute_map
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
for attr, attr_desc in attributes.items():
attr_name = attr
- if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
continue
if attr_name == "additional_properties" and attr_desc["key"] == "":
@@ -633,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
if isinstance(new_attr, list):
serialized.extend(new_attr) # type: ignore
elif isinstance(new_attr, ET.Element):
- # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces.
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
if "name" not in getattr(orig_attr, "_xml_map", {}):
splitted_tag = new_attr.tag.split("}")
if len(splitted_tag) == 2: # Namespace
@@ -664,17 +728,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
except (AttributeError, KeyError, TypeError) as err:
msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
raise SerializationError(msg) from err
- else:
- return serialized
+ return serialized
def body(self, data, data_type, **kwargs):
"""Serialize data intended for a request body.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: dict
:raises: SerializationError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized request body
"""
# Just in case this is a dict
@@ -703,7 +767,7 @@ def body(self, data, data_type, **kwargs):
attribute_key_case_insensitive_extractor,
last_rest_key_case_insensitive_extractor,
]
- data = deserializer._deserialize(data_type, data)
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
except DeserializationError as err:
raise SerializationError("Unable to build a model: " + str(err)) from err
@@ -712,9 +776,11 @@ def body(self, data, data_type, **kwargs):
def url(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL path.
- :param data: The data to be serialized.
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
+ :returns: The serialized URL path
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
"""
@@ -728,21 +794,20 @@ def url(self, name, data, data_type, **kwargs):
output = output.replace("{", quote("{")).replace("}", quote("}"))
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return output
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
def query(self, name, data, data_type, **kwargs):
"""Serialize data intended for a URL query.
- :param data: The data to be serialized.
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :keyword bool skip_quote: Whether to skip quote the serialized result.
- Defaults to False.
:rtype: str, list
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized query parameter
"""
try:
# Treat the list aside, since we don't want to encode the div separator
@@ -759,19 +824,20 @@ def query(self, name, data, data_type, **kwargs):
output = str(output)
else:
output = quote(str(output), safe="")
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def header(self, name, data, data_type, **kwargs):
"""Serialize data intended for a request header.
- :param data: The data to be serialized.
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
:rtype: str
:raises: TypeError if serialization fails.
:raises: ValueError if data is None
+ :returns: The serialized header
"""
try:
if data_type in ["[str]"]:
@@ -780,21 +846,20 @@ def header(self, name, data, data_type, **kwargs):
output = self.serialize_data(data, data_type, **kwargs)
if data_type == "bool":
output = json.dumps(output)
- except SerializationError:
- raise TypeError("{} must be type {}.".format(name, data_type))
- else:
- return str(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
def serialize_data(self, data, data_type, **kwargs):
"""Serialize generic data according to supplied data type.
- :param data: The data to be serialized.
+ :param object data: The data to be serialized.
:param str data_type: The type to be serialized from.
- :param bool required: Whether it's essential that the data not be
- empty or None
:raises: AttributeError if required data is None.
:raises: ValueError if data is None
:raises: SerializationError if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
"""
if data is None:
raise ValueError("No value for given attribute")
@@ -805,7 +870,7 @@ def serialize_data(self, data, data_type, **kwargs):
if data_type in self.basic_types.values():
return self.serialize_basic(data, data_type, **kwargs)
- elif data_type in self.serialize_type:
+ if data_type in self.serialize_type:
return self.serialize_type[data_type](data, **kwargs)
# If dependencies is empty, try with current data class
@@ -821,11 +886,10 @@ def serialize_data(self, data, data_type, **kwargs):
except (ValueError, TypeError) as err:
msg = "Unable to serialize value: {!r} as type: {!r}."
raise SerializationError(msg.format(data, data_type)) from err
- else:
- return self._serialize(data, **kwargs)
+ return self._serialize(data, **kwargs)
@classmethod
- def _get_custom_serializers(cls, data_type, **kwargs):
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
if custom_serializer:
return custom_serializer
@@ -841,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs):
- basic_types_serializers dict[str, callable] : If set, use the callable as serializer
- is_xml bool : If set, use xml_basic_types_serializers
- :param data: Object to be serialized.
+ :param object data: Object to be serialized.
:param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
- return eval(data_type)(data) # nosec
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
@classmethod
def serialize_unicode(cls, data):
"""Special handling for serializing unicode strings in Py2.
Encode to UTF-8 if unicode, otherwise handle as a str.
- :param data: Object to be serialized.
+ :param str data: Object to be serialized.
:rtype: str
+ :return: serialized object
"""
try: # If I received an enum, return its value
return data.value
@@ -871,8 +938,7 @@ def serialize_unicode(cls, data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
def serialize_iter(self, data, iter_type, div=None, **kwargs):
"""Serialize iterable.
@@ -882,15 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
serialization_ctxt['type'] should be same as data_type.
- is_xml bool : If set, serialize as XML
- :param list attr: Object to be serialized.
+ :param list data: Object to be serialized.
:param str iter_type: Type of object in the iterable.
- :param bool required: Whether the objects in the iterable must
- not be None or empty.
:param str div: If set, this str will be used to combine the elements
in the iterable into a combined string. Default is 'None'.
:keyword bool do_quote: Whether to quote the serialized result of each iterable element.
Defaults to False.
:rtype: list, str
+ :return: serialized iterable
"""
if isinstance(data, str):
raise SerializationError("Refuse str type as a valid iter type.")
@@ -945,9 +1009,8 @@ def serialize_dict(self, attr, dict_type, **kwargs):
:param dict attr: Object to be serialized.
:param str dict_type: Type of object in the dictionary.
- :param bool required: Whether the objects in the dictionary must
- not be None or empty.
:rtype: dict
+ :return: serialized dictionary
"""
serialization_ctxt = kwargs.get("serialization_ctxt", {})
serialized = {}
@@ -971,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs):
return serialized
- def serialize_object(self, attr, **kwargs):
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Serialize a generic object.
This will be handled as a dictionary. If object passed in is not
a basic type (str, int, float, dict, list) it will simply be
@@ -979,6 +1042,7 @@ def serialize_object(self, attr, **kwargs):
:param dict attr: Object to be serialized.
:rtype: dict or str
+ :return: serialized object
"""
if attr is None:
return None
@@ -1003,7 +1067,7 @@ def serialize_object(self, attr, **kwargs):
return self.serialize_decimal(attr)
# If it's a model or I know this dependency, serialize as a Model
- elif obj_type in self.dependencies.values() or isinstance(attr, Model):
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
return self._serialize(attr)
if obj_type == dict:
@@ -1034,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None):
try:
enum_obj(result) # type: ignore
return result
- except ValueError:
+ except ValueError as exc:
for enum_value in enum_obj: # type: ignore
if enum_value.value.lower() == str(attr).lower():
return enum_value.value
error = "{!r} is not valid value for enum {!r}"
- raise SerializationError(error.format(attr, enum_obj))
+ raise SerializationError(error.format(attr, enum_obj)) from exc
@staticmethod
- def serialize_bytearray(attr, **kwargs):
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize bytearray into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
return b64encode(attr).decode()
@staticmethod
- def serialize_base64(attr, **kwargs):
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize str into base-64 string.
- :param attr: Object to be serialized.
+ :param str attr: Object to be serialized.
:rtype: str
+ :return: serialized base64
"""
encoded = b64encode(attr).decode("ascii")
return encoded.strip("=").replace("+", "-").replace("/", "_")
@staticmethod
- def serialize_decimal(attr, **kwargs):
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Decimal object to float.
- :param attr: Object to be serialized.
+ :param decimal attr: Object to be serialized.
:rtype: float
+ :return: serialized decimal
"""
return float(attr)
@staticmethod
- def serialize_long(attr, **kwargs):
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize long (Py2) or int (Py3).
- :param attr: Object to be serialized.
+ :param int attr: Object to be serialized.
:rtype: int/long
+ :return: serialized long
"""
return _long_type(attr)
@staticmethod
- def serialize_date(attr, **kwargs):
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Date object into ISO-8601 formatted string.
:param Date attr: Object to be serialized.
:rtype: str
+ :return: serialized date
"""
if isinstance(attr, str):
attr = isodate.parse_date(attr)
@@ -1091,11 +1160,12 @@ def serialize_date(attr, **kwargs):
return t
@staticmethod
- def serialize_time(attr, **kwargs):
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Time object into ISO-8601 formatted string.
:param datetime.time attr: Object to be serialized.
:rtype: str
+ :return: serialized time
"""
if isinstance(attr, str):
attr = isodate.parse_time(attr)
@@ -1105,30 +1175,32 @@ def serialize_time(attr, **kwargs):
return t
@staticmethod
- def serialize_duration(attr, **kwargs):
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize TimeDelta object into ISO-8601 formatted string.
:param TimeDelta attr: Object to be serialized.
:rtype: str
+ :return: serialized duration
"""
if isinstance(attr, str):
attr = isodate.parse_duration(attr)
return isodate.duration_isoformat(attr)
@staticmethod
- def serialize_rfc(attr, **kwargs):
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into RFC-1123 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: TypeError if format invalid.
+ :return: serialized rfc
"""
try:
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
utc = attr.utctimetuple()
- except AttributeError:
- raise TypeError("RFC1123 object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
Serializer.days[utc.tm_wday],
@@ -1141,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
)
@staticmethod
- def serialize_iso(attr, **kwargs):
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: SerializationError if format invalid.
+ :return: serialized iso
"""
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
@@ -1172,13 +1245,14 @@ def serialize_iso(attr, **kwargs):
raise TypeError(msg) from err
@staticmethod
- def serialize_unix(attr, **kwargs):
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
"""Serialize Datetime object into IntTime format.
This is represented as seconds.
:param Datetime attr: Object to be serialized.
:rtype: int
:raises: SerializationError if format invalid
+ :return: serialized unix
"""
if isinstance(attr, int):
return attr
@@ -1186,11 +1260,11 @@ def serialize_unix(attr, **kwargs):
if not attr.tzinfo:
_LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
return int(calendar.timegm(attr.utctimetuple()))
- except AttributeError:
- raise TypeError("Unix time object must be valid Datetime object.")
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
key = attr_desc["key"]
working_data = data
@@ -1211,7 +1285,9 @@ def rest_key_extractor(attr, attr_desc, data):
return working_data.get(key)
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
key = attr_desc["key"]
working_data = data
@@ -1232,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
return attribute_key_case_insensitive_extractor(key, None, working_data)
-def last_rest_key_extractor(attr, attr_desc, data):
- """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
return attribute_key_extractor(dict_keys[-1], None, data)
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
"""Extract the attribute in "data" based on the last part of the JSON path key.
This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
"""
key = attr_desc["key"]
dict_keys = _FLATTEN.split(key)
@@ -1279,7 +1367,7 @@ def _extract_name_from_internal_type(internal_type):
return xml_name
-def xml_key_extractor(attr, attr_desc, data):
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
if isinstance(data, dict):
return None
@@ -1331,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data):
if is_iter_type:
if is_wrapped:
return None # is_wrapped no node, we want None
- else:
- return [] # not wrapped, assume empty list
+ return [] # not wrapped, assume empty list
return None # Assume it's not there, maybe an optional node.
# If is_iter_type and not wrapped, return all found children
if is_iter_type:
if not is_wrapped:
return children
- else: # Iter and wrapped, should have found one node only (the wrap one)
- if len(children) != 1:
- raise DeserializationError(
- "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
- xml_name
- )
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
)
- return list(children[0]) # Might be empty list and that's ok.
+ )
+ return list(children[0]) # Might be empty list and that's ok.
# Here it's not a itertype, we should have found one element only or empty
if len(children) > 1:
@@ -1363,9 +1450,9 @@ class Deserializer(object):
basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
- valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
- def __init__(self, classes: Optional[Mapping[str, type]] = None):
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
self.deserialize_type = {
"iso-8601": Deserializer.deserialize_iso,
"rfc-1123": Deserializer.deserialize_rfc,
@@ -1403,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None):
:param str content_type: Swagger "produces" if available.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
data = self._unpack_content(response_data, content_type)
return self._deserialize(target_obj, data)
- def _deserialize(self, target_obj, data):
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
"""Call the deserializer on a model.
Data needs to be already deserialized as JSON or XML ElementTree
@@ -1416,12 +1504,13 @@ def _deserialize(self, target_obj, data):
:param object data: Object to deserialize.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
# This is already a model, go recursive just in case
if hasattr(data, "_attribute_map"):
constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
try:
- for attr, mapconfig in data._attribute_map.items():
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
if attr in constants:
continue
value = getattr(data, attr)
@@ -1440,13 +1529,13 @@ def _deserialize(self, target_obj, data):
if isinstance(response, str):
return self.deserialize_data(data, response)
- elif isinstance(response, type) and issubclass(response, Enum):
+ if isinstance(response, type) and issubclass(response, Enum):
return self.deserialize_enum(data, response)
if data is None or data is CoreNull:
return data
try:
- attributes = response._attribute_map # type: ignore
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
d_attrs = {}
for attr, attr_desc in attributes.items():
# Check empty string. If it's not empty, someone has a real "additionalProperties"...
@@ -1476,9 +1565,8 @@ def _deserialize(self, target_obj, data):
except (AttributeError, TypeError, KeyError) as err:
msg = "Unable to deserialize to object: " + class_name # type: ignore
raise DeserializationError(msg) from err
- else:
- additional_properties = self._build_additional_properties(attributes, data)
- return self._instantiate_model(response, d_attrs, additional_properties)
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
def _build_additional_properties(self, attribute_map, data):
if not self.additional_properties_detection:
@@ -1505,6 +1593,8 @@ def _classify_target(self, target, data):
:param str target: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
"""
if target is None:
return None, None
@@ -1516,7 +1606,7 @@ def _classify_target(self, target, data):
return target, target
try:
- target = target._classify(data, self.dependencies) # type: ignore
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
except AttributeError:
pass # Target is not a Model, no classify
return target, target.__class__.__name__ # type: ignore
@@ -1531,10 +1621,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None):
:param str target_obj: The target object type to deserialize to.
:param str/dict data: The response data to deserialize.
:param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
"""
try:
return self(target_obj, data, content_type=content_type)
- except:
+ except: # pylint: disable=bare-except
_LOGGER.debug(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -1552,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None):
If raw_data is something else, bypass all logic and return it directly.
- :param raw_data: Data to be processed.
- :param content_type: How to parse if raw_data is a string/bytes.
+ :param object raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
:raises JSONDecodeError: If JSON is requested and parsing is impossible.
:raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
"""
# Assume this is enough to detect a Pipeline Response without importing it
context = getattr(raw_data, "context", {})
@@ -1579,14 +1673,21 @@ def _unpack_content(raw_data, content_type=None):
def _instantiate_model(self, response, attrs, additional_properties=None):
"""Instantiate a response model passing in deserialized args.
- :param response: The response model class.
- :param d_attrs: The deserialized response attributes.
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
"""
if callable(response):
subtype = getattr(response, "_subtype_map", {})
try:
- readonly = [k for k, v in response._validation.items() if v.get("readonly")]
- const = [k for k, v in response._validation.items() if v.get("constant")]
+ readonly = [
+ k for k, v in response._validation.items() if v.get("readonly") # pylint: disable=protected-access
+ ]
+ const = [
+ k for k, v in response._validation.items() if v.get("constant") # pylint: disable=protected-access
+ ]
kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
response_obj = response(**kwargs)
for attr in readonly:
@@ -1596,7 +1697,7 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
return response_obj
except TypeError as err:
msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
- raise DeserializationError(msg + str(err))
+ raise DeserializationError(msg + str(err)) from err
else:
try:
for attr, value in attrs.items():
@@ -1605,15 +1706,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None):
except Exception as exp:
msg = "Unable to populate response model. "
msg += "Type: {}, Error: {}".format(type(response), exp)
- raise DeserializationError(msg)
+ raise DeserializationError(msg) from exp
- def deserialize_data(self, data, data_type):
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
"""Process data for deserialization according to data type.
:param str data: The response string to be deserialized.
:param str data_type: The type to deserialize to.
:raises: DeserializationError if deserialization fails.
:return: Deserialized object.
+ :rtype: object
"""
if data is None:
return data
@@ -1627,7 +1729,11 @@ def deserialize_data(self, data, data_type):
if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
return data
- is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"]
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
return None
data_val = self.deserialize_type[data_type](data)
@@ -1647,14 +1753,14 @@ def deserialize_data(self, data, data_type):
msg = "Unable to deserialize response data."
msg += " Data: {}, {}".format(data, data_type)
raise DeserializationError(msg) from err
- else:
- return self._deserialize(obj_type, data)
+ return self._deserialize(obj_type, data)
def deserialize_iter(self, attr, iter_type):
"""Deserialize an iterable.
:param list attr: Iterable to be deserialized.
:param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
:rtype: list
"""
if attr is None:
@@ -1671,6 +1777,7 @@ def deserialize_dict(self, attr, dict_type):
:param dict/list attr: Dictionary to be deserialized. Also accepts
a list of key, value pairs.
:param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
:rtype: dict
"""
if isinstance(attr, list):
@@ -1681,11 +1788,12 @@ def deserialize_dict(self, attr, dict_type):
attr = {el.tag: el.text for el in attr}
return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
- def deserialize_object(self, attr, **kwargs):
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
"""Deserialize a generic object.
This will be handled as a dictionary.
:param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
:rtype: dict
:raises: TypeError if non-builtin datatype encountered.
"""
@@ -1720,11 +1828,10 @@ def deserialize_object(self, attr, **kwargs):
pass
return deserialized
- else:
- error = "Cannot deserialize generic object with type: "
- raise TypeError(error + str(obj_type))
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
- def deserialize_basic(self, attr, data_type):
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
"""Deserialize basic builtin data type from string.
Will attempt to convert to str, int, float and bool.
This function will also accept '1', '0', 'true' and 'false' as
@@ -1732,6 +1839,7 @@ def deserialize_basic(self, attr, data_type):
:param str attr: response string to be deserialized.
:param str data_type: deserialization data type.
+ :return: Deserialized basic type.
:rtype: str, int, float or bool
:raises: TypeError if string format is not valid.
"""
@@ -1743,24 +1851,23 @@ def deserialize_basic(self, attr, data_type):
if data_type == "str":
# None or '', node is empty string.
return ""
- else:
- # None or '', node with a strong type is None.
- # Don't try to model "empty bool" or "empty int"
- return None
+ # None or '', node with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
if data_type == "bool":
if attr in [True, False, 1, 0]:
return bool(attr)
- elif isinstance(attr, str):
+ if isinstance(attr, str):
if attr.lower() in ["true", "1"]:
return True
- elif attr.lower() in ["false", "0"]:
+ if attr.lower() in ["false", "0"]:
return False
raise TypeError("Invalid boolean value: {}".format(attr))
if data_type == "str":
return self.deserialize_unicode(attr)
- return eval(data_type)(attr) # nosec
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
@staticmethod
def deserialize_unicode(data):
@@ -1768,6 +1875,7 @@ def deserialize_unicode(data):
as a string.
:param str data: response string to be deserialized.
+ :return: Deserialized string.
:rtype: str or unicode
"""
# We might be here because we have an enum modeled as string,
@@ -1781,8 +1889,7 @@ def deserialize_unicode(data):
return data
except NameError:
return str(data)
- else:
- return str(data)
+ return str(data)
@staticmethod
def deserialize_enum(data, enum_obj):
@@ -1794,6 +1901,7 @@ def deserialize_enum(data, enum_obj):
:param str data: Response string to be deserialized. If this value is
None or invalid it will be returned as-is.
:param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
:rtype: Enum
"""
if isinstance(data, enum_obj) or data is None:
@@ -1804,9 +1912,9 @@ def deserialize_enum(data, enum_obj):
# Workaround. We might consider remove it in the future.
try:
return list(enum_obj.__members__.values())[data]
- except IndexError:
+ except IndexError as exc:
error = "{!r} is not a valid index for enum {!r}"
- raise DeserializationError(error.format(data, enum_obj))
+ raise DeserializationError(error.format(data, enum_obj)) from exc
try:
return enum_obj(str(data))
except ValueError:
@@ -1822,6 +1930,7 @@ def deserialize_bytearray(attr):
"""Deserialize string into bytearray.
:param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1834,6 +1943,7 @@ def deserialize_base64(attr):
"""Deserialize base64 encoded string into string.
:param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
:rtype: bytearray
:raises: TypeError if string format invalid.
"""
@@ -1849,8 +1959,9 @@ def deserialize_decimal(attr):
"""Deserialize string into Decimal object.
:param str attr: response string to be deserialized.
- :rtype: Decimal
+ :return: Deserialized decimal
:raises: DeserializationError if string format invalid.
+ :rtype: decimal
"""
if isinstance(attr, ET.Element):
attr = attr.text
@@ -1865,6 +1976,7 @@ def deserialize_long(attr):
"""Deserialize string into long (Py2) or int (Py3).
:param str attr: response string to be deserialized.
+ :return: Deserialized int
:rtype: long or int
:raises: ValueError if string format invalid.
"""
@@ -1877,6 +1989,7 @@ def deserialize_duration(attr):
"""Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
+ :return: Deserialized duration
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
"""
@@ -1887,14 +2000,14 @@ def deserialize_duration(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise DeserializationError(msg) from err
- else:
- return duration
+ return duration
@staticmethod
def deserialize_date(attr):
"""Deserialize ISO-8601 formatted string into Date object.
:param str attr: response string to be deserialized.
+ :return: Deserialized date
:rtype: Date
:raises: DeserializationError if string format invalid.
"""
@@ -1910,6 +2023,7 @@ def deserialize_time(attr):
"""Deserialize ISO-8601 formatted string into time object.
:param str attr: response string to be deserialized.
+ :return: Deserialized time
:rtype: datetime.time
:raises: DeserializationError if string format invalid.
"""
@@ -1924,6 +2038,7 @@ def deserialize_rfc(attr):
"""Deserialize RFC-1123 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1939,14 +2054,14 @@ def deserialize_rfc(attr):
except ValueError as err:
msg = "Cannot deserialize to rfc datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_iso(attr):
"""Deserialize ISO-8601 formatted string into Datetime object.
:param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
:rtype: Datetime
:raises: DeserializationError if string format invalid.
"""
@@ -1976,8 +2091,7 @@ def deserialize_iso(attr):
except (ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
@staticmethod
def deserialize_unix(attr):
@@ -1985,6 +2099,7 @@ def deserialize_unix(attr):
This is represented as seconds.
:param int attr: Object to be serialized.
+ :return: Deserialized datetime
:rtype: Datetime
:raises: DeserializationError if format invalid
"""
@@ -1996,5 +2111,4 @@ def deserialize_unix(attr):
except ValueError as err:
msg = "Cannot deserialize to unix datetime object."
raise DeserializationError(msg) from err
- else:
- return date_obj
+ return date_obj
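The `_serialization.py` hunks above consistently replace bare re-raises with explicit exception chaining (`raise ... from err`) and drop `else:` blocks that only returned after a `try`. A minimal sketch of that pattern, using a hypothetical `parse_widget_id` helper that is not part of this SDK:

.. code-block:: python

    class WidgetError(Exception):
        """Domain-level error surfaced instead of the low-level ValueError."""

    def parse_widget_id(raw: str) -> int:
        try:
            value = int(raw)
        except ValueError as exc:
            # ``from exc`` keeps the original traceback as __cause__ instead of
            # discarding it, mirroring the serializer/deserializer changes above.
            raise WidgetError("{!r} is not a valid widget id.".format(raw)) from exc
        return value  # no ``else:`` block needed; returning after the try reads the same
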
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_vendor.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_vendor.py
index 6dbcb5c20a91..5af45cbb9df5 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_vendor.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_vendor.py
@@ -11,7 +11,6 @@
from ._configuration import DeidentificationClientConfiguration
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core import PipelineClient
from ._serialization import Deserializer, Serializer
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_version.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_version.py
index bbcd28b4aa67..be71c81bd282 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_version.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/_version.py
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-VERSION = "1.0.0b2"
+VERSION = "1.0.0b1"
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/__init__.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/__init__.py
index 245e207d364a..432fe8a82dba 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/__init__.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/__init__.py
@@ -5,12 +5,18 @@
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._client import DeidentificationClient
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._client import DeidentificationClient # type: ignore
try:
from ._patch import __all__ as _patch_all
- from ._patch import * # pylint: disable=unused-wildcard-import
+ from ._patch import *
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
@@ -18,6 +24,6 @@
__all__ = [
"DeidentificationClient",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
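The regenerated ``__init__`` modules above keep pulling ``__all__`` and ``patch_sdk`` from a hand-editable ``_patch.py``. A sketch of what such a customization file could look like; the subclass name and its behavior are illustrative assumptions, not part of this PR:

.. code-block:: python

    # Hypothetical azure/health/deidentification/aio/_patch.py
    from typing import List

    from ._client import DeidentificationClient

    class CustomDeidentificationClient(DeidentificationClient):
        """Illustrative hand-written subclass re-exported via __all__."""

    __all__: List[str] = ["CustomDeidentificationClient"]

    def patch_sdk():
        """Customization hook invoked by the generated __init__; a no-op here."""
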
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_client.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_client.py
index b257b9201e01..a109638940be 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_client.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_client.py
@@ -19,13 +19,10 @@
from ._operations import DeidentificationClientOperationsMixin
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class DeidentificationClient(
- DeidentificationClientOperationsMixin
-): # pylint: disable=client-accepts-api-version-keyword
+class DeidentificationClient(DeidentificationClientOperationsMixin):
"""DeidentificationClient.
:param endpoint: Url of your De-identification Service. Required.
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_configuration.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_configuration.py
index 3799c4c1d7b2..f52f44a035e0 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_configuration.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_configuration.py
@@ -13,11 +13,10 @@
from .._version import VERSION
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
-class DeidentificationClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
+class DeidentificationClientConfiguration: # pylint: disable=too-many-instance-attributes
"""Configuration for DeidentificationClient.
Note that all parameters used to create this instance are saved as instance
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_operations/__init__.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_operations/__init__.py
index f30b11092e89..8a3952cdf768 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_operations/__init__.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_operations/__init__.py
@@ -5,15 +5,21 @@
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._operations import DeidentificationClientOperationsMixin
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._operations import DeidentificationClientOperationsMixin # type: ignore
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"DeidentificationClientOperationsMixin",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_operations/_operations.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_operations/_operations.py
index dd3a7c4dbdcb..94ad6c1b0234 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_operations/_operations.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_operations/_operations.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-lines,too-many-statements
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -9,21 +8,7 @@
from io import IOBase
import json
import sys
-from typing import (
- Any,
- AsyncIterable,
- AsyncIterator,
- Callable,
- Dict,
- IO,
- List,
- Optional,
- Type,
- TypeVar,
- Union,
- cast,
- overload,
-)
+from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
@@ -33,6 +18,8 @@
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
@@ -59,7 +46,7 @@
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
- from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
+ from typing import MutableMapping # type: ignore
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
@@ -78,53 +65,8 @@ async def get_job(self, name: str, **kwargs: Any) -> _models.DeidentificationJob
:return: DeidentificationJob. The DeidentificationJob is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationJob
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -157,7 +99,10 @@ async def get_job(self, name: str, **kwargs: Any) -> _models.DeidentificationJob
if response.status_code not in [200]:
if _stream:
- await response.read() # Load the body in memory and close the socket
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -179,7 +124,7 @@ async def get_job(self, name: str, **kwargs: Any) -> _models.DeidentificationJob
async def _create_job_initial(
self, name: str, resource: Union[_models.DeidentificationJob, JSON, IO[bytes]], **kwargs: Any
) -> AsyncIterator[bytes]:
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -221,30 +166,20 @@ async def _create_job_initial(
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
- await response.read() # Load the body in memory and close the socket
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
response_headers = {}
- if response.status_code == 200:
- response_headers["x-ms-client-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-client-request-id")
- )
- response_headers["Operation-Location"] = self._deserialize(
- "str", response.headers.get("Operation-Location")
- )
-
- deserialized = response.iter_bytes()
-
- if response.status_code == 201:
- response_headers["x-ms-client-request-id"] = self._deserialize(
- "str", response.headers.get("x-ms-client-request-id")
- )
- response_headers["Operation-Location"] = self._deserialize(
- "str", response.headers.get("Operation-Location")
- )
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))
- deserialized = response.iter_bytes()
+ deserialized = response.iter_bytes()
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
@@ -271,93 +206,6 @@ async def begin_create_job(
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.health.deidentification.models.DeidentificationJob]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # JSON input template you can fill out and use as your body input.
- resource = {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
-
- # response body for status code(s): 201, 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
@overload
@@ -380,51 +228,6 @@ async def begin_create_job(
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.health.deidentification.models.DeidentificationJob]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 201, 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
@overload
@@ -447,51 +250,6 @@ async def begin_create_job(
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.health.deidentification.models.DeidentificationJob]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 201, 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
@distributed_trace_async
@@ -512,93 +270,6 @@ async def begin_create_job(
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.health.deidentification.models.DeidentificationJob]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # JSON input template you can fill out and use as your body input.
- resource = {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
-
- # response body for status code(s): 201, 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}
@@ -675,51 +346,6 @@ def list_jobs(
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.health.deidentification.models.DeidentificationJob]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
@@ -727,7 +353,7 @@ def list_jobs(
maxpagesize = kwargs.pop("maxpagesize", None)
cls: ClsType[List[_models.DeidentificationJob]] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -811,35 +437,6 @@ def list_job_documents(
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.health.deidentification.models.DocumentDetails]
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "id": "str",
- "input": {
- "etag": "str",
- "path": "str"
- },
- "status": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "output": {
- "etag": "str",
- "path": "str"
- }
- }
"""
_headers = kwargs.pop("headers", {}) or {}
_params = kwargs.pop("params", {}) or {}
@@ -847,7 +444,7 @@ def list_job_documents(
maxpagesize = kwargs.pop("maxpagesize", None)
cls: ClsType[List[_models.DocumentDetails]] = kwargs.pop("cls", None)
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -931,53 +528,8 @@ async def cancel_job(self, name: str, **kwargs: Any) -> _models.Deidentification
:return: DeidentificationJob. The DeidentificationJob is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationJob
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "createdAt": "2020-02-20 00:00:00",
- "lastUpdatedAt": "2020-02-20 00:00:00",
- "name": "str",
- "sourceLocation": {
- "location": "str",
- "prefix": "str",
- "extensions": [
- "str"
- ]
- },
- "status": "str",
- "targetLocation": {
- "location": "str",
- "prefix": "str"
- },
- "dataType": "str",
- "error": {
- "code": "str",
- "message": "str",
- "details": [
- ...
- ],
- "innererror": {
- "code": "str",
- "innererror": ...
- },
- "target": "str"
- },
- "operation": "str",
- "redactionFormat": "str",
- "startedAt": "2020-02-20 00:00:00",
- "summary": {
- "bytesProcessed": 0,
- "canceled": 0,
- "failed": 0,
- "successful": 0,
- "total": 0
- }
- }
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1010,7 +562,10 @@ async def cancel_job(self, name: str, **kwargs: Any) -> _models.Deidentification
if response.status_code not in [200]:
if _stream:
- await response.read() # Load the body in memory and close the socket
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
@@ -1030,7 +585,7 @@ async def cancel_job(self, name: str, **kwargs: Any) -> _models.Deidentification
return deserialized # type: ignore
@distributed_trace_async
- async def delete_job(self, name: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements
+ async def delete_job(self, name: str, **kwargs: Any) -> None:
"""Delete a de-identification job.
Removes the record of the job from the service. Does not delete any documents.
@@ -1041,7 +596,7 @@ async def delete_job(self, name: str, **kwargs: Any) -> None: # pylint: disable
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1100,43 +655,6 @@ async def deidentify(
:return: DeidentificationResult. The DeidentificationResult is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationResult
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # JSON input template you can fill out and use as your body input.
- body = {
- "inputText": "str",
- "dataType": "str",
- "operation": "str",
- "redactionFormat": "str"
- }
-
- # response body for status code(s): 200
- response == {
- "outputText": "str",
- "taggerResult": {
- "entities": [
- {
- "category": "str",
- "length": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "offset": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "confidenceScore": 0.0,
- "text": "str"
- }
- ],
- "etag": "str",
- "path": "str"
- }
- }
"""
@overload
@@ -1155,35 +673,6 @@ async def deidentify(
:return: DeidentificationResult. The DeidentificationResult is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationResult
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "outputText": "str",
- "taggerResult": {
- "entities": [
- {
- "category": "str",
- "length": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "offset": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "confidenceScore": 0.0,
- "text": "str"
- }
- ],
- "etag": "str",
- "path": "str"
- }
- }
"""
@overload
@@ -1202,35 +691,6 @@ async def deidentify(
:return: DeidentificationResult. The DeidentificationResult is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationResult
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # response body for status code(s): 200
- response == {
- "outputText": "str",
- "taggerResult": {
- "entities": [
- {
- "category": "str",
- "length": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "offset": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "confidenceScore": 0.0,
- "text": "str"
- }
- ],
- "etag": "str",
- "path": "str"
- }
- }
"""
@distributed_trace_async
@@ -1247,45 +707,8 @@ async def deidentify(
:return: DeidentificationResult. The DeidentificationResult is compatible with MutableMapping
:rtype: ~azure.health.deidentification.models.DeidentificationResult
:raises ~azure.core.exceptions.HttpResponseError:
-
- Example:
- .. code-block:: python
-
- # JSON input template you can fill out and use as your body input.
- body = {
- "inputText": "str",
- "dataType": "str",
- "operation": "str",
- "redactionFormat": "str"
- }
-
- # response body for status code(s): 200
- response == {
- "outputText": "str",
- "taggerResult": {
- "entities": [
- {
- "category": "str",
- "length": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "offset": {
- "codePoint": 0,
- "utf16": 0,
- "utf8": 0
- },
- "confidenceScore": 0.0,
- "text": "str"
- }
- ],
- "etag": "str",
- "path": "str"
- }
- }
"""
- error_map: MutableMapping[int, Type[HttpResponseError]] = {
+ error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
@@ -1327,7 +750,10 @@ async def deidentify(
if response.status_code not in [200]:
if _stream:
- await response.read() # Load the body in memory and close the socket
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
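Several hunks in ``_operations.py`` above wrap the error-path body read in a guard so an already consumed or closed stream no longer masks the real HTTP error. A minimal sketch of that guard as a standalone helper; ``_drain_on_error`` is a hypothetical name:

.. code-block:: python

    from azure.core.exceptions import StreamClosedError, StreamConsumedError

    async def _drain_on_error(response) -> None:
        try:
            # Load the body in memory and close the socket before mapping the error.
            await response.read()
        except (StreamConsumedError, StreamClosedError):
            # The stream was already read or closed elsewhere; ignore and let
            # map_error / HttpResponseError report the original failure.
            pass
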
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_vendor.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_vendor.py
index 39bc7460b3a7..0afc83d417cc 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_vendor.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/aio/_vendor.py
@@ -11,7 +11,6 @@
from ._configuration import DeidentificationClientConfiguration
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from azure.core import AsyncPipelineClient
from .._serialization import Deserializer, Serializer
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/__init__.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/__init__.py
index 2bbbe6e08cab..55d5232ea946 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/__init__.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/__init__.py
@@ -5,28 +5,37 @@
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
-from ._models import DeidentificationContent
-from ._models import DeidentificationJob
-from ._models import DeidentificationResult
-from ._models import DocumentDetails
-from ._models import DocumentLocation
-from ._models import Error
-from ._models import InnerError
-from ._models import JobSummary
-from ._models import PhiEntity
-from ._models import PhiTaggerResult
-from ._models import SourceStorageLocation
-from ._models import StringIndex
-from ._models import TargetStorageLocation
+from typing import TYPE_CHECKING
-from ._enums import DocumentDataType
-from ._enums import JobStatus
-from ._enums import OperationState
-from ._enums import OperationType
-from ._enums import PhiCategory
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+
+from ._models import ( # type: ignore
+ DeidentificationContent,
+ DeidentificationJob,
+ DeidentificationResult,
+ DocumentDetails,
+ DocumentLocation,
+ JobSummary,
+ PhiEntity,
+ PhiTaggerResult,
+ SourceStorageLocation,
+ StringIndex,
+ TargetStorageLocation,
+)
+
+from ._enums import ( # type: ignore
+ DocumentDataType,
+ JobStatus,
+ OperationState,
+ OperationType,
+ PhiCategory,
+)
from ._patch import __all__ as _patch_all
-from ._patch import * # pylint: disable=unused-wildcard-import
+from ._patch import *
from ._patch import patch_sdk as _patch_sdk
__all__ = [
@@ -35,8 +44,6 @@
"DeidentificationResult",
"DocumentDetails",
"DocumentLocation",
- "Error",
- "InnerError",
"JobSummary",
"PhiEntity",
"PhiTaggerResult",
@@ -49,5 +56,5 @@
"OperationType",
"PhiCategory",
]
-__all__.extend([p for p in _patch_all if p not in __all__])
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_enums.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_enums.py
index c05e0003b5b7..8051b431ccd1 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_enums.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_enums.py
@@ -70,7 +70,7 @@ class PhiCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Account Number."""
AGE = "Age"
"""Age."""
- BIO_I_D = "BioID"
+ BIO_ID = "BioID"
"""Biological Identifier, such as a fingerprint or retinal scan."""
CITY = "City"
"""City."""
@@ -90,9 +90,9 @@ class PhiCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Health Plan ID Numbers."""
HOSPITAL = "Hospital"
"""Hospital Name."""
- I_D_NUM = "IDNum"
+ ID_NUM = "IDNum"
"""Id Number, eg. passport number."""
- I_P_ADDRESS = "IPAddress"
+ IP_ADDRESS = "IPAddress"
"""IP Address."""
LICENSE = "License"
"""License, eg. Driver's license or medical license."""
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_models.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_models.py
index 2929c7e9b5d3..5032d7d75aeb 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_models.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_models.py
@@ -1,20 +1,21 @@
# coding=utf-8
-# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
+# pylint: disable=useless-super-delegation
import datetime
from typing import Any, List, Mapping, Optional, TYPE_CHECKING, Union, overload
+from azure.core.exceptions import ODataV4Format
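+# NOTE: error payloads now use azure-core's ODataV4Format in place of the removed service-specific Error/InnerError models.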
+
from .. import _model_base
from .._model_base import rest_field
if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
from .. import models as _models
@@ -52,25 +53,24 @@ def __init__(
operation: Optional[Union[str, "_models.OperationType"]] = None,
data_type: Optional[Union[str, "_models.DocumentDataType"]] = None,
redaction_format: Optional[str] = None,
- ): ...
+ ) -> None: ...
@overload
- def __init__(self, mapping: Mapping[str, Any]):
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
"""
:param mapping: raw JSON to initialize the model.
:type mapping: Mapping[str, Any]
"""
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
-class DeidentificationJob(_model_base.Model): # pylint: disable=too-many-instance-attributes
+class DeidentificationJob(_model_base.Model):
"""A job containing a batch of documents to de-identify.
Readonly variables are only populated by the server, and will be ignored when sending a request.
- All required parameters must be populated in order to send to server.
:ivar name: The name of a job. Required.
:vartype name: str
@@ -89,7 +89,7 @@ class DeidentificationJob(_model_base.Model): # pylint: disable=too-many-instan
"Succeeded", "PartialFailed", "Failed", and "Canceled".
:vartype status: str or ~azure.health.deidentification.models.JobStatus
:ivar error: Error when job fails in its entirety.
- :vartype error: ~azure.health.deidentification.models.Error
+ :vartype error: ~azure.core.exceptions.ODataV4Format
:ivar last_updated_at: Date and time when the job was completed.
If the job is canceled, this is the time when the job was canceled.
@@ -120,7 +120,7 @@ class DeidentificationJob(_model_base.Model): # pylint: disable=too-many-instan
status: Union[str, "_models.JobStatus"] = rest_field(visibility=["read"])
"""Current status of a job. Required. Known values are: \"NotStarted\", \"Running\",
\"Succeeded\", \"PartialFailed\", \"Failed\", and \"Canceled\"."""
- error: Optional["_models.Error"] = rest_field(visibility=["read"])
+ error: Optional[ODataV4Format] = rest_field(visibility=["read"])
"""Error when job fails in it's entirety."""
last_updated_at: datetime.datetime = rest_field(name="lastUpdatedAt", visibility=["read"], format="rfc3339")
"""Date and time when the job was completed.
@@ -144,16 +144,16 @@ def __init__(
operation: Optional[Union[str, "_models.OperationType"]] = None,
data_type: Optional[Union[str, "_models.DocumentDataType"]] = None,
redaction_format: Optional[str] = None,
- ): ...
+ ) -> None: ...
@overload
- def __init__(self, mapping: Mapping[str, Any]):
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
"""
:param mapping: raw JSON to initialize the model.
:type mapping: Mapping[str, Any]
"""
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@@ -177,16 +177,16 @@ def __init__(
*,
output_text: Optional[str] = None,
tagger_result: Optional["_models.PhiTaggerResult"] = None,
- ): ...
+ ) -> None: ...
@overload
- def __init__(self, mapping: Mapping[str, Any]):
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
"""
:param mapping: raw JSON to initialize the model.
:type mapping: Mapping[str, Any]
"""
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@@ -206,7 +206,7 @@ class DocumentDetails(_model_base.Model):
"Succeeded", "Failed", and "Canceled".
:vartype status: str or ~azure.health.deidentification.models.OperationState
:ivar error: Error when document fails.
- :vartype error: ~azure.health.deidentification.models.Error
+ :vartype error: ~azure.core.exceptions.ODataV4Format
"""
id: str = rest_field(visibility=["read"])
@@ -218,7 +218,7 @@ class DocumentDetails(_model_base.Model):
status: Union[str, "_models.OperationState"] = rest_field()
"""Status of the document. Required. Known values are: \"NotStarted\", \"Running\", \"Succeeded\",
\"Failed\", and \"Canceled\"."""
- error: Optional["_models.Error"] = rest_field()
+ error: Optional[ODataV4Format] = rest_field()
"""Error when document fails."""
@overload
@@ -228,17 +228,17 @@ def __init__(
input: "_models.DocumentLocation",
status: Union[str, "_models.OperationState"],
output: Optional["_models.DocumentLocation"] = None,
- error: Optional["_models.Error"] = None,
- ): ...
+ error: Optional[ODataV4Format] = None,
+ ) -> None: ...
@overload
- def __init__(self, mapping: Mapping[str, Any]):
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
"""
:param mapping: raw JSON to initialize the model.
:type mapping: Mapping[str, Any]
"""
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@@ -264,102 +264,16 @@ def __init__(
self,
*,
path: str,
- ): ...
-
- @overload
- def __init__(self, mapping: Mapping[str, Any]):
- """
- :param mapping: raw JSON to initialize the model.
- :type mapping: Mapping[str, Any]
- """
-
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
- super().__init__(*args, **kwargs)
-
-
-class Error(_model_base.Model):
- """The error object.
-
- All required parameters must be populated in order to send to server.
-
- :ivar code: One of a server-defined set of error codes. Required.
- :vartype code: str
- :ivar message: A human-readable representation of the error. Required.
- :vartype message: str
- :ivar target: The target of the error.
- :vartype target: str
- :ivar details: An array of details about specific errors that led to this reported error.
- :vartype details: list[~azure.health.deidentification.models.Error]
- :ivar innererror: An object containing more specific information than the current object about
- the error.
- :vartype innererror: ~azure.health.deidentification.models.InnerError
- """
-
- code: str = rest_field()
- """One of a server-defined set of error codes. Required."""
- message: str = rest_field()
- """A human-readable representation of the error. Required."""
- target: Optional[str] = rest_field()
- """The target of the error."""
- details: Optional[List["_models.Error"]] = rest_field()
- """An array of details about specific errors that led to this reported error."""
- innererror: Optional["_models.InnerError"] = rest_field()
- """An object containing more specific information than the current object about the error."""
-
- @overload
- def __init__(
- self,
- *,
- code: str,
- message: str,
- target: Optional[str] = None,
- details: Optional[List["_models.Error"]] = None,
- innererror: Optional["_models.InnerError"] = None,
- ): ...
+ ) -> None: ...
@overload
- def __init__(self, mapping: Mapping[str, Any]):
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
"""
:param mapping: raw JSON to initialize the model.
:type mapping: Mapping[str, Any]
"""
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
- super().__init__(*args, **kwargs)
-
-
-class InnerError(_model_base.Model):
- """An object containing more specific information about the error. As per Microsoft One API
- guidelines -
- https://github.com/Microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses.
-
- :ivar code: One of a server-defined set of error codes.
- :vartype code: str
- :ivar innererror: Inner error.
- :vartype innererror: ~azure.health.deidentification.models.InnerError
- """
-
- code: Optional[str] = rest_field()
- """One of a server-defined set of error codes."""
- innererror: Optional["_models.InnerError"] = rest_field()
- """Inner error."""
-
- @overload
- def __init__(
- self,
- *,
- code: Optional[str] = None,
- innererror: Optional["_models.InnerError"] = None,
- ): ...
-
- @overload
- def __init__(self, mapping: Mapping[str, Any]):
- """
- :param mapping: raw JSON to initialize the model.
- :type mapping: Mapping[str, Any]
- """
-
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@@ -399,16 +313,16 @@ def __init__(
canceled: int,
total: int,
bytes_processed: int,
- ): ...
+ ) -> None: ...
@overload
- def __init__(self, mapping: Mapping[str, Any]):
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
"""
:param mapping: raw JSON to initialize the model.
:type mapping: Mapping[str, Any]
"""
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@@ -456,16 +370,16 @@ def __init__(
length: "_models.StringIndex",
text: Optional[str] = None,
confidence_score: Optional[float] = None,
- ): ...
+ ) -> None: ...
@overload
- def __init__(self, mapping: Mapping[str, Any]):
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
"""
:param mapping: raw JSON to initialize the model.
:type mapping: Mapping[str, Any]
"""
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@@ -495,23 +409,22 @@ def __init__(
entities: List["_models.PhiEntity"],
path: Optional[str] = None,
etag: Optional[str] = None,
- ): ...
+ ) -> None: ...
@overload
- def __init__(self, mapping: Mapping[str, Any]):
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
"""
:param mapping: raw JSON to initialize the model.
:type mapping: Mapping[str, Any]
"""
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
class SourceStorageLocation(_model_base.Model):
"""Storage location.
- All required parameters must be populated in order to send to server.
:ivar location: URL to storage location. Required.
:vartype location: str
@@ -535,16 +448,16 @@ def __init__(
location: str,
prefix: str,
extensions: Optional[List[str]] = None,
- ): ...
+ ) -> None: ...
@overload
- def __init__(self, mapping: Mapping[str, Any]):
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
"""
:param mapping: raw JSON to initialize the model.
:type mapping: Mapping[str, Any]
"""
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@@ -582,23 +495,22 @@ def __init__(
utf8: int,
utf16: int,
code_point: int,
- ): ...
+ ) -> None: ...
@overload
- def __init__(self, mapping: Mapping[str, Any]):
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
"""
:param mapping: raw JSON to initialize the model.
:type mapping: Mapping[str, Any]
"""
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
class TargetStorageLocation(_model_base.Model):
"""Storage location.
- All required parameters must be populated in order to send to server.
:ivar location: URL to storage location. Required.
:vartype location: str
@@ -617,14 +529,14 @@ def __init__(
*,
location: str,
prefix: str,
- ): ...
+ ) -> None: ...
@overload
- def __init__(self, mapping: Mapping[str, Any]):
+ def __init__(self, mapping: Mapping[str, Any]) -> None:
"""
:param mapping: raw JSON to initialize the model.
:type mapping: Mapping[str, Any]
"""
- def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_patch.py b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_patch.py
index 807491528291..f7dd32510333 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_patch.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/azure/health/deidentification/models/_patch.py
@@ -6,11 +6,7 @@
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
-from typing import TYPE_CHECKING, List
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from .. import models as _models
+from typing import List
__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/cancel_job.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/cancel_job.py
new file mode 100644
index 000000000000..4e2573b0eea9
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/cancel_job.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.health.deidentification import DeidentificationClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-health-deidentification
+# USAGE
+ python cancel_job.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DeidentificationClient(
+ endpoint="ENDPOINT",
+ credential=DefaultAzureCredential(),
+ )
+
+ response = client.cancel_job(
+ name="documents_smith_1",
+ )
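+ # cancel_job returns the updated job resource; its status should reflect the cancellation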
+ print(response)
+
+
+# x-ms-original-file: 2024-07-12-preview/CancelJob.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/create_job.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/create_job.py
new file mode 100644
index 000000000000..7cfc11583ee1
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/create_job.py
@@ -0,0 +1,56 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.health.deidentification import DeidentificationClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-health-deidentification
+# USAGE
+ python create_job.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DeidentificationClient(
+ endpoint="ENDPOINT",
+ credential=DefaultAzureCredential(),
+ )
+
+ response = client.begin_create_job(
+ name="documents_smith_1",
+ resource={
+ "dataType": "Plaintext",
+ "operation": "Redact",
+ "redactionFormat": "[{type}]",
+ "sourceLocation": {
+ "extensions": ["*"],
+ "location": "https://blobtest.blob.core.windows.net/container?sp=r&st=2024-01-24T18:11:10Z&se=2024-01-25T02:11:10Z&spr=https&sv=2022-11-02&sr=c&sig=signature%3D",
+ "prefix": "/documents",
+ },
+ "status": "NotStarted",
+ "targetLocation": {
+ "location": "https://blobtest.blob.core.windows.net/container?sp=r&st=2024-01-24T18:11:10Z&se=2024-01-25T02:11:10Z&spr=https&sv=2022-11-02&sr=c&sig=signature%3D",
+ "prefix": "/documents",
+ },
+ },
+ ).result()
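+ # begin_create_job returns a poller; .result() blocks until the job reaches a terminal status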
+ print(response)
+
+
+# x-ms-original-file: 2024-07-12-preview/CreateJob.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/deidentify.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/deidentify.py
new file mode 100644
index 000000000000..fba868d8092a
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/deidentify.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.health.deidentification import DeidentificationClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-health-deidentification
+# USAGE
+ python deidentify.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DeidentificationClient(
+ endpoint="ENDPOINT",
+ credential=DefaultAzureCredential(),
+ )
+
+ response = client.deidentify(
+ body={
+ "dataType": "Plaintext",
+ "inputText": "Hello my name is John Smith.",
+ "operation": "Redact",
+ "redactionFormat": "[{type}]",
+ },
+ )
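+ # response is a DeidentificationResult; for a Redact operation the redacted text is in response.output_text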
+ print(response)
+
+
+# x-ms-original-file: 2024-07-12-preview/Deidentify.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/delete_job.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/delete_job.py
new file mode 100644
index 000000000000..eb50a2436ec9
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/delete_job.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.health.deidentification import DeidentificationClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-health-deidentification
+# USAGE
+ python delete_job.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DeidentificationClient(
+ endpoint="ENDPOINT",
+ credential=DefaultAzureCredential(),
+ )
+
+ client.delete_job(
+ name="documents_smith_1",
+ )
+
+
+# x-ms-original-file: 2024-07-12-preview/DeleteJob.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/get_job.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/get_job.py
new file mode 100644
index 000000000000..61b1f8957eee
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/get_job.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.health.deidentification import DeidentificationClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-health-deidentification
+# USAGE
+ python get_job.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DeidentificationClient(
+ endpoint="ENDPOINT",
+ credential=DefaultAzureCredential(),
+ )
+
+ response = client.get_job(
+ name="documents_smith_1",
+ )
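+ # response is a DeidentificationJob; check response.status and response.summary for progress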
+ print(response)
+
+
+# x-ms-original-file: 2024-07-12-preview/GetJob.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/list_job_documents.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/list_job_documents.py
new file mode 100644
index 000000000000..ccadfdc4eff3
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/list_job_documents.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.health.deidentification import DeidentificationClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-health-deidentification
+# USAGE
+ python list_job_documents.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DeidentificationClient(
+ endpoint="ENDPOINT",
+ credential=DefaultAzureCredential(),
+ )
+
+ response = client.list_job_documents(
+ name="documents_smith_1",
+ )
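+ # list_job_documents returns a pageable of per-document details; iteration fetches additional pages as needed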
+ for item in response:
+ print(item)
+
+
+# x-ms-original-file: 2024-07-12-preview/ListJobDocuments.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/list_jobs.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/list_jobs.py
new file mode 100644
index 000000000000..aca2176008d0
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_samples/list_jobs.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+
+from azure.health.deidentification import DeidentificationClient
+
+"""
+# PREREQUISITES
+ pip install azure-identity
+ pip install azure-health-deidentification
+# USAGE
+ python list_jobs.py
+
+ Before running the sample, please set the values of the client ID, tenant ID, and client secret
+ of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+ AZURE_CLIENT_SECRET. For more info about how to get these values, please see:
+ https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+ client = DeidentificationClient(
+ endpoint="ENDPOINT",
+ credential=DefaultAzureCredential(),
+ )
+
+ response = client.list_jobs()
+ for item in response:
+ print(item)
+
+
+# x-ms-original-file: 2024-07-12-preview/ListJobs.json
+if __name__ == "__main__":
+ main()
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/conftest.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/conftest.py
new file mode 100644
index 000000000000..23dfda522b5b
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/conftest.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+ test_proxy,
+ add_general_regex_sanitizer,
+ add_body_key_sanitizer,
+ add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# For security, please avoid recording sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+ deidentification_subscription_id = os.environ.get(
+ "DEIDENTIFICATION_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+ )
+ deidentification_tenant_id = os.environ.get("DEIDENTIFICATION_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+ deidentification_client_id = os.environ.get("DEIDENTIFICATION_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+ deidentification_client_secret = os.environ.get(
+ "DEIDENTIFICATION_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000"
+ )
+ add_general_regex_sanitizer(regex=deidentification_subscription_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(regex=deidentification_tenant_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(regex=deidentification_client_id, value="00000000-0000-0000-0000-000000000000")
+ add_general_regex_sanitizer(regex=deidentification_client_secret, value="00000000-0000-0000-0000-000000000000")
+
+ add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+ add_header_regex_sanitizer(key="Cookie", value="cookie;")
+ add_body_key_sanitizer(json_path="$..access_token", value="access_token")
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/test_deidentification.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/test_deidentification.py
new file mode 100644
index 000000000000..b08c816eb2bd
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/test_deidentification.py
@@ -0,0 +1,102 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from devtools_testutils import recorded_by_proxy
+from testpreparer import DeidentificationClientTestBase, DeidentificationPreparer
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDeidentification(DeidentificationClientTestBase):
+ @DeidentificationPreparer()
+ @recorded_by_proxy
+ def test_get_job(self, deidentification_endpoint):
+ client = self.create_client(endpoint=deidentification_endpoint)
+ response = client.get_job(
+ name="str",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy
+ def test_begin_create_job(self, deidentification_endpoint):
+ client = self.create_client(endpoint=deidentification_endpoint)
+ response = client.begin_create_job(
+ name="str",
+ resource={
+ "createdAt": "2020-02-20 00:00:00",
+ "lastUpdatedAt": "2020-02-20 00:00:00",
+ "name": "str",
+ "sourceLocation": {"location": "str", "prefix": "str", "extensions": ["str"]},
+ "status": "str",
+ "targetLocation": {"location": "str", "prefix": "str"},
+ "dataType": "str",
+ "error": ~azure.core.ODataV4Format,
+ "operation": "str",
+ "redactionFormat": "str",
+ "startedAt": "2020-02-20 00:00:00",
+ "summary": {"bytesProcessed": 0, "canceled": 0, "failed": 0, "successful": 0, "total": 0},
+ },
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy
+ def test_list_jobs(self, deidentification_endpoint):
+ client = self.create_client(endpoint=deidentification_endpoint)
+ response = client.list_jobs()
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy
+ def test_list_job_documents(self, deidentification_endpoint):
+ client = self.create_client(endpoint=deidentification_endpoint)
+ response = client.list_job_documents(
+ name="str",
+ )
+ result = [r for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy
+ def test_cancel_job(self, deidentification_endpoint):
+ client = self.create_client(endpoint=deidentification_endpoint)
+ response = client.cancel_job(
+ name="str",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy
+ def test_delete_job(self, deidentification_endpoint):
+ client = self.create_client(endpoint=deidentification_endpoint)
+ response = client.delete_job(
+ name="str",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy
+ def test_deidentify(self, deidentification_endpoint):
+ client = self.create_client(endpoint=deidentification_endpoint)
+ response = client.deidentify(
+ body={"inputText": "str", "dataType": "str", "operation": "str", "redactionFormat": "str"},
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/test_deidentification_async.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/test_deidentification_async.py
new file mode 100644
index 000000000000..f79ae817dec3
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/test_deidentification_async.py
@@ -0,0 +1,105 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import pytest
+from devtools_testutils.aio import recorded_by_proxy_async
+from testpreparer import DeidentificationPreparer
+from testpreparer_async import DeidentificationClientTestBaseAsync
+
+
+@pytest.mark.skip("you may need to update the auto-generated test case before run it")
+class TestDeidentificationAsync(DeidentificationClientTestBaseAsync):
+ @DeidentificationPreparer()
+ @recorded_by_proxy_async
+ async def test_get_job(self, deidentification_endpoint):
+ client = self.create_async_client(endpoint=deidentification_endpoint)
+ response = await client.get_job(
+ name="str",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy_async
+ async def test_begin_create_job(self, deidentification_endpoint):
+ client = self.create_async_client(endpoint=deidentification_endpoint)
+ response = await (
+ await client.begin_create_job(
+ name="str",
+ resource={
+ "createdAt": "2020-02-20 00:00:00",
+ "lastUpdatedAt": "2020-02-20 00:00:00",
+ "name": "str",
+ "sourceLocation": {"location": "str", "prefix": "str", "extensions": ["str"]},
+ "status": "str",
+ "targetLocation": {"location": "str", "prefix": "str"},
+ "dataType": "str",
+ "error": ~azure.core.ODataV4Format,
+ "operation": "str",
+ "redactionFormat": "str",
+ "startedAt": "2020-02-20 00:00:00",
+ "summary": {"bytesProcessed": 0, "canceled": 0, "failed": 0, "successful": 0, "total": 0},
+ },
+ )
+ ).result() # call '.result()' to poll until the service returns the final result
+
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy_async
+ async def test_list_jobs(self, deidentification_endpoint):
+ client = self.create_async_client(endpoint=deidentification_endpoint)
+ response = client.list_jobs()
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy_async
+ async def test_list_job_documents(self, deidentification_endpoint):
+ client = self.create_async_client(endpoint=deidentification_endpoint)
+ response = client.list_job_documents(
+ name="str",
+ )
+ result = [r async for r in response]
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy_async
+ async def test_cancel_job(self, deidentification_endpoint):
+ client = self.create_async_client(endpoint=deidentification_endpoint)
+ response = await client.cancel_job(
+ name="str",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy_async
+ async def test_delete_job(self, deidentification_endpoint):
+ client = self.create_async_client(endpoint=deidentification_endpoint)
+ response = await client.delete_job(
+ name="str",
+ )
+
+ # please add some check logic here by yourself
+ # ...
+
+ @DeidentificationPreparer()
+ @recorded_by_proxy_async
+ async def test_deidentify(self, deidentification_endpoint):
+ client = self.create_async_client(endpoint=deidentification_endpoint)
+ response = await client.deidentify(
+ body={"inputText": "str", "dataType": "str", "operation": "str", "redactionFormat": "str"},
+ )
+
+ # please add some check logic here by yourself
+ # ...
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/testpreparer.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/testpreparer.py
new file mode 100644
index 000000000000..015d30dbf7af
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/testpreparer.py
@@ -0,0 +1,26 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from azure.health.deidentification import DeidentificationClient
+from devtools_testutils import AzureRecordedTestCase, PowerShellPreparer
+import functools
+
+
+class DeidentificationClientTestBase(AzureRecordedTestCase):
+
+ def create_client(self, endpoint):
+ credential = self.get_credential(DeidentificationClient)
+ return self.create_client_from_credential(
+ DeidentificationClient,
+ credential=credential,
+ endpoint=endpoint,
+ )
+
+
+DeidentificationPreparer = functools.partial(
+ PowerShellPreparer, "deidentification", deidentification_endpoint="https://fake_deidentification_endpoint.com"
+)
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/testpreparer_async.py b/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/testpreparer_async.py
new file mode 100644
index 000000000000..5fa674a0c53d
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/generated_tests/testpreparer_async.py
@@ -0,0 +1,20 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from azure.health.deidentification.aio import DeidentificationClient
+from devtools_testutils import AzureRecordedTestCase
+
+
+class DeidentificationClientTestBaseAsync(AzureRecordedTestCase):
+
+ def create_async_client(self, endpoint):
+ credential = self.get_credential(DeidentificationClient, is_async=True)
+ return self.create_client_from_credential(
+ DeidentificationClient,
+ credential=credential,
+ endpoint=endpoint,
+ )
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/samples/async_samples/sample_create_and_wait_job_async.py b/sdk/healthdataaiservices/azure-health-deidentification/samples/async_samples/sample_create_and_wait_job_async.py
index e1816415c037..a145d0204952 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/samples/async_samples/sample_create_and_wait_job_async.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/samples/async_samples/sample_create_and_wait_job_async.py
@@ -54,9 +54,7 @@ async def sample_create_and_wait_job_async():
location=storage_location,
prefix=inputPrefix,
),
- target_location=TargetStorageLocation(
- location=storage_location, prefix=outputPrefix
- ),
+ target_location=TargetStorageLocation(location=storage_location, prefix=outputPrefix),
)
async with client:
@@ -67,9 +65,7 @@ async def sample_create_and_wait_job_async():
print(f"Job Name: {finished_job.name}")
print(f"Job Status: {finished_job.status}") # Succeeded
- print(
- f"File Count: {finished_job.summary.total if finished_job.summary is not None else 0}"
- )
+ print(f"File Count: {finished_job.summary.total if finished_job.summary is not None else 0}")
# [END sample_create_and_wait_job_async]
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/samples/async_samples/sample_list_job_files_async.py b/sdk/healthdataaiservices/azure-health-deidentification/samples/async_samples/sample_list_job_files_async.py
index 198153982a6c..da2a0f33025c 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/samples/async_samples/sample_list_job_files_async.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/samples/async_samples/sample_list_job_files_async.py
@@ -53,9 +53,7 @@ async def sample_list_job_documents_async():
location=storage_location,
prefix=inputPrefix,
),
- target_location=TargetStorageLocation(
- location=storage_location, prefix=outputPrefix
- ),
+ target_location=TargetStorageLocation(location=storage_location, prefix=outputPrefix),
)
print(f"Creating job with name: {jobname}")
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/samples/sample_create_and_wait_job.py b/sdk/healthdataaiservices/azure-health-deidentification/samples/sample_create_and_wait_job.py
index 1c63c38cd86d..c91931dd5d56 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/samples/sample_create_and_wait_job.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/samples/sample_create_and_wait_job.py
@@ -54,9 +54,7 @@ def sample_create_and_wait_job():
location=storage_location,
prefix=inputPrefix,
),
- target_location=TargetStorageLocation(
- location=storage_location, prefix=outputPrefix
- ),
+ target_location=TargetStorageLocation(location=storage_location, prefix=outputPrefix),
)
lro: LROPoller = client.begin_create_job(jobname, job)
@@ -65,9 +63,7 @@ def sample_create_and_wait_job():
finished_job: DeidentificationJob = lro.result()
print(f"Job Name: {finished_job.name}")
print(f"Job Status: {finished_job.status}")
- print(
- f"File Count: {finished_job.summary.total if finished_job.summary is not None else 0}"
- )
+ print(f"File Count: {finished_job.summary.total if finished_job.summary is not None else 0}")
# [END sample_create_and_wait_job]
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/samples/sample_list_job_files.py b/sdk/healthdataaiservices/azure-health-deidentification/samples/sample_list_job_files.py
index 438ebfaa267a..31bb05f649d3 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/samples/sample_list_job_files.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/samples/sample_list_job_files.py
@@ -53,9 +53,7 @@ def sample_list_job_documents():
location=storage_location,
prefix=inputPrefix,
),
- target_location=TargetStorageLocation(
- location=storage_location, prefix=outputPrefix
- ),
+ target_location=TargetStorageLocation(location=storage_location, prefix=outputPrefix),
)
print(f"Creating job with name: {jobname}")
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/sdk_packaging.toml b/sdk/healthdataaiservices/azure-health-deidentification/sdk_packaging.toml
new file mode 100644
index 000000000000..e7687fdae93b
--- /dev/null
+++ b/sdk/healthdataaiservices/azure-health-deidentification/sdk_packaging.toml
@@ -0,0 +1,2 @@
+[packaging]
+auto_update = false
\ No newline at end of file
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/tests/conftest.py b/sdk/healthdataaiservices/azure-health-deidentification/tests/conftest.py
index e7f366daf096..b7c8b5904091 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/tests/conftest.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/tests/conftest.py
@@ -23,8 +23,7 @@ def start_proxy(test_proxy, patch_sleep, patch_async_sleep):
@pytest.fixture(scope="session", autouse=True)
def create_session_uniquifier():
if (
- os.environ.get("AZURE_TEST_RUN_LIVE", "false").lower()
- == "true" # Don't override uniquifier by default
+ os.environ.get("AZURE_TEST_RUN_LIVE", "false").lower() == "true" # Don't override uniquifier by default
and os.environ.get("AZURE_SKIP_LIVE_RECORDING", "false").lower() != "true"
):
uniquifier = uuid.uuid4().hex[:6]
@@ -43,12 +42,8 @@ def add_sanitizers(test_proxy):
# $..id
# uri sanitization in favor of substitution
remove_batch_sanitizers(["AZSDK3493", "AZSDK3430", "AZSDK4001"])
- account_name = os.environ.get(
- "HEALTHDATAAISERVICES_STORAGE_ACCOUNT_NAME", "Not Found."
- )
- container_name = os.environ.get(
- "HEALTHDATAAISERVICES_STORAGE_CONTAINER_NAME", "Not Found."
- )
+ account_name = os.environ.get("HEALTHDATAAISERVICES_STORAGE_ACCOUNT_NAME", "Not Found.")
+ container_name = os.environ.get("HEALTHDATAAISERVICES_STORAGE_CONTAINER_NAME", "Not Found.")
add_body_key_sanitizer(
json_path="..location",
value=f"https://{account_name}.blob.core.windows.net:443/{container_name}",
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/tests/deid_base_test_case.py b/sdk/healthdataaiservices/azure-health-deidentification/tests/deid_base_test_case.py
index 383e72676898..b01ae83077d3 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/tests/deid_base_test_case.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/tests/deid_base_test_case.py
@@ -66,7 +66,5 @@ def generate_job_name(self) -> str:
def get_storage_location(self, kwargs):
storage_name: str = kwargs.pop("healthdataaiservices_storage_account_name")
container_name: str = kwargs.pop("healthdataaiservices_storage_container_name")
- storage_location = (
- f"https://{storage_name}.blob.core.windows.net/{container_name}"
- )
+ storage_location = f"https://{storage_name}.blob.core.windows.net/{container_name}"
return storage_location
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_delete.py b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_delete.py
index cf2516fb7a8c..0a38df9ff504 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_delete.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_delete.py
@@ -26,9 +26,7 @@ def test_create_cancel_delete(self, **kwargs):
location=storage_location,
prefix="example_patient_1",
),
- target_location=TargetStorageLocation(
- location=storage_location, prefix=self.OUTPUT_PATH
- ),
+ target_location=TargetStorageLocation(location=storage_location, prefix=self.OUTPUT_PATH),
operation=OperationType.SURROGATE,
data_type=DocumentDataType.PLAINTEXT,
)
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_delete_async.py b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_delete_async.py
index ccf32214ba37..63838285a9c3 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_delete_async.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_delete_async.py
@@ -27,9 +27,7 @@ async def test_create_cancel_delete_async(self, **kwargs):
location=storage_location,
prefix="example_patient_1",
),
- target_location=TargetStorageLocation(
- location=storage_location, prefix=self.OUTPUT_PATH
- ),
+ target_location=TargetStorageLocation(location=storage_location, prefix=self.OUTPUT_PATH),
)
await client.begin_create_job(jobname, job)
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_list_async.py b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_list_async.py
index 51383e5313a7..384de8dff99e 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_list_async.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_list_async.py
@@ -25,9 +25,7 @@ async def test_create_list_async(self, **kwargs):
location=storage_location,
prefix=inputPrefix,
),
- target_location=TargetStorageLocation(
- location=storage_location, prefix=self.OUTPUT_PATH
- ),
+ target_location=TargetStorageLocation(location=storage_location, prefix=self.OUTPUT_PATH),
operation=OperationType.TAG,
data_type=DocumentDataType.PLAINTEXT,
)
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_wait_finish.py b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_wait_finish.py
index 79954337cf3b..416da2edaeba 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_wait_finish.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_wait_finish.py
@@ -24,9 +24,7 @@ def test_create_wait_finish(self, **kwargs):
location=storage_location,
prefix=inputPrefix,
),
- target_location=TargetStorageLocation(
- location=storage_location, prefix=self.OUTPUT_PATH
- ),
+ target_location=TargetStorageLocation(location=storage_location, prefix=self.OUTPUT_PATH),
operation=OperationType.SURROGATE,
data_type=DocumentDataType.PLAINTEXT,
)
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_wait_finish_async.py b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_wait_finish_async.py
index 97af65c1d224..68a859d7ea42 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_wait_finish_async.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_create_wait_finish_async.py
@@ -26,9 +26,7 @@ async def test_create_wait_finish_async(self, **kwargs):
location=storage_location,
prefix=inputPrefix,
),
- target_location=TargetStorageLocation(
- location=storage_location, prefix=self.OUTPUT_PATH
- ),
+ target_location=TargetStorageLocation(location=storage_location, prefix=self.OUTPUT_PATH),
operation=OperationType.SURROGATE,
data_type=DocumentDataType.PLAINTEXT,
)
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_exception_throws_async.py b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_exception_throws_async.py
index 2f1858a2ce9e..c8fa2da12337 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/tests/test_exception_throws_async.py
+++ b/sdk/healthdataaiservices/azure-health-deidentification/tests/test_exception_throws_async.py
@@ -29,9 +29,7 @@ async def test_exception_throws_async(self, **kwargs):
location=storage_location,
prefix="no_files_in_this_folder",
),
- target_location=TargetStorageLocation(
- location=storage_location, prefix=self.OUTPUT_PATH
- ),
+ target_location=TargetStorageLocation(location=storage_location, prefix=self.OUTPUT_PATH),
operation=OperationType.SURROGATE,
data_type=DocumentDataType.PLAINTEXT,
)
diff --git a/sdk/healthdataaiservices/azure-health-deidentification/tsp-location.yaml b/sdk/healthdataaiservices/azure-health-deidentification/tsp-location.yaml
index 090caaf79c28..421387b8ac95 100644
--- a/sdk/healthdataaiservices/azure-health-deidentification/tsp-location.yaml
+++ b/sdk/healthdataaiservices/azure-health-deidentification/tsp-location.yaml
@@ -1,5 +1,4 @@
directory: specification/healthdataaiservices/HealthDataAIServices.DeidServices
-commit: 2771da5baeee73dfd70b2a5f2813a55549c2aa73
-additionalDirectories: []
+commit: 0be55256bbf339fa9274dffcb6f053fb27898f08
repo: Azure/azure-rest-api-specs
-
+additionalDirectories: