diff --git a/sdk/monitor/azure-monitor-ingestion/MANIFEST.in b/sdk/monitor/azure-monitor-ingestion/MANIFEST.in index 516235bcc954..b43a4fd731ef 100644 --- a/sdk/monitor/azure-monitor-ingestion/MANIFEST.in +++ b/sdk/monitor/azure-monitor-ingestion/MANIFEST.in @@ -1,7 +1,7 @@ -recursive-include tests *.py -recursive-include samples *.py include *.md include LICENSE -include azure/__init__.py -include azure/monitor/__init__.py include azure/monitor/ingestion/py.typed +recursive-include tests *.py +recursive-include samples *.py *.md +include azure/__init__.py +include azure/monitor/__init__.py \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-ingestion/_meta.json b/sdk/monitor/azure-monitor-ingestion/_meta.json new file mode 100644 index 000000000000..d513b1e2d3b5 --- /dev/null +++ b/sdk/monitor/azure-monitor-ingestion/_meta.json @@ -0,0 +1,6 @@ +{ + "commit": "20b8ba31ffb3814d9001ad9618dc48bf3def925d", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "typespec_src": "specification/monitor/Ingestion", + "@azure-tools/typespec-python": "0.36.1" +} \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-ingestion/azure/__init__.py b/sdk/monitor/azure-monitor-ingestion/azure/__init__.py index 8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/__init__.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/__init__.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/__init__.py index 8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/__init__.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/__init__.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/__init__.py index 5f92f88a4629..7caa71199ec4 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/__init__.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/__init__.py @@ -2,20 +2,31 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
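The `extend_path` stubs in `azure/__init__.py` and `azure/monitor/__init__.py` above are pkgutil-style namespace packages: every `azure-*` distribution ships the same one-liner so their subpackages can coexist under a single shared `azure` package. A minimal sketch of the effect, assuming this package and `azure-core` are both installed:

```python
# Two distributions, one shared namespace: both imports resolve even though the
# modules are shipped by different packages (azure-monitor-ingestion, azure-core).
from azure.monitor.ingestion import LogsIngestionClient
from azure.core.credentials import TokenCredential
```
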
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._patch import LogsIngestionClient +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import -from ._patch import LogsUploadError +from ._client import LogsIngestionClient # type: ignore +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] from ._patch import patch_sdk as _patch_sdk __all__ = [ - "LogsUploadError", "LogsIngestionClient", ] - +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_client.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_client.py index 2029d7cbd28e..2a005f15fddb 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_client.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_client.py @@ -2,12 +2,13 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from copy import deepcopy from typing import Any, TYPE_CHECKING +from typing_extensions import Self from azure.core import PipelineClient from azure.core.pipeline import policies @@ -18,20 +19,18 @@ from ._serialization import Deserializer, Serializer if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials import TokenCredential -class LogsIngestionClient(LogsIngestionClientOperationsMixin): # pylint: disable=client-accepts-api-version-keyword - """Azure Monitor Data Collection Python Client. +class LogsIngestionClient(LogsIngestionClientOperationsMixin): + """Azure Monitor data collection client. - :param endpoint: The Data Collection Endpoint for the Data Collection Rule, for example - https://dce-name.eastus-2.ingest.monitor.azure.com. Required. + :param endpoint: Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. Required. + :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: Api Version. Default value is "2023-01-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Default value is "2023-01-01". + Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ @@ -90,7 +89,7 @@ def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: def close(self) -> None: self._client.close() - def __enter__(self) -> "LogsIngestionClient": + def __enter__(self) -> Self: self._client.__enter__() return self diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_configuration.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_configuration.py index fe4cf6f2f115..bca44c714a98 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_configuration.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_configuration.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -10,26 +10,24 @@ from azure.core.pipeline import policies +from ._version import VERSION + if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials import TokenCredential -VERSION = "unknown" - -class LogsIngestionClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long +class LogsIngestionClientConfiguration: # pylint: disable=too-many-instance-attributes """Configuration for LogsIngestionClient. Note that all parameters used to create this instance are saved as instance attributes. - :param endpoint: The Data Collection Endpoint for the Data Collection Rule, for example - https://dce-name.eastus-2.ingest.monitor.azure.com. Required. + :param endpoint: Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. Required. + :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: Api Version. Default value is "2023-01-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Default value is "2023-01-01". + Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str """ @@ -44,7 +42,7 @@ def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) self.endpoint = endpoint self.credential = credential self.api_version = api_version - self.credential_scopes = kwargs.pop("credential_scopes", ["https://monitor.azure.com//.default"]) + self.credential_scopes = kwargs.pop("credential_scopes", ["https://monitor.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "monitor-ingestion/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_helpers.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_helpers.py deleted file mode 100644 index 7a277bddc2a7..000000000000 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_helpers.py +++ /dev/null @@ -1,51 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. 
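The `_configuration.py` hunk above also fixes the default credential scope, dropping the doubled slash in `https://monitor.azure.com//.default`. Because `LogsIngestionClientConfiguration` pops `credential_scopes` from `**kwargs`, callers can still override the audience, for example in sovereign clouds. A sketch of both cases; the endpoints and the Government-cloud scope are illustrative assumptions, not taken from this diff:

```python
from azure.identity import DefaultAzureCredential
from azure.monitor.ingestion import LogsIngestionClient

# Public cloud: the corrected default scope "https://monitor.azure.com/.default" is used.
client = LogsIngestionClient(
    endpoint="https://example-dce.eastus-2.ingest.monitor.azure.com",  # hypothetical DCE
    credential=DefaultAzureCredential(),
)

# Sovereign cloud: override the audience via the credential_scopes kwarg.
gov_client = LogsIngestionClient(
    endpoint="https://example-dce.usgovvirginia-1.ingest.monitor.azure.us",  # hypothetical DCE
    credential=DefaultAzureCredential(),
    credential_scopes=["https://monitor.azure.us/.default"],  # assumed Gov-cloud audience
)
```
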
-# ------------------------------------ -import json -import logging -import sys -from typing import Any, Generator, List, Tuple -import zlib - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports - - -_LOGGER = logging.getLogger(__name__) -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object - -MAX_CHUNK_SIZE_BYTES = 1024 * 1024 # 1 MiB -GZIP_MAGIC_NUMBER = b"\x1f\x8b" - - -def _split_chunks(logs: List[JSON], max_size_bytes: int = MAX_CHUNK_SIZE_BYTES) -> Generator[List[JSON], None, None]: - chunk_size = 0 - curr_chunk = [] - for log in logs: - size = len(json.dumps(log).encode("utf-8")) - if chunk_size + size <= max_size_bytes: - curr_chunk.append(log) - chunk_size += size - else: - if curr_chunk: - _LOGGER.debug("Yielding chunk with size: %d", chunk_size) - yield curr_chunk - curr_chunk = [log] - chunk_size = size - if len(curr_chunk) > 0: - _LOGGER.debug("Yielding chunk with size: %d", chunk_size) - yield curr_chunk - - -def _create_gzip_requests(logs: List[JSON]) -> Generator[Tuple[bytes, List[JSON]], None, None]: - for chunk in _split_chunks(logs): - # cspell:ignore wbits - zlib_mode = 16 + zlib.MAX_WBITS # for gzip encoding - _compress = zlib.compressobj(wbits=zlib_mode) - data = _compress.compress(bytes(json.dumps(chunk), encoding="utf-8")) - data += _compress.flush() - _LOGGER.debug("Yielding gzip compressed data, Length: %d", len(data)) - yield data, chunk diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_model_base.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_model_base.py new file mode 100644 index 000000000000..e6a2730f9276 --- /dev/null +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_model_base.py @@ -0,0 +1,1159 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S'
+
+    Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
+
+    :param timedelta td: The timedelta to convert
+    :rtype: str
+    :return: ISO8601 version of this timedelta
+    """
+
+    # Split seconds to larger units
+    seconds = td.total_seconds()
+    minutes, seconds = divmod(seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    days, hours = divmod(hours, 24)
+
+    days, hours, minutes = list(map(int, (days, hours, minutes)))
+    seconds = round(seconds, 6)
+
+    # Build date
+    date_str = ""
+    if days:
+        date_str = "%sD" % days
+
+    if hours or minutes or seconds:
+        # Build time
+        time_str = "T"
+
+        # Hours
+        bigger_exists = date_str or hours
+        if bigger_exists:
+            time_str += "{:02}H".format(hours)
+
+        # Minutes
+        bigger_exists = bigger_exists or minutes
+        if bigger_exists:
+            time_str += "{:02}M".format(minutes)
+
+        # Seconds
+        try:
+            if seconds.is_integer():
+                seconds_string = "{:02}".format(int(seconds))
+            else:
+                # 9 chars long w/ leading 0, 6 digits after decimal
+                seconds_string = "%09.6f" % seconds
+            # Remove trailing zeros
+            seconds_string = seconds_string.rstrip("0")
+        except AttributeError:  # int.is_integer() raises
+            seconds_string = "{:02}".format(seconds)
+
+        time_str += "{}S".format(seconds_string)
+    else:
+        time_str = ""
+
+    return "P" + date_str + time_str
+
+
+def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
+    encoded = base64.b64encode(o).decode()
+    if format == "base64url":
+        return encoded.strip("=").replace("+", "-").replace("/", "_")
+    return encoded
+
+
+def _serialize_datetime(o, format: typing.Optional[str] = None):
+    if hasattr(o, "year") and hasattr(o, "hour"):
+        if format == "rfc7231":
+            return email.utils.format_datetime(o, usegmt=True)
+        if format == "unix-timestamp":
+            return int(calendar.timegm(o.utctimetuple()))
+
+        # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set)
+        if not o.tzinfo:
+            iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat()
+        else:
+            iso_formatted = o.astimezone(TZ_UTC).isoformat()
+        # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
+        return iso_formatted.replace("+00:00", "Z")
+    # Next try datetime.date or datetime.time
+    return o.isoformat()
+
+
+def _is_readonly(p):
+    try:
+        return p._visibility == ["read"]
+    except AttributeError:
+        return False
+
+
+class SdkJSONEncoder(JSONEncoder):
+    """A JSON encoder that's capable of serializing datetime objects and bytes."""
+
+    def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.exclude_readonly = exclude_readonly
+        self.format = format
+
+    def default(self, o):  # pylint: disable=too-many-return-statements
+        if _is_model(o):
+            if self.exclude_readonly:
+                readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
+                return {k: v for k, v in o.items() if k not in readonly_props}
+            return dict(o.items())
+        try:
+            return super(SdkJSONEncoder, self).default(o)
+        except TypeError:
+            if isinstance(o, _Null):
+                return None
+            if isinstance(o, decimal.Decimal):
+                return float(o)
+            if isinstance(o, (bytes, bytearray)):
+                return _serialize_bytes(o, self.format)
+            try:
+                # First try datetime.datetime
+                return _serialize_datetime(o, self.format)
+            except AttributeError:
+                pass
+            # Last, try datetime.timedelta
+            try:
+                return _timedelta_as_isostr(o)
+            except AttributeError:
+                # This will be raised when it hits value.total_seconds
in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. 
+ :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): # pylint: disable=unsubscriptable-object + def __init__(self, data: typing.Dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + return self._data.items() + + def 
get(self, key: str, default: typing.Any = None) -> typing.Any: + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> typing.Tuple[str, typing.Any]: + return self._data.popitem() + + def clear(self) -> None: + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field( + attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str +) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: typing.Set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + 
rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. 
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) # pylint: disable=no-value-for-parameter + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: typing.Dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: typing.List[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? 
+    if isinstance(annotation, (str, typing.ForwardRef)):
+        try:
+            model_name = annotation.__forward_arg__  # type: ignore
+        except AttributeError:
+            model_name = annotation
+        if module is not None:
+            annotation = _get_model(module, model_name)
+
+    try:
+        if module and _is_model(annotation):
+            if rf:
+                rf._is_model = True
+
+            return functools.partial(_deserialize_model, annotation)  # pyright: ignore
+    except Exception:
+        pass
+
+    # is it a literal?
+    try:
+        if annotation.__origin__ is typing.Literal:  # pyright: ignore
+            return None
+    except AttributeError:
+        pass
+
+    # is it optional?
+    try:
+        if any(a for a in annotation.__args__ if a == type(None)):  # pyright: ignore
+            if len(annotation.__args__) <= 2:  # pyright: ignore
+                if_obj_deserializer = _get_deserialize_callable_from_annotation(
+                    next(a for a in annotation.__args__ if a != type(None)), module, rf  # pyright: ignore
+                )
+
+                return functools.partial(_deserialize_with_optional, if_obj_deserializer)
+            # the type is Optional[Union[...]], we need to remove the None type from the Union
+            annotation_copy = copy.copy(annotation)
+            annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)]  # pyright: ignore
+            return _get_deserialize_callable_from_annotation(annotation_copy, module, rf)
+    except AttributeError:
+        pass
+
+    # is it union?
+    if getattr(annotation, "__origin__", None) is typing.Union:
+        # initial ordering is we make `string` the last deserialization option, because it is often the most generic
+        deserializers = [
+            _get_deserialize_callable_from_annotation(arg, module, rf)
+            for arg in _sorted_annotations(annotation.__args__)  # pyright: ignore
+        ]
+
+        return functools.partial(_deserialize_with_union, deserializers)
+
+    try:
+        if annotation._name == "Dict":  # pyright: ignore
+            value_deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[1], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(
+                _deserialize_dict,
+                value_deserializer,
+                module,
+            )
+    except (AttributeError, IndexError):
+        pass
+    try:
+        if annotation._name in ["List", "Set", "Tuple", "Sequence"]:  # pyright: ignore
+            if len(annotation.__args__) > 1:  # pyright: ignore
+                entry_deserializers = [
+                    _get_deserialize_callable_from_annotation(dt, module, rf)
+                    for dt in annotation.__args__  # pyright: ignore
+                ]
+                return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
+            deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[0], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(_deserialize_sequence, deserializer, module)
+    except (TypeError, IndexError, AttributeError, SyntaxError):
+        pass
+
+    def _deserialize_default(
+        deserializer,
+        obj,
+    ):
+        if obj is None:
+            return obj
+        try:
+            return _deserialize_with_callable(deserializer, obj)
+        except Exception:
+            pass
+        return obj
+
+    if get_deserializer(annotation, rf):
+        return functools.partial(_deserialize_default, get_deserializer(annotation, rf))
+
+    return functools.partial(_deserialize_default, annotation)
+
+
+def _deserialize_with_callable(
+    deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
+    value: typing.Any,
+):  # pylint: disable=too-many-return-statements
+    try:
+        if value is None or isinstance(value, _Null):
+            return None
+        if isinstance(value, ET.Element):
+            if deserializer is str:
+                return value.text or ""
+            if deserializer is int:
+                return int(value.text) if value.text else None
+            if deserializer is float:
+                return float(value.text) if value.text else None
+            if 
deserializer is bool: + return value.text == "true" if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value) + except ValueError: + # for unknown value, return raw value + return value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + 
xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, typing.List[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + 
"prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[typing.Dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: typing.Dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: typing.List[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_models.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_models.py deleted file mode 100644 index 5302a9522d9e..000000000000 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_models.py +++ /dev/null @@ -1,32 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -import sys -from typing import Any, List - -if sys.version_info >= (3, 9): - from collections.abc import Mapping -else: - from typing import Mapping # type: ignore # pylint: disable=ungrouped-imports - -JSON = Mapping[str, Any] # pylint: disable=unsubscriptable-object - - -class LogsUploadError: - """Error information for a failed upload to Azure Monitor. - - :param error: The error that occurred during the upload. - :type error: Exception - :param failed_logs: The list of logs that failed to upload. 
- :type failed_logs: list[JSON] - """ - - error: Exception - """The error that occurred during the upload.""" - failed_logs: List[JSON] - """The list of logs that failed to upload.""" - - def __init__(self, error: Exception, failed_logs: List[JSON]) -> None: - self.error = error - self.failed_logs = failed_logs diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/__init__.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/__init__.py index dd3d0ef4d235..34ad2ded0df9 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/__init__.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/__init__.py @@ -2,17 +2,24 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._patch import LogsIngestionClientOperationsMixin +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import +from ._operations import LogsIngestionClientOperationsMixin # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ "LogsIngestionClientOperationsMixin", ] - +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_operations.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_operations.py index 3d6dc22e4243..cda3a758cc0d 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_operations.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_operations.py @@ -1,14 +1,14 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
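For context on the `LogsUploadError` model deleted above: it backs the `on_error` callback of the public `upload` method, whose hand-written implementation is removed further down in this diff. This section does not show where the two are re-homed, so the sketch below assumes they remain importable from `azure.monitor.ingestion` after regeneration; the endpoint, DCR ID, and stream name are hypothetical:

```python
from azure.identity import DefaultAzureCredential
from azure.monitor.ingestion import LogsIngestionClient, LogsUploadError

client = LogsIngestionClient(
    endpoint="https://example-dce.eastus-2.ingest.monitor.azure.com",  # hypothetical DCE
    credential=DefaultAzureCredential(),
)

failed_logs = []

def on_error(error: LogsUploadError) -> None:
    # The callback receives the exception and the chunk of logs that failed to upload.
    print(f"Chunk upload failed: {error.error}")
    failed_logs.extend(error.failed_logs)

client.upload(
    rule_id="dcr-00000000000000000000000000000000",  # hypothetical immutable DCR ID
    stream_name="Custom-MyTableRawData",             # hypothetical stream declaration
    logs=[{"Time": "2024-01-01T00:00:00Z", "Computer": "vm-1"}],
    on_error=on_error,
)
```

When no callback is supplied, the removed implementation raised the first exception it encountered, so `on_error` is what turns a partial failure into a recoverable event.
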
# -------------------------------------------------------------------------- from io import IOBase +import json import sys -from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -23,14 +23,14 @@ from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict +from .._model_base import SdkJSONEncoder from .._serialization import Serializer from .._vendor import LogsIngestionClientMixinABC if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object + from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -39,7 +39,7 @@ def build_logs_ingestion_upload_request( - rule_id: str, stream: str, *, content_encoding: Optional[str] = None, **kwargs: Any + rule_id: str, stream_name: str, *, content_encoding: Optional[str] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -52,7 +52,7 @@ def build_logs_ingestion_upload_request( _url = "/dataCollectionRules/{ruleId}/streams/{stream}" path_format_arguments = { "ruleId": _SERIALIZER.url("rule_id", rule_id, "str"), - "stream": _SERIALIZER.url("stream", stream, "str"), + "stream": _SERIALIZER.url("stream_name", stream_name, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -71,60 +71,60 @@ def build_logs_ingestion_upload_request( class LogsIngestionClientOperationsMixin(LogsIngestionClientMixinABC): + @overload - def _upload( # pylint: disable=inconsistent-return-statements + def _upload( self, rule_id: str, - stream: str, - body: List[JSON], + stream_name: str, + body: List[Dict[str, Any]], *, content_encoding: Optional[str] = None, content_type: str = "application/json", **kwargs: Any - ) -> None: - ... - + ) -> None: ... @overload - def _upload( # pylint: disable=inconsistent-return-statements + def _upload( self, rule_id: str, - stream: str, + stream_name: str, body: IO[bytes], *, content_encoding: Optional[str] = None, content_type: str = "application/json", **kwargs: Any - ) -> None: - ... + ) -> None: ... @distributed_trace def _upload( # pylint: disable=inconsistent-return-statements self, rule_id: str, - stream: str, - body: Union[List[JSON], IO[bytes]], + stream_name: str, + body: Union[List[Dict[str, Any]], IO[bytes]], *, content_encoding: Optional[str] = None, **kwargs: Any ) -> None: """Ingestion API used to directly ingest data using Data Collection Rules. - See error response code and error response message for more detail. + Ingestion API used to directly ingest data using Data Collection Rules. - :param rule_id: The immutable Id of the Data Collection Rule resource. Required. + :param rule_id: The immutable ID of the Data Collection Rule resource. Required. :type rule_id: str - :param stream: The streamDeclaration name as defined in the Data Collection Rule. Required. - :type stream: str - :param body: An array of objects matching the schema defined by the provided stream. Is either - a [JSON] type or a IO[bytes] type. Required. 
- :type body: list[JSON] or IO[bytes] - :keyword content_encoding: gzip. Default value is None. + :param stream_name: The streamDeclaration name as defined in the Data Collection Rule. + Required. + :type stream_name: str + :param body: The array of objects matching the schema defined by the provided stream. Is either + a [{str: Any}] type or a IO[bytes] type. Required. + :type body: list[dict[str, any]] or IO[bytes] + :keyword content_encoding: The content encoding of the request body which is always 'gzip'. + Default value is None. :paramtype content_encoding: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -139,20 +139,18 @@ def _upload( # pylint: disable=inconsistent-return-statements cls: ClsType[None] = kwargs.pop("cls", None) content_type = content_type or "application/json" - _json = None _content = None if isinstance(body, (IOBase, bytes)): _content = body else: - _json = body + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore _request = build_logs_ingestion_upload_request( rule_id=rule_id, - stream=stream, + stream_name=stream_name, content_encoding=content_encoding, content_type=content_type, api_version=self._config.api_version, - json=_json, content=_content, headers=_headers, params=_params, @@ -170,8 +168,6 @@ def _upload( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_patch.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_patch.py index 4768d10331eb..f7dd32510333 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_patch.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_patch.py @@ -6,89 +6,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from collections.abc import Sequence -from io import IOBase -import logging -import sys -from typing import Callable, cast, List, Any, Optional, Union, IO +from typing import List -from ._operations import LogsIngestionClientOperationsMixin as GeneratedOps -from .._helpers import _create_gzip_requests, GZIP_MAGIC_NUMBER -from .._models import LogsUploadError - -if sys.version_info >= (3, 9): - from collections.abc import Mapping, MutableMapping -else: - from typing import Mapping, MutableMapping # type: ignore # pylint: disable=ungrouped-imports - - -_LOGGER = logging.getLogger(__name__) -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object - - -class LogsIngestionClientOperationsMixin(GeneratedOps): - def upload( - self, - rule_id: str, - stream_name: str, - logs: Union[List[JSON], IO[bytes]], - *, - on_error: Optional[Callable[[LogsUploadError], None]] = None, - **kwargs: Any - ) -> None: - """Ingestion API used to directly ingest data using Data Collection Rules. - - A list of logs is divided into chunks of 1MB or less, then each chunk is gzip-compressed and uploaded. - If an I/O stream is passed in, the stream is uploaded as-is. 
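The chunk-and-compress behavior this removed docstring describes was implemented by the `_split_chunks` and `_create_gzip_requests` helpers from the deleted `_helpers.py` earlier in this diff. A condensed, self-contained restatement of that logic; `gzip.compress` stands in for the original `zlib` compressor, which produced the same gzip framing:

```python
import gzip
import json
from typing import Any, Dict, Iterator, List, Tuple

MAX_CHUNK_SIZE_BYTES = 1024 * 1024  # 1 MiB, matching the removed _helpers.py

def gzip_chunks(logs: List[Dict[str, Any]]) -> Iterator[Tuple[bytes, List[Dict[str, Any]]]]:
    """Yield (compressed_payload, chunk) pairs, each chunk at most ~1 MiB of JSON."""
    chunk: List[Dict[str, Any]] = []
    size = 0
    for log in logs:
        entry_size = len(json.dumps(log).encode("utf-8"))
        # Flush the current chunk before this entry would push it past the limit.
        if size + entry_size > MAX_CHUNK_SIZE_BYTES and chunk:
            yield gzip.compress(json.dumps(chunk).encode("utf-8")), chunk
            chunk, size = [], 0
        chunk.append(log)
        size += entry_size
    if chunk:
        yield gzip.compress(json.dumps(chunk).encode("utf-8")), chunk
```

Each yielded pair corresponded to one call to the generated `_upload` with `content_encoding="gzip"`, with the uncompressed chunk retained so it could be handed to the `on_error` callback on failure.
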
- - :param rule_id: The immutable ID of the Data Collection Rule resource. - :type rule_id: str - :param stream_name: The streamDeclaration name as defined in the Data Collection Rule. - :type stream_name: str - :param logs: An array of objects matching the schema defined by the provided stream. - :type logs: list[JSON] or IO - :keyword on_error: The callback function that is called when a chunk of logs fails to upload. - This function should expect one argument that corresponds to an "LogsUploadError" object. - If no function is provided, then the first exception encountered will be raised. - :paramtype on_error: Optional[Callable[[~azure.monitor.ingestion.LogsUploadError], None]] - :return: None - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - if isinstance(logs, IOBase): - if not logs.readable(): - raise ValueError("The 'logs' stream must be readable.") - content_encoding = None - # Check if the stream is gzip-compressed if stream is seekable. - if logs.seekable(): - if logs.read(2) == GZIP_MAGIC_NUMBER: - content_encoding = "gzip" - logs.seek(0) - - super()._upload(rule_id, stream=stream_name, body=logs, content_encoding=content_encoding, **kwargs) - return - - if not isinstance(logs, Sequence) or isinstance(logs, str): - raise ValueError( - "The 'logs' parameter must be a list of mappings/dictionaries or an I/O stream that is readable." - ) - - for gzip_data, log_chunk in _create_gzip_requests(cast(List[JSON], logs)): - try: - super()._upload( # type: ignore - rule_id, stream=stream_name, body=gzip_data, content_encoding="gzip", **kwargs # type: ignore - ) - except Exception as err: # pylint: disable=broad-except - if on_error: - on_error(LogsUploadError(error=err, failed_logs=cast(List[Mapping[str, Any]], log_chunk))) - else: - _LOGGER.error("Failed to upload chunk containing %d log entries", len(log_chunk)) - raise err - - -__all__: List[str] = [ - "LogsIngestionClientOperationsMixin" -] # Add all objects you want publicly available to users at this package level +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_patch.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_patch.py index ef7425309147..f7dd32510333 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_patch.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_patch.py @@ -6,43 +6,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from ._client import LogsIngestionClient as GeneratedClient -from ._models import LogsUploadError +from typing import List - -class LogsIngestionClient(GeneratedClient): - """The synchronous client for uploading logs to Azure Monitor. - - :param endpoint: The Data Collection Endpoint for the Data Collection Rule, for example - https://dce-name.eastus-2.ingest.monitor.azure.com. - :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: Api Version. Default value is "2023-01-01". Note that overriding - this default value may result in unsupported behavior. - :paramtype api_version: str - - .. admonition:: Example: - - .. 
literalinclude:: ../samples/sample_authentication.py - :start-after: [START create_client_public_cloud] - :end-before: [END create_client_public_cloud] - :language: python - :dedent: 4 - :caption: Creating the LogsIngestionClient with DefaultAzureCredential. - - .. admonition:: Example: - - .. literalinclude:: ../samples/sample_authentication.py - :start-after: [START create_client_sovereign_cloud] - :end-before: [END create_client_sovereign_cloud] - :language: python - :dedent: 4 - :caption: Creating the LogsIngestionClient for use with a sovereign cloud (i.e. non-public cloud). - """ - - -__all__ = ["LogsIngestionClient", "LogsUploadError"] +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_serialization.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_serialization.py index 2f781d740827..ce17d1798ce7 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_serialization.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_serialization.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. @@ -24,7 +25,6 @@ # # -------------------------------------------------------------------------- -# pylint: skip-file # pyright: reportUnnecessaryTypeIgnoreComment=false from base64 import b64decode, b64encode @@ -52,7 +52,6 @@ MutableMapping, Type, List, - Mapping, ) try: @@ -91,6 +90,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: :param data: Input, could be bytes or stream (will be decoded with UTF8) or text :type data: str or bytes or IO :param str content_type: The content type. + :return: The deserialized data. + :rtype: object """ if hasattr(data, "read"): # Assume a stream @@ -112,7 +113,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: try: return json.loads(data_as_str) except ValueError as err: - raise DeserializationError("JSON is invalid: {}".format(err), err) + raise DeserializationError("JSON is invalid: {}".format(err), err) from err elif "xml" in (content_type or []): try: @@ -144,6 +145,8 @@ def _json_attemp(data): # context otherwise. _LOGGER.critical("Wasn't XML not JSON, failing") raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod @@ -153,6 +156,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], Use bytes and headers to NOT use any requests/aiohttp or whatever specific implementation. Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object """ # Try to use content-type from headers if available content_type = None @@ -182,15 +190,30 @@ class UTC(datetime.tzinfo): """Time Zone info for handling UTC""" def utcoffset(self, dt): - """UTF offset for UTC is 0.""" + """UTF offset for UTC is 0. + + :param datetime.datetime dt: The datetime + :returns: The offset + :rtype: datetime.timedelta + """ return datetime.timedelta(0) def tzname(self, dt): - """Timestamp representation.""" + """Timestamp representation. 
+
+        :param datetime.datetime dt: The datetime
+        :returns: The timestamp representation
+        :rtype: str
+        """
         return "Z"
 
     def dst(self, dt):
-        """No daylight saving for UTC."""
+        """No daylight saving for UTC.
+
+        :param datetime.datetime dt: The datetime
+        :returns: The daylight saving time
+        :rtype: datetime.timedelta
+        """
         return datetime.timedelta(hours=1)
 
@@ -204,7 +227,7 @@ class _FixedOffset(datetime.tzinfo):  # type: ignore
     :param datetime.timedelta offset: offset in timedelta format
     """
 
-    def __init__(self, offset):
+    def __init__(self, offset) -> None:
         self.__offset = offset
 
     def utcoffset(self, dt):
@@ -233,24 +256,26 @@ def __getinitargs__(self):
 _FLATTEN = re.compile(r"(?<!\\)\.")
[...]
     def __init__(self, **kwargs: Any) -> None:
         self.additional_properties: Optional[Dict[str, Any]] = {}
-        for k in kwargs:
+        for k in kwargs:  # pylint: disable=consider-using-dict-items
             if k not in self._attribute_map:
                 _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
             elif k in self._validation and self._validation[k].get("readonly", False):
@@ -298,13 +330,23 @@ def __init__(self, **kwargs: Any) -> None:
                 setattr(self, k, kwargs[k])
 
     def __eq__(self, other: Any) -> bool:
-        """Compare objects by comparing all attributes."""
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are equal
+        :rtype: bool
+        """
         if isinstance(other, self.__class__):
             return self.__dict__ == other.__dict__
         return False
 
     def __ne__(self, other: Any) -> bool:
-        """Compare objects by comparing all attributes."""
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are not equal
+        :rtype: bool
+        """
         return not self.__eq__(other)
 
     def __str__(self) -> str:
@@ -324,7 +366,11 @@ def is_xml_model(cls) -> bool:
     @classmethod
     def _create_xml_node(cls):
-        """Create XML node."""
+        """Create XML node.
+
+        :returns: The XML node
+        :rtype: xml.etree.ElementTree.Element
+        """
         try:
             xml_map = cls._xml_map  # type: ignore
         except AttributeError:
@@ -344,7 +390,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
         :rtype: dict
         """
         serializer = Serializer(self._infer_class_models())
-        return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs)  # type: ignore
+        return serializer._serialize(  # type: ignore # pylint: disable=protected-access
+            self, keep_readonly=keep_readonly, **kwargs
+        )
 
     def as_dict(
         self,
@@ -378,12 +426,15 @@ def my_key_transformer(key, attr_desc, value):
 
         If you want XML serialization, you can pass the kwargs is_xml=True.
 
+        :param bool keep_readonly: If you want to serialize the readonly attributes
         :param function key_transformer: A key transformer function.
         :returns: A dict JSON compatible object
         :rtype: dict
         """
         serializer = Serializer(self._infer_class_models())
-        return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs)  # type: ignore
+        return serializer._serialize(  # type: ignore # pylint: disable=protected-access
+            self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+        )
 
     @classmethod
     def _infer_class_models(cls):
@@ -393,7 +444,7 @@ def _infer_class_models(cls):
             client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
             if cls.__name__ not in client_models:
                 raise ValueError("Not Autorest generated code")
-        except Exception:
+        except Exception:  # pylint: disable=broad-exception-caught
             # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
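# A short sketch of the ``key_transformer`` hook documented for ``as_dict`` above.
# Mirroring the built-in ``attribute_transformer``, a transformer receives the Python
# attribute name, its ``_attribute_map`` entry, and the value, and returns the
# (key, value) pair to write into the produced dict:
def my_key_transformer(key, attr_desc, value):
    # attr_desc is e.g. {"key": "properties.name", "type": "str"};
    # here we keep the full REST API path instead of the Python attribute name.
    return (attr_desc["key"], value)

# result = some_model.as_dict(key_transformer=my_key_transformer)  # ``some_model`` is hypothetical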
client_models = {cls.__name__: cls} return client_models @@ -406,6 +457,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model :raises: DeserializationError if something went wrong + :rtype: ModelType """ deserializer = Deserializer(cls._infer_class_models()) return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @@ -424,9 +476,11 @@ def from_dict( and last_rest_key_case_insensitive_extractor) :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model :raises: DeserializationError if something went wrong + :rtype: ModelType """ deserializer = Deserializer(cls._infer_class_models()) deserializer.key_extractors = ( # type: ignore @@ -446,21 +500,25 @@ def _flatten_subtype(cls, key, objects): return {} result = dict(cls._subtype_map[key]) for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access return result @classmethod def _classify(cls, response, objects): """Check the class _subtype_map for any child classes. We want to ignore any inherited _subtype_maps. - Remove the polymorphic key from the initial data. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class """ for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): subtype_value = None if not isinstance(response, ET.Element): rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] - subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) else: subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) if subtype_value: @@ -499,11 +557,13 @@ def _decode_attribute_map_key(key): inside the received data. :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str """ return key.replace("\\.", ".") -class Serializer(object): +class Serializer(object): # pylint: disable=too-many-public-methods """Request object model serializer.""" basic_types = {str: "str", int: "int", bool: "bool", float: "float"} @@ -538,7 +598,7 @@ class Serializer(object): "multiple": lambda x, y: x % y != 0, } - def __init__(self, classes: Optional[Mapping[str, type]] = None): + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: self.serialize_type = { "iso-8601": Serializer.serialize_iso, "rfc-1123": Serializer.serialize_rfc, @@ -558,13 +618,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None): self.key_transformer = full_restapi_key_transformer self.client_side_validation = True - def _serialize(self, target_obj, data_type=None, **kwargs): + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): """Serialize data into a string according to type. - :param target_obj: The data to be serialized. + :param object target_obj: The data to be serialized. :param str data_type: The type to be serialized from. 
:rtype: str, dict :raises: SerializationError if serialization fails. + :returns: The serialized data. """ key_transformer = kwargs.get("key_transformer", self.key_transformer) keep_readonly = kwargs.get("keep_readonly", False) @@ -590,12 +653,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs): serialized = {} if is_xml_model_serialization: - serialized = target_obj._create_xml_node() + serialized = target_obj._create_xml_node() # pylint: disable=protected-access try: - attributes = target_obj._attribute_map + attributes = target_obj._attribute_map # pylint: disable=protected-access for attr, attr_desc in attributes.items(): attr_name = attr - if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): continue if attr_name == "additional_properties" and attr_desc["key"] == "": @@ -631,7 +696,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs): if isinstance(new_attr, list): serialized.extend(new_attr) # type: ignore elif isinstance(new_attr, ET.Element): - # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. if "name" not in getattr(orig_attr, "_xml_map", {}): splitted_tag = new_attr.tag.split("}") if len(splitted_tag) == 2: # Namespace @@ -662,17 +728,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs): except (AttributeError, KeyError, TypeError) as err: msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) raise SerializationError(msg) from err - else: - return serialized + return serialized def body(self, data, data_type, **kwargs): """Serialize data intended for a request body. - :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: dict :raises: SerializationError if serialization fails. :raises: ValueError if data is None + :returns: The serialized request body """ # Just in case this is a dict @@ -701,7 +767,7 @@ def body(self, data, data_type, **kwargs): attribute_key_case_insensitive_extractor, last_rest_key_case_insensitive_extractor, ] - data = deserializer._deserialize(data_type, data) + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access except DeserializationError as err: raise SerializationError("Unable to build a model: " + str(err)) from err @@ -710,9 +776,11 @@ def body(self, data, data_type, **kwargs): def url(self, name, data, data_type, **kwargs): """Serialize data intended for a URL path. - :param data: The data to be serialized. + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str + :returns: The serialized URL path :raises: TypeError if serialization fails. 
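# How the ``url`` helper above is used by ``build_logs_ingestion_upload_request``
# earlier in this diff: each path parameter is serialized, percent-encoded, and
# substituted into the route template (values below are placeholders):
_SERIALIZER = Serializer()
path_format_arguments = {
    "ruleId": _SERIALIZER.url("rule_id", "dcr-123", "str"),
    "stream": _SERIALIZER.url("stream_name", "Custom-My Table", "str"),
}
url = "/dataCollectionRules/{ruleId}/streams/{stream}".format(**path_format_arguments)
# -> "/dataCollectionRules/dcr-123/streams/Custom-My%20Table"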
:raises: ValueError if data is None """ @@ -726,21 +794,20 @@ def url(self, name, data, data_type, **kwargs): output = output.replace("{", quote("{")).replace("}", quote("}")) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return output + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output def query(self, name, data, data_type, **kwargs): """Serialize data intended for a URL query. - :param data: The data to be serialized. + :param str name: The name of the query parameter. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :keyword bool skip_quote: Whether to skip quote the serialized result. - Defaults to False. :rtype: str, list :raises: TypeError if serialization fails. :raises: ValueError if data is None + :returns: The serialized query parameter """ try: # Treat the list aside, since we don't want to encode the div separator @@ -757,19 +824,20 @@ def query(self, name, data, data_type, **kwargs): output = str(output) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def header(self, name, data, data_type, **kwargs): """Serialize data intended for a request header. - :param data: The data to be serialized. + :param str name: The name of the header. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str :raises: TypeError if serialization fails. :raises: ValueError if data is None + :returns: The serialized header """ try: if data_type in ["[str]"]: @@ -778,21 +846,20 @@ def header(self, name, data, data_type, **kwargs): output = self.serialize_data(data, data_type, **kwargs) if data_type == "bool": output = json.dumps(output) - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def serialize_data(self, data, data_type, **kwargs): """Serialize generic data according to supplied data type. - :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :param bool required: Whether it's essential that the data not be - empty or None :raises: AttributeError if required data is None. :raises: ValueError if data is None :raises: SerializationError if serialization fails. + :returns: The serialized data. + :rtype: str, int, float, bool, dict, list """ if data is None: raise ValueError("No value for given attribute") @@ -803,7 +870,7 @@ def serialize_data(self, data, data_type, **kwargs): if data_type in self.basic_types.values(): return self.serialize_basic(data, data_type, **kwargs) - elif data_type in self.serialize_type: + if data_type in self.serialize_type: return self.serialize_type[data_type](data, **kwargs) # If dependencies is empty, try with current data class @@ -819,11 +886,10 @@ def serialize_data(self, data, data_type, **kwargs): except (ValueError, TypeError) as err: msg = "Unable to serialize value: {!r} as type: {!r}." 
raise SerializationError(msg.format(data, data_type)) from err - else: - return self._serialize(data, **kwargs) + return self._serialize(data, **kwargs) @classmethod - def _get_custom_serializers(cls, data_type, **kwargs): + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) if custom_serializer: return custom_serializer @@ -839,23 +905,26 @@ def serialize_basic(cls, data, data_type, **kwargs): - basic_types_serializers dict[str, callable] : If set, use the callable as serializer - is_xml bool : If set, use xml_basic_types_serializers - :param data: Object to be serialized. + :param obj data: Object to be serialized. :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object """ custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec + return eval(data_type)(data) # nosec # pylint: disable=eval-used @classmethod def serialize_unicode(cls, data): """Special handling for serializing unicode strings in Py2. Encode to UTF-8 if unicode, otherwise handle as a str. - :param data: Object to be serialized. + :param str data: Object to be serialized. :rtype: str + :return: serialized object """ try: # If I received an enum, return its value return data.value @@ -869,8 +938,7 @@ def serialize_unicode(cls, data): return data except NameError: return str(data) - else: - return str(data) + return str(data) def serialize_iter(self, data, iter_type, div=None, **kwargs): """Serialize iterable. @@ -880,15 +948,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): serialization_ctxt['type'] should be same as data_type. - is_xml bool : If set, serialize as XML - :param list attr: Object to be serialized. + :param list data: Object to be serialized. :param str iter_type: Type of object in the iterable. - :param bool required: Whether the objects in the iterable must - not be None or empty. :param str div: If set, this str will be used to combine the elements in the iterable into a combined string. Default is 'None'. - :keyword bool do_quote: Whether to quote the serialized result of each iterable element. Defaults to False. :rtype: list, str + :return: serialized iterable """ if isinstance(data, str): raise SerializationError("Refuse str type as a valid iter type.") @@ -943,9 +1009,8 @@ def serialize_dict(self, attr, dict_type, **kwargs): :param dict attr: Object to be serialized. :param str dict_type: Type of object in the dictionary. - :param bool required: Whether the objects in the dictionary must - not be None or empty. :rtype: dict + :return: serialized dictionary """ serialization_ctxt = kwargs.get("serialization_ctxt", {}) serialized = {} @@ -969,7 +1034,7 @@ def serialize_dict(self, attr, dict_type, **kwargs): return serialized - def serialize_object(self, attr, **kwargs): + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements """Serialize a generic object. This will be handled as a dictionary. If object passed in is not a basic type (str, int, float, dict, list) it will simply be @@ -977,6 +1042,7 @@ def serialize_object(self, attr, **kwargs): :param dict attr: Object to be serialized. 
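# A quick sketch of ``serialize_iter`` above: each element goes through
# ``serialize_data`` and, when ``div`` is given, the results are joined into one string:
serializer = Serializer()
serializer.serialize_iter(["a", "b", "c"], "str", div=",")  # -> "a,b,c"
serializer.serialize_iter([1, 2, 3], "int")                 # -> [1, 2, 3]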
:rtype: dict or str + :return: serialized object """ if attr is None: return None @@ -1001,7 +1067,7 @@ def serialize_object(self, attr, **kwargs): return self.serialize_decimal(attr) # If it's a model or I know this dependency, serialize as a Model - elif obj_type in self.dependencies.values() or isinstance(attr, Model): + if obj_type in self.dependencies.values() or isinstance(attr, Model): return self._serialize(attr) if obj_type == dict: @@ -1032,56 +1098,61 @@ def serialize_enum(attr, enum_obj=None): try: enum_obj(result) # type: ignore return result - except ValueError: + except ValueError as exc: for enum_value in enum_obj: # type: ignore if enum_value.value.lower() == str(attr).lower(): return enum_value.value error = "{!r} is not valid value for enum {!r}" - raise SerializationError(error.format(attr, enum_obj)) + raise SerializationError(error.format(attr, enum_obj)) from exc @staticmethod - def serialize_bytearray(attr, **kwargs): + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument """Serialize bytearray into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. :rtype: str + :return: serialized base64 """ return b64encode(attr).decode() @staticmethod - def serialize_base64(attr, **kwargs): + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument """Serialize str into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. :rtype: str + :return: serialized base64 """ encoded = b64encode(attr).decode("ascii") return encoded.strip("=").replace("+", "-").replace("/", "_") @staticmethod - def serialize_decimal(attr, **kwargs): + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument """Serialize Decimal object to float. - :param attr: Object to be serialized. + :param decimal attr: Object to be serialized. :rtype: float + :return: serialized decimal """ return float(attr) @staticmethod - def serialize_long(attr, **kwargs): + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument """Serialize long (Py2) or int (Py3). - :param attr: Object to be serialized. + :param int attr: Object to be serialized. :rtype: int/long + :return: serialized long """ return _long_type(attr) @staticmethod - def serialize_date(attr, **kwargs): + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument """Serialize Date object into ISO-8601 formatted string. :param Date attr: Object to be serialized. :rtype: str + :return: serialized date """ if isinstance(attr, str): attr = isodate.parse_date(attr) @@ -1089,11 +1160,12 @@ def serialize_date(attr, **kwargs): return t @staticmethod - def serialize_time(attr, **kwargs): + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument """Serialize Time object into ISO-8601 formatted string. :param datetime.time attr: Object to be serialized. :rtype: str + :return: serialized time """ if isinstance(attr, str): attr = isodate.parse_time(attr) @@ -1103,30 +1175,32 @@ def serialize_time(attr, **kwargs): return t @staticmethod - def serialize_duration(attr, **kwargs): + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument """Serialize TimeDelta object into ISO-8601 formatted string. :param TimeDelta attr: Object to be serialized. 
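# ``serialize_object`` above is the fallback for untyped payloads: basic types pass
# through, known models go to ``_serialize``, and dicts/lists recurse (illustrative):
serializer = Serializer()
serializer.serialize_object({"count": 3, "tags": ["a", "b"]})  # -> {"count": 3, "tags": ["a", "b"]}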
        :rtype: str
+        :return: serialized duration
         """
         if isinstance(attr, str):
             attr = isodate.parse_duration(attr)
         return isodate.duration_isoformat(attr)
 
     @staticmethod
-    def serialize_rfc(attr, **kwargs):
+    def serialize_rfc(attr, **kwargs):  # pylint: disable=unused-argument
         """Serialize Datetime object into RFC-1123 formatted string.
 
         :param Datetime attr: Object to be serialized.
         :rtype: str
         :raises: TypeError if format invalid.
+        :return: serialized rfc
         """
         try:
             if not attr.tzinfo:
                 _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
             utc = attr.utctimetuple()
-        except AttributeError:
-            raise TypeError("RFC1123 object must be valid Datetime object.")
+        except AttributeError as exc:
+            raise TypeError("RFC1123 object must be valid Datetime object.") from exc
 
         return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
             Serializer.days[utc.tm_wday],
@@ -1139,12 +1213,13 @@ def serialize_rfc(attr, **kwargs):
         )
 
     @staticmethod
-    def serialize_iso(attr, **kwargs):
+    def serialize_iso(attr, **kwargs):  # pylint: disable=unused-argument
         """Serialize Datetime object into ISO-8601 formatted string.
 
         :param Datetime attr: Object to be serialized.
         :rtype: str
         :raises: SerializationError if format invalid.
+        :return: serialized iso
         """
         if isinstance(attr, str):
             attr = isodate.parse_datetime(attr)
@@ -1170,13 +1245,14 @@ def serialize_iso(attr, **kwargs):
             raise TypeError(msg) from err
 
     @staticmethod
-    def serialize_unix(attr, **kwargs):
+    def serialize_unix(attr, **kwargs):  # pylint: disable=unused-argument
         """Serialize Datetime object into IntTime format.
         This is represented as seconds.
 
         :param Datetime attr: Object to be serialized.
         :rtype: int
         :raises: SerializationError if format invalid
+        :return: serialized unix
         """
         if isinstance(attr, int):
             return attr
@@ -1184,11 +1260,11 @@ def serialize_unix(attr, **kwargs):
             if not attr.tzinfo:
                 _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
             return int(calendar.timegm(attr.utctimetuple()))
-        except AttributeError:
-            raise TypeError("Unix time object must be valid Datetime object.")
+        except AttributeError as exc:
+            raise TypeError("Unix time object must be valid Datetime object.") from exc
 
 
-def rest_key_extractor(attr, attr_desc, data):
+def rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
     key = attr_desc["key"]
     working_data = data
 
@@ -1209,7 +1285,9 @@ def rest_key_extractor(attr, attr_desc, data):
     return working_data.get(key)
 
 
-def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+def rest_key_case_insensitive_extractor(  # pylint: disable=unused-argument, inconsistent-return-statements
+    attr, attr_desc, data
+):
     key = attr_desc["key"]
     working_data = data
 
@@ -1230,17 +1308,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data):
     return attribute_key_case_insensitive_extractor(key, None, working_data)
 
 
-def last_rest_key_extractor(attr, attr_desc, data):
-    """Extract the attribute in "data" based on the last part of the JSON path key."""
+def last_rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
+    """Extract the attribute in "data" based on the last part of the JSON path key.
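# The three datetime wire formats implemented above, side by side (illustrative values):
from datetime import datetime, timezone

dt = datetime(2023, 1, 1, tzinfo=timezone.utc)
Serializer.serialize_iso(dt)   # -> "2023-01-01T00:00:00.000Z"      (ISO-8601)
Serializer.serialize_rfc(dt)   # -> "Sun, 01 Jan 2023 00:00:00 GMT" (RFC-1123)
Serializer.serialize_unix(dt)  # -> 1672531200                      (epoch seconds)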
+ + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) return attribute_key_extractor(dict_keys[-1], None, data) -def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument """Extract the attribute in "data" based on the last part of the JSON path key. This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) @@ -1277,7 +1367,7 @@ def _extract_name_from_internal_type(internal_type): return xml_name -def xml_key_extractor(attr, attr_desc, data): +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements if isinstance(data, dict): return None @@ -1329,22 +1419,21 @@ def xml_key_extractor(attr, attr_desc, data): if is_iter_type: if is_wrapped: return None # is_wrapped no node, we want None - else: - return [] # not wrapped, assume empty list + return [] # not wrapped, assume empty list return None # Assume it's not there, maybe an optional node. # If is_iter_type and not wrapped, return all found children if is_iter_type: if not is_wrapped: return children - else: # Iter and wrapped, should have found one node only (the wrap one) - if len(children) != 1: - raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( - xml_name - ) + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long + xml_name ) - return list(children[0]) # Might be empty list and that's ok. + ) + return list(children[0]) # Might be empty list and that's ok. # Here it's not a itertype, we should have found one element only or empty if len(children) > 1: @@ -1361,9 +1450,9 @@ class Deserializer(object): basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - def __init__(self, classes: Optional[Mapping[str, type]] = None): + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: self.deserialize_type = { "iso-8601": Deserializer.deserialize_iso, "rfc-1123": Deserializer.deserialize_rfc, @@ -1401,11 +1490,12 @@ def __call__(self, target_obj, response_data, content_type=None): :param str content_type: Swagger "produces" if available. :raises: DeserializationError if deserialization fails. :return: Deserialized object. + :rtype: object """ data = self._unpack_content(response_data, content_type) return self._deserialize(target_obj, data) - def _deserialize(self, target_obj, data): + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements """Call the deserializer on a model. 
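# How the key extractors above resolve a flattened ``_attribute_map`` key such as
# "properties.name" against response data (illustrative values):
data = {"properties": {"name": "dcr-eastus"}}
rest_key_extractor("name", {"key": "properties.name"}, data)       # walks the nesting -> "dcr-eastus"
last_rest_key_extractor("name", {"key": "properties.name"}, data)  # only tries the last segment -> None here
last_rest_key_extractor("name", {"key": "properties.name"}, {"name": "dcr-eastus"})  # -> "dcr-eastus"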
Data needs to be already deserialized as JSON or XML ElementTree @@ -1414,12 +1504,13 @@ def _deserialize(self, target_obj, data): :param object data: Object to deserialize. :raises: DeserializationError if deserialization fails. :return: Deserialized object. + :rtype: object """ # This is already a model, go recursive just in case if hasattr(data, "_attribute_map"): constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] try: - for attr, mapconfig in data._attribute_map.items(): + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access if attr in constants: continue value = getattr(data, attr) @@ -1438,13 +1529,13 @@ def _deserialize(self, target_obj, data): if isinstance(response, str): return self.deserialize_data(data, response) - elif isinstance(response, type) and issubclass(response, Enum): + if isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) - if data is None: + if data is None or data is CoreNull: return data try: - attributes = response._attribute_map # type: ignore + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access d_attrs = {} for attr, attr_desc in attributes.items(): # Check empty string. If it's not empty, someone has a real "additionalProperties"... @@ -1474,9 +1565,8 @@ def _deserialize(self, target_obj, data): except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name # type: ignore raise DeserializationError(msg) from err - else: - additional_properties = self._build_additional_properties(attributes, data) - return self._instantiate_model(response, d_attrs, additional_properties) + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) def _build_additional_properties(self, attribute_map, data): if not self.additional_properties_detection: @@ -1503,6 +1593,8 @@ def _classify_target(self, target, data): :param str target: The target object type to deserialize to. :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple """ if target is None: return None, None @@ -1514,7 +1606,7 @@ def _classify_target(self, target, data): return target, target try: - target = target._classify(data, self.dependencies) # type: ignore + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access except AttributeError: pass # Target is not a Model, no classify return target, target.__class__.__name__ # type: ignore @@ -1529,10 +1621,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None): :param str target_obj: The target object type to deserialize to. :param str/dict data: The response data to deserialize. :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object """ try: return self(target_obj, data, content_type=content_type) - except: + except: # pylint: disable=bare-except _LOGGER.debug( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) @@ -1550,10 +1644,12 @@ def _unpack_content(raw_data, content_type=None): If raw_data is something else, bypass all logic and return it directly. - :param raw_data: Data to be processed. - :param content_type: How to parse if raw_data is a string/bytes. + :param obj raw_data: Data to be processed. 
+ :param str content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. """ # Assume this is enough to detect a Pipeline Response without importing it context = getattr(raw_data, "context", {}) @@ -1577,14 +1673,21 @@ def _unpack_content(raw_data, content_type=None): def _instantiate_model(self, response, attrs, additional_properties=None): """Instantiate a response model passing in deserialized args. - :param response: The response model class. - :param d_attrs: The deserialized response attributes. + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. """ if callable(response): subtype = getattr(response, "_subtype_map", {}) try: - readonly = [k for k, v in response._validation.items() if v.get("readonly")] - const = [k for k, v in response._validation.items() if v.get("constant")] + readonly = [ + k for k, v in response._validation.items() if v.get("readonly") # pylint: disable=protected-access + ] + const = [ + k for k, v in response._validation.items() if v.get("constant") # pylint: disable=protected-access + ] kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} response_obj = response(**kwargs) for attr in readonly: @@ -1594,7 +1697,7 @@ def _instantiate_model(self, response, attrs, additional_properties=None): return response_obj except TypeError as err: msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore - raise DeserializationError(msg + str(err)) + raise DeserializationError(msg + str(err)) from err else: try: for attr, value in attrs.items(): @@ -1603,15 +1706,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None): except Exception as exp: msg = "Unable to populate response model. " msg += "Type: {}, Error: {}".format(type(response), exp) - raise DeserializationError(msg) + raise DeserializationError(msg) from exp - def deserialize_data(self, data, data_type): + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements """Process data for deserialization according to data type. :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. :raises: DeserializationError if deserialization fails. :return: Deserialized object. + :rtype: object """ if data is None: return data @@ -1625,7 +1729,11 @@ def deserialize_data(self, data, data_type): if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): return data - is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: return None data_val = self.deserialize_type[data_type](data) @@ -1645,14 +1753,14 @@ def deserialize_data(self, data, data_type): msg = "Unable to deserialize response data." msg += " Data: {}, {}".format(data, data_type) raise DeserializationError(msg) from err - else: - return self._deserialize(obj_type, data) + return self._deserialize(obj_type, data) def deserialize_iter(self, attr, iter_type): """Deserialize an iterable. 
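# ``deserialize_data`` above dispatches on the declared type string; the builtin
# types end up in ``deserialize_basic`` (illustrative):
d = Deserializer()
d.deserialize_data("true", "bool")  # -> True ('true'/'false'/'1'/'0' all accepted)
d.deserialize_data("42", "int")     # -> 42
d.deserialize_data(None, "str")     # -> None (None always passes through)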
:param list attr: Iterable to be deserialized. :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. :rtype: list """ if attr is None: @@ -1669,6 +1777,7 @@ def deserialize_dict(self, attr, dict_type): :param dict/list attr: Dictionary to be deserialized. Also accepts a list of key, value pairs. :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. :rtype: dict """ if isinstance(attr, list): @@ -1679,11 +1788,12 @@ def deserialize_dict(self, attr, dict_type): attr = {el.tag: el.text for el in attr} return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} - def deserialize_object(self, attr, **kwargs): + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements """Deserialize a generic object. This will be handled as a dictionary. :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. :rtype: dict :raises: TypeError if non-builtin datatype encountered. """ @@ -1718,11 +1828,10 @@ def deserialize_object(self, attr, **kwargs): pass return deserialized - else: - error = "Cannot deserialize generic object with type: " - raise TypeError(error + str(obj_type)) + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) - def deserialize_basic(self, attr, data_type): + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements """Deserialize basic builtin data type from string. Will attempt to convert to str, int, float and bool. This function will also accept '1', '0', 'true' and 'false' as @@ -1730,6 +1839,7 @@ def deserialize_basic(self, attr, data_type): :param str attr: response string to be deserialized. :param str data_type: deserialization data type. + :return: Deserialized basic type. :rtype: str, int, float or bool :raises: TypeError if string format is not valid. """ @@ -1741,24 +1851,23 @@ def deserialize_basic(self, attr, data_type): if data_type == "str": # None or '', node is empty string. return "" - else: - # None or '', node with a strong type is None. - # Don't try to model "empty bool" or "empty int" - return None + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None if data_type == "bool": if attr in [True, False, 1, 0]: return bool(attr) - elif isinstance(attr, str): + if isinstance(attr, str): if attr.lower() in ["true", "1"]: return True - elif attr.lower() in ["false", "0"]: + if attr.lower() in ["false", "0"]: return False raise TypeError("Invalid boolean value: {}".format(attr)) if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec + return eval(data_type)(attr) # nosec # pylint: disable=eval-used @staticmethod def deserialize_unicode(data): @@ -1766,6 +1875,7 @@ def deserialize_unicode(data): as a string. :param str data: response string to be deserialized. + :return: Deserialized string. :rtype: str or unicode """ # We might be here because we have an enum modeled as string, @@ -1779,8 +1889,7 @@ def deserialize_unicode(data): return data except NameError: return str(data) - else: - return str(data) + return str(data) @staticmethod def deserialize_enum(data, enum_obj): @@ -1792,6 +1901,7 @@ def deserialize_enum(data, enum_obj): :param str data: Response string to be deserialized. If this value is None or invalid it will be returned as-is. :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. 
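# ``deserialize_enum`` above accepts an exact value, a member index, or a
# case-insensitive value match (illustrative enum, not part of this package):
from enum import Enum

class Color(Enum):
    RED = "Red"
    BLUE = "Blue"

Deserializer.deserialize_enum("Red", Color)  # -> Color.RED
Deserializer.deserialize_enum(1, Color)      # -> Color.BLUE (index into members)
Deserializer.deserialize_enum("red", Color)  # -> Color.RED (case-insensitive fallback)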
:rtype: Enum """ if isinstance(data, enum_obj) or data is None: @@ -1802,9 +1912,9 @@ def deserialize_enum(data, enum_obj): # Workaround. We might consider remove it in the future. try: return list(enum_obj.__members__.values())[data] - except IndexError: + except IndexError as exc: error = "{!r} is not a valid index for enum {!r}" - raise DeserializationError(error.format(data, enum_obj)) + raise DeserializationError(error.format(data, enum_obj)) from exc try: return enum_obj(str(data)) except ValueError: @@ -1820,6 +1930,7 @@ def deserialize_bytearray(attr): """Deserialize string into bytearray. :param str attr: response string to be deserialized. + :return: Deserialized bytearray :rtype: bytearray :raises: TypeError if string format invalid. """ @@ -1832,6 +1943,7 @@ def deserialize_base64(attr): """Deserialize base64 encoded string into string. :param str attr: response string to be deserialized. + :return: Deserialized base64 string :rtype: bytearray :raises: TypeError if string format invalid. """ @@ -1847,8 +1959,9 @@ def deserialize_decimal(attr): """Deserialize string into Decimal object. :param str attr: response string to be deserialized. - :rtype: Decimal + :return: Deserialized decimal :raises: DeserializationError if string format invalid. + :rtype: decimal """ if isinstance(attr, ET.Element): attr = attr.text @@ -1863,6 +1976,7 @@ def deserialize_long(attr): """Deserialize string into long (Py2) or int (Py3). :param str attr: response string to be deserialized. + :return: Deserialized int :rtype: long or int :raises: ValueError if string format invalid. """ @@ -1875,6 +1989,7 @@ def deserialize_duration(attr): """Deserialize ISO-8601 formatted string into TimeDelta object. :param str attr: response string to be deserialized. + :return: Deserialized duration :rtype: TimeDelta :raises: DeserializationError if string format invalid. """ @@ -1885,14 +2000,14 @@ def deserialize_duration(attr): except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." raise DeserializationError(msg) from err - else: - return duration + return duration @staticmethod def deserialize_date(attr): """Deserialize ISO-8601 formatted string into Date object. :param str attr: response string to be deserialized. + :return: Deserialized date :rtype: Date :raises: DeserializationError if string format invalid. """ @@ -1908,6 +2023,7 @@ def deserialize_time(attr): """Deserialize ISO-8601 formatted string into time object. :param str attr: response string to be deserialized. + :return: Deserialized time :rtype: datetime.time :raises: DeserializationError if string format invalid. """ @@ -1922,6 +2038,7 @@ def deserialize_rfc(attr): """Deserialize RFC-1123 formatted string into Datetime object. :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime :rtype: Datetime :raises: DeserializationError if string format invalid. """ @@ -1937,14 +2054,14 @@ def deserialize_rfc(attr): except ValueError as err: msg = "Cannot deserialize to rfc datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj @staticmethod def deserialize_iso(attr): """Deserialize ISO-8601 formatted string into Datetime object. :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime :rtype: Datetime :raises: DeserializationError if string format invalid. 
""" @@ -1974,8 +2091,7 @@ def deserialize_iso(attr): except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj @staticmethod def deserialize_unix(attr): @@ -1983,6 +2099,7 @@ def deserialize_unix(attr): This is represented as seconds. :param int attr: Object to be serialized. + :return: Deserialized datetime :rtype: Datetime :raises: DeserializationError if format invalid """ @@ -1994,5 +2111,4 @@ def deserialize_unix(attr): except ValueError as err: msg = "Cannot deserialize to unix datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_vendor.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_vendor.py index d24f8d5ac6cb..5bf1f6cfcc85 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_vendor.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_vendor.py @@ -1,7 +1,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -11,7 +11,6 @@ from ._configuration import LogsIngestionClientConfiguration if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from azure.core import PipelineClient from ._serialization import Deserializer, Serializer diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_version.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_version.py index 2e762a036cb0..be71c81bd282 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_version.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_version.py @@ -2,8 +2,8 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.5" +VERSION = "1.0.0b1" diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/__init__.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/__init__.py index 93e4aa376af9..6273743bd569 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/__init__.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/__init__.py @@ -2,18 +2,28 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._patch import LogsIngestionClient +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import +from ._client import LogsIngestionClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] from ._patch import patch_sdk as _patch_sdk __all__ = [ "LogsIngestionClient", ] - +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_client.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_client.py index f7160c828671..104814041484 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_client.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_client.py @@ -2,12 +2,13 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from copy import deepcopy from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self from azure.core import AsyncPipelineClient from azure.core.pipeline import policies @@ -18,20 +19,18 @@ from ._operations import LogsIngestionClientOperationsMixin if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -class LogsIngestionClient(LogsIngestionClientOperationsMixin): # pylint: disable=client-accepts-api-version-keyword - """Azure Monitor Data Collection Python Client. +class LogsIngestionClient(LogsIngestionClientOperationsMixin): + """Azure Monitor data collection client. - :param endpoint: The Data Collection Endpoint for the Data Collection Rule, for example - https://dce-name.eastus-2.ingest.monitor.azure.com. Required. + :param endpoint: Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. Required. + :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: Api Version. Default value is "2023-01-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Default value is "2023-01-01". + Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ @@ -92,7 +91,7 @@ def send_request( async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "LogsIngestionClient": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_configuration.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_configuration.py index b3023503d074..d0718255c19c 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_configuration.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_configuration.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -10,26 +10,24 @@ from azure.core.pipeline import policies +from .._version import VERSION + if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -VERSION = "unknown" - -class LogsIngestionClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long +class LogsIngestionClientConfiguration: # pylint: disable=too-many-instance-attributes """Configuration for LogsIngestionClient. Note that all parameters used to create this instance are saved as instance attributes. - :param endpoint: The Data Collection Endpoint for the Data Collection Rule, for example - https://dce-name.eastus-2.ingest.monitor.azure.com. Required. + :param endpoint: Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. Required. + :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: Api Version. Default value is "2023-01-01". Note that overriding this - default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Default value is "2023-01-01". + Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ @@ -44,7 +42,7 @@ def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: self.endpoint = endpoint self.credential = credential self.api_version = api_version - self.credential_scopes = kwargs.pop("credential_scopes", ["https://monitor.azure.com//.default"]) + self.credential_scopes = kwargs.pop("credential_scopes", ["https://monitor.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "monitor-ingestion/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/__init__.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/__init__.py index dd3d0ef4d235..34ad2ded0df9 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/__init__.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/__init__.py @@ -2,17 +2,24 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._patch import LogsIngestionClientOperationsMixin +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import +from ._operations import LogsIngestionClientOperationsMixin # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ "LogsIngestionClientOperationsMixin", ] - +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_operations.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_operations.py index 965e3180520e..0fd4375846b7 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_operations.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_operations.py @@ -1,14 +1,14 @@ -# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
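The hunk above also corrects the default credential scope from "https://monitor.azure.com//.default" (doubled slash) to "https://monitor.azure.com/.default". For sovereign clouds, the scope can be overridden through the credential_scopes keyword that the configuration pops from kwargs; a sketch, with the audience value mirroring the AUDIENCE_MAP used in the tests later in this diff and a placeholder endpoint:

    from azure.identity import DefaultAzureCredential
    from azure.monitor.ingestion import LogsIngestionClient

    # Azure US Government cloud; the endpoint value is a placeholder.
    client = LogsIngestionClient(
        endpoint="https://example-dce.usgovvirginia-1.ingest.monitor.azure.us",
        credential=DefaultAzureCredential(),
        credential_scopes=["https://monitor.azure.us/.default"],
    )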
# -------------------------------------------------------------------------- from io import IOBase +import json import sys -from typing import Any, Callable, Dict, IO, List, Optional, Type, TypeVar, Union, overload +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -23,73 +23,73 @@ from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict +from ..._model_base import SdkJSONEncoder from ..._operations._operations import build_logs_ingestion_upload_request from .._vendor import LogsIngestionClientMixinABC if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object + from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class LogsIngestionClientOperationsMixin(LogsIngestionClientMixinABC): + @overload - async def _upload( # pylint: disable=inconsistent-return-statements + async def _upload( self, rule_id: str, - stream: str, - body: List[JSON], + stream_name: str, + body: List[Dict[str, Any]], *, content_encoding: Optional[str] = None, content_type: str = "application/json", **kwargs: Any - ) -> None: - ... - + ) -> None: ... @overload - async def _upload( # pylint: disable=inconsistent-return-statements + async def _upload( self, rule_id: str, - stream: str, + stream_name: str, body: IO[bytes], *, content_encoding: Optional[str] = None, content_type: str = "application/json", **kwargs: Any - ) -> None: - ... + ) -> None: ... @distributed_trace_async - async def _upload( # pylint: disable=inconsistent-return-statements + async def _upload( self, rule_id: str, - stream: str, - body: Union[List[JSON], IO[bytes]], + stream_name: str, + body: Union[List[Dict[str, Any]], IO[bytes]], *, content_encoding: Optional[str] = None, **kwargs: Any ) -> None: """Ingestion API used to directly ingest data using Data Collection Rules. - See error response code and error response message for more detail. + Ingestion API used to directly ingest data using Data Collection Rules. - :param rule_id: The immutable Id of the Data Collection Rule resource. Required. + :param rule_id: The immutable ID of the Data Collection Rule resource. Required. :type rule_id: str - :param stream: The streamDeclaration name as defined in the Data Collection Rule. Required. - :type stream: str - :param body: An array of objects matching the schema defined by the provided stream. Is either - a [JSON] type or a IO[bytes] type. Required. - :type body: list[JSON] or IO[bytes] - :keyword content_encoding: gzip. Default value is None. + :param stream_name: The streamDeclaration name as defined in the Data Collection Rule. + Required. + :type stream_name: str + :param body: The array of objects matching the schema defined by the provided stream. Is either + a [{str: Any}] type or an IO[bytes] type. Required. + :type body: list[dict[str, any]] or IO[bytes] + :keyword content_encoding: The content encoding of the request body, which is always 'gzip'. + Default value is None.
:paramtype content_encoding: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -104,20 +104,18 @@ async def _upload( # pylint: disable=inconsistent-return-statements cls: ClsType[None] = kwargs.pop("cls", None) content_type = content_type or "application/json" - _json = None _content = None if isinstance(body, (IOBase, bytes)): _content = body else: - _json = body + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore _request = build_logs_ingestion_upload_request( rule_id=rule_id, - stream=stream, + stream_name=stream_name, content_encoding=content_encoding, content_type=content_type, api_version=self._config.api_version, - json=_json, content=_content, headers=_headers, params=_params, @@ -135,8 +133,6 @@ async def _upload( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_patch.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_patch.py index a1dfc23c02ca..f7dd32510333 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_patch.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_patch.py @@ -6,90 +6,9 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from collections.abc import Sequence -from io import IOBase -import logging -import sys -from typing import Callable, cast, List, Any, Awaitable, Optional, Union, IO +from typing import List -from ._operations import LogsIngestionClientOperationsMixin as GeneratedOps -from ..._helpers import _create_gzip_requests, GZIP_MAGIC_NUMBER -from ..._models import LogsUploadError - -if sys.version_info >= (3, 9): - from collections.abc import Mapping, MutableMapping -else: - from typing import Mapping, MutableMapping # type: ignore # pylint: disable=ungrouped-imports - - -_LOGGER = logging.getLogger(__name__) -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object - - -class LogsIngestionClientOperationsMixin(GeneratedOps): - async def upload( - self, - rule_id: str, - stream_name: str, - logs: Union[List[JSON], IO[bytes]], - *, - on_error: Optional[Callable[[LogsUploadError], Awaitable[None]]] = None, - **kwargs: Any - ) -> None: - """Ingestion API used to directly ingest data using Data Collection Rules. - - A list of logs is divided into chunks of 1MB or less, then each chunk is gzip-compressed and uploaded. - If an I/O stream is passed in, the stream is uploaded as-is. - - :param rule_id: The immutable ID of the Data Collection Rule resource. - :type rule_id: str - :param stream_name: The streamDeclaration name as defined in the Data Collection Rule. - :type stream_name: str - :param logs: An array of objects matching the schema defined by the provided stream. - :type logs: list[JSON] or IO - :keyword on_error: The callback function that is called when a chunk of logs fails to upload. 
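The customization being removed here (relocated by the regeneration) documents the upload strategy: lists of logs are split into chunks of roughly 1 MB, each chunk is gzip-compressed before upload, and I/O streams are passed through as-is after sniffing the gzip magic number. A rough standalone sketch of that sniff-and-compress idea, independent of the SDK's private helpers:

    import gzip
    import json

    GZIP_MAGIC_NUMBER = b"\x1f\x8b"  # the first two bytes of any gzip stream

    def detect_content_encoding(stream):
        # Only seekable streams can be sniffed and then rewound.
        if stream.seekable():
            magic = stream.read(2)
            stream.seek(0)
            if magic == GZIP_MAGIC_NUMBER:
                return "gzip"
        return None

    # Compressing one chunk of logs, as the removed code does per chunk via _create_gzip_requests.
    chunk = [{"Time": "2021-12-08T23:51:14.1104269Z", "Computer": "Computer1"}]
    compressed = gzip.compress(json.dumps(chunk).encode("utf-8"))
    assert compressed[:2] == GZIP_MAGIC_NUMBER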
- This function should expect one argument that corresponds to an "LogsUploadError" object. - If no function is provided, then the first exception encountered will be raised. - :paramtype on_error: Optional[Callable[[~azure.monitor.ingestion.LogsUploadError], None]] - :return: None - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - if isinstance(logs, IOBase): - if not logs.readable(): - raise ValueError("The 'logs' stream must be readable.") - content_encoding = None - # Check if the stream is gzip-compressed if stream is seekable. - if logs.seekable(): - if logs.read(2) == GZIP_MAGIC_NUMBER: - content_encoding = "gzip" - logs.seek(0) - - await super()._upload(rule_id, stream=stream_name, body=logs, content_encoding=content_encoding, **kwargs) - return - - if not isinstance(logs, Sequence) or isinstance(logs, str): - raise ValueError( - "The 'logs' parameter must be a list of mappings/dictionaries or an I/O stream that is readable." - ) - - for gzip_data, log_chunk in _create_gzip_requests(cast(List[JSON], logs)): - try: - await super()._upload( # type: ignore - rule_id, stream=stream_name, body=gzip_data, content_encoding="gzip", **kwargs # type: ignore - ) - - except Exception as err: # pylint: disable=broad-except - if on_error: - await on_error(LogsUploadError(error=err, failed_logs=cast(List[Mapping[str, Any]], log_chunk))) - else: - _LOGGER.error("Failed to upload chunk containing %d log entries", len(log_chunk)) - raise err - - -__all__: List[str] = [ - "LogsIngestionClientOperationsMixin" -] # Add all objects you want publicly available to users at this package level +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_patch.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_patch.py index ef37c4edbdb2..f7dd32510333 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_patch.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_patch.py @@ -7,42 +7,8 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ from typing import List -from ._client import LogsIngestionClient as GeneratedClient - -class LogsIngestionClient(GeneratedClient): - """The asynchronous client for uploading logs to Azure Monitor. - - :param endpoint: The Data Collection Endpoint for the Data Collection Rule, for example - https://dce-name.eastus-2.ingest.monitor.azure.com. - :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword api_version: Api Version. Default value is "2023-01-01". Note that overriding - this default value may result in unsupported behavior. - :paramtype api_version: str - - .. admonition:: Example: - - .. literalinclude:: ../samples/async_samples/sample_authentication_async.py - :start-after: [START create_client_public_cloud_async] - :end-before: [END create_client_public_cloud_async] - :language: python - :dedent: 4 - :caption: Creating the LogsIngestionClient with DefaultAzureCredential. - - .. admonition:: Example: - - .. 
literalinclude:: ../samples/async_samples/sample_authentication_async.py - :start-after: [START create_client_sovereign_cloud_async] - :end-before: [END create_client_sovereign_cloud_async] - :language: python - :dedent: 4 - :caption: Creating the LogsIngestionClient for use with a sovereign cloud (i.e. non-public cloud). - """ - - -__all__: List[str] = ["LogsIngestionClient"] +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_vendor.py b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_vendor.py index c6566b4a63ae..6d39ec82c2a7 100644 --- a/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_vendor.py +++ b/sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_vendor.py @@ -1,7 +1,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -11,7 +11,6 @@ from ._configuration import LogsIngestionClientConfiguration if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from azure.core import AsyncPipelineClient from .._serialization import Deserializer, Serializer diff --git a/sdk/monitor/azure-monitor-ingestion/samples/async_samples/sample_custom_error_callback_async.py b/sdk/monitor/azure-monitor-ingestion/samples/async_samples/sample_custom_error_callback_async.py index 58a5c470841d..57365010be70 100644 --- a/sdk/monitor/azure-monitor-ingestion/samples/async_samples/sample_custom_error_callback_async.py +++ b/sdk/monitor/azure-monitor-ingestion/samples/async_samples/sample_custom_error_callback_async.py @@ -59,7 +59,11 @@ async def on_error_pass(_) -> None: # Sample callback that raises the error if it corresponds to a specific HTTP error code. # This aborts the rest of the upload. async def on_error_abort(error: LogsUploadError) -> None: - if isinstance(error.error, HttpResponseError) and cast(HttpResponseError, error.error).status_code in (400, 401, 403): + if isinstance(error.error, HttpResponseError) and cast(HttpResponseError, error.error).status_code in ( + 400, + 401, + 403, + ): print("Aborting upload...") raise error.error diff --git a/sdk/monitor/azure-monitor-ingestion/samples/sample_custom_error_callback.py b/sdk/monitor/azure-monitor-ingestion/samples/sample_custom_error_callback.py index fdaa7207bbe3..bc1efcdd2a61 100644 --- a/sdk/monitor/azure-monitor-ingestion/samples/sample_custom_error_callback.py +++ b/sdk/monitor/azure-monitor-ingestion/samples/sample_custom_error_callback.py @@ -45,7 +45,8 @@ {"Time": "2021-12-08T23:51:14.1104269Z", "Computer": "Computer2", "AdditionalContext": "context"}, ] -failed_logs: List[MutableMapping[str, str]] = [] +failed_logs: List[MutableMapping[str, str]] = [] + # Sample callback that stores the logs that failed to upload. def on_error_save(error: LogsUploadError) -> None: @@ -61,7 +62,11 @@ def on_error_pass(_) -> None: # Sample callback that raises the error if it corresponds to a specific HTTP error code. # This aborts the rest of the upload. 
def on_error_abort(error: LogsUploadError) -> None: - if isinstance(error.error, HttpResponseError) and cast(HttpResponseError, error.error).status_code in (400, 401, 403): + if isinstance(error.error, HttpResponseError) and cast(HttpResponseError, error.error).status_code in ( + 400, + 401, + 403, + ): print("Aborting upload...") raise error.error diff --git a/sdk/monitor/azure-monitor-ingestion/setup.py b/sdk/monitor/azure-monitor-ingestion/setup.py index ba017184ce6a..ddb54220e8fd 100644 --- a/sdk/monitor/azure-monitor-ingestion/setup.py +++ b/sdk/monitor/azure-monitor-ingestion/setup.py @@ -1,91 +1,71 @@ -#!/usr/bin/env python - -#------------------------------------------------------------------------- +# coding=utf-8 +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -#-------------------------------------------------------------------------- +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# coding: utf-8 +import os import re -import os.path -from io import open -from setuptools import find_packages, setup +from setuptools import setup, find_packages + -# Change the PACKAGE_NAME only to change folder and different name PACKAGE_NAME = "azure-monitor-ingestion" PACKAGE_PPRINT_NAME = "Azure Monitor Ingestion" # a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace('-', '/') -# a-b-c => a.b.c -namespace_name = PACKAGE_NAME.replace('-', '.') - -# azure v0.x is not compatible with this package -# azure v0.x used to have a __version__ attribute (newer versions don't) -try: - import azure - try: - ver = azure.__version__ - raise Exception( - 'This package is incompatible with azure=={}. '.format(ver) + - 'Uninstall it with "pip uninstall azure".' 
- ) - except AttributeError: - pass -except ImportError: - pass +package_folder_path = PACKAGE_NAME.replace("-", "/") # Version extraction inspired from 'requests' -with open(os.path.join(package_folder_path, 'version.py') - if os.path.exists(os.path.join(package_folder_path, 'version.py')) - else os.path.join(package_folder_path, '_version.py'), 'r') as fd: - version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', - fd.read(), re.MULTILINE).group(1) +with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) if not version: - raise RuntimeError('Cannot find version information') + raise RuntimeError("Cannot find version information") -with open('README.md', encoding='utf-8') as f: - readme = f.read() -with open('CHANGELOG.md', encoding='utf-8') as f: - changelog = f.read() setup( name=PACKAGE_NAME, version=version, - description='Microsoft {} Client Library for Python'.format(PACKAGE_PPRINT_NAME), - long_description=readme + '\n\n' + changelog, - long_description_content_type='text/markdown', - license='MIT License', - author='Microsoft Corporation', - author_email='azpysdkhelp@microsoft.com', - url='https://github.com/Azure/azure-sdk-for-python', + description="Microsoft {} Client Library for Python".format(PACKAGE_PPRINT_NAME), + long_description=open("README.md", "r").read(), + long_description_content_type="text/markdown", + license="MIT License", + author="Microsoft Corporation", + author_email="azpysdkhelp@microsoft.com", + url="https://github.com/Azure/azure-sdk-for-python/tree/main/sdk", keywords="azure, azure sdk", classifiers=[ - "Development Status :: 5 - Production/Stable", - 'Programming Language :: Python', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: 3.12', - 'License :: OSI Approved :: MIT License', + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "License :: OSI Approved :: MIT License", ], - python_requires=">=3.8", zip_safe=False, - packages=find_packages(exclude=[ - 'tests', - 'samples', - # Exclude packages that will be covered by PEP420 or nspkg - 'azure', - 'azure.monitor', - ]), + packages=find_packages( + exclude=[ + "tests", + # Exclude packages that will be covered by PEP420 or nspkg + "azure", + "azure.monitor", + ] + ), include_package_data=True, + package_data={ + "azure.monitor.ingestion": ["py.typed"], + }, install_requires=[ - 'azure-core>=1.28.0', - 'isodate>=0.6.0', - "typing-extensions>=4.0.1" - ] + "isodate>=0.6.1", + "azure-core>=1.30.0", + "typing-extensions>=4.6.0", + ], + python_requires=">=3.8", ) diff --git a/sdk/monitor/azure-monitor-ingestion/tests/base_testcase.py b/sdk/monitor/azure-monitor-ingestion/tests/base_testcase.py index f38c9cbb6e4a..cde9b62b753f 100644 --- a/sdk/monitor/azure-monitor-ingestion/tests/base_testcase.py +++ b/sdk/monitor/azure-monitor-ingestion/tests/base_testcase.py @@ -12,7 +12,7 @@ AUDIENCE_MAP = { "AzureCloud": 
"https://monitor.azure.com", "AzureChinaCloud": "https://monitor.azure.cn", - "AzureUSGovernment": "https://monitor.azure.us" + "AzureUSGovernment": "https://monitor.azure.us", } diff --git a/sdk/monitor/azure-monitor-ingestion/tests/conftest.py b/sdk/monitor/azure-monitor-ingestion/tests/conftest.py index fb21e700afb2..f2fdaab5cfd2 100644 --- a/sdk/monitor/azure-monitor-ingestion/tests/conftest.py +++ b/sdk/monitor/azure-monitor-ingestion/tests/conftest.py @@ -32,7 +32,7 @@ add_general_regex_sanitizer, add_header_regex_sanitizer, set_custom_default_matcher, - add_oauth_response_sanitizer + add_oauth_response_sanitizer, ) @@ -60,15 +60,14 @@ def add_sanitizers(test_proxy, environment_variables): ENV_CLIENT_SECRET: TEST_ID, ENV_DCE: TEST_DCE, ENV_STREAM_NAME: TEST_STREAM_NAME, - ENV_DCR_ID: TEST_ID + ENV_DCR_ID: TEST_ID, } environment_variables.sanitize_batch(sanitization_mapping) set_custom_default_matcher( compare_bodies=False, excluded_headers="Authorization,Content-Length,x-ms-client-request-id,x-ms-request-id" ) add_general_regex_sanitizer( - value="fakeresource", - regex="(?<=\\/\\/)[a-z-]+(?=\\.westus2-1\\.ingest\\.monitor\\.azure\\.com)" + value="fakeresource", regex="(?<=\\/\\/)[a-z-]+(?=\\.westus2-1\\.ingest\\.monitor\\.azure\\.com)" ) add_body_key_sanitizer(json_path="access_token", value="fakekey") add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") @@ -80,19 +79,16 @@ def monitor_info(environment_variables): yield { "stream_name": environment_variables.get(ENV_STREAM_NAME), "dce": environment_variables.get(ENV_DCE), - "dcr_id": environment_variables.get(ENV_DCR_ID) + "dcr_id": environment_variables.get(ENV_DCR_ID), } @pytest.fixture(scope="session") def large_data(): logs = [] - content = "a" * (1024 * 100) # 100 KiB string + content = "a" * (1024 * 100) # 100 KiB string # Ensure total size is > 2 MiB data for i in range(24): - logs.append({ - "Time": datetime.now().isoformat(), - "AdditionalContext": content - }) + logs.append({"Time": datetime.now().isoformat(), "AdditionalContext": content}) return logs diff --git a/sdk/monitor/azure-monitor-ingestion/tests/perf_tests/upload_logs.py b/sdk/monitor/azure-monitor-ingestion/tests/perf_tests/upload_logs.py index 4f6b69e5fc91..176665f16b84 100644 --- a/sdk/monitor/azure-monitor-ingestion/tests/perf_tests/upload_logs.py +++ b/sdk/monitor/azure-monitor-ingestion/tests/perf_tests/upload_logs.py @@ -1,8 +1,8 @@ -#------------------------------------------------------------------------- +# ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. 
-#-------------------------------------------------------------------------- +# -------------------------------------------------------------------------- from datetime import datetime import json import random @@ -17,8 +17,9 @@ ALPHANUMERIC_CHARACTERS = string.ascii_letters + string.digits + def _get_random_string(length: int): - return ''.join(random.choice(ALPHANUMERIC_CHARACTERS) for _ in range(length)) + return "".join(random.choice(ALPHANUMERIC_CHARACTERS) for _ in range(length)) def _get_repeating_string(length: int): @@ -37,13 +38,9 @@ def __init__(self, arguments): self.async_credential = AsyncDefaultAzureCredential() # Create clients - self.client = LogsIngestionClient( - endpoint=self.data_collection_endpoint, - credential=self.credential - ) + self.client = LogsIngestionClient(endpoint=self.data_collection_endpoint, credential=self.credential) self.async_client = AsyncLogsIngestionClient( - endpoint=self.data_collection_endpoint, - credential=self.async_credential + endpoint=self.data_collection_endpoint, credential=self.async_credential ) async def close(self): @@ -58,36 +55,43 @@ async def setup(self): # Create log entries to upload self.logs = [] for i in range(self.args.num_logs): - content = _get_random_string(self.args.log_content_length) if self.args.random_log_content \ + content = ( + _get_random_string(self.args.log_content_length) + if self.args.random_log_content else _get_repeating_string(self.args.log_content_length) - self.logs.append({ - "Time": datetime.now().isoformat(), - "Computer": f"Computer {i}", - "AdditionalContext": content - }) - print(f'{len(json.dumps(self.logs))} bytes of logs to be uploaded.') + ) + self.logs.append( + {"Time": datetime.now().isoformat(), "Computer": f"Computer {i}", "AdditionalContext": content} + ) + print(f"{len(json.dumps(self.logs))} bytes of logs to be uploaded.") @staticmethod def add_arguments(parser): super(UploadLogsTest, UploadLogsTest).add_arguments(parser) - parser.add_argument("-n", "--num-logs", nargs="?", type=int, - help="Number of logs to be uploaded. Defaults to 100", default=100) - parser.add_argument("-l", "--log-content-length", nargs="?", type=int, - help="Length of the 'AdditionalContext' value for each log entry. Defaults to 20", default=20) - parser.add_argument("-r", "--random-log-content", action="store_true", + parser.add_argument( + "-n", "--num-logs", nargs="?", type=int, help="Number of logs to be uploaded. Defaults to 100", default=100 + ) + parser.add_argument( + "-l", + "--log-content-length", + nargs="?", + type=int, + help="Length of the 'AdditionalContext' value for each log entry. Defaults to 20", + default=20, + ) + parser.add_argument( + "-r", + "--random-log-content", + action="store_true", help="Whether to use a random alphanumeric string for each 'AdditionalContext' value. " - "If False, uses a repeating 'a' character. Defaults to False", default=False) + "If False, uses a repeating 'a' character. 
Defaults to False", + default=False, + ) def run_sync(self): - self.client.upload( - rule_id=self.data_collection_rule_id, - stream_name=self.stream_name, - logs=self.logs - ) + self.client.upload(rule_id=self.data_collection_rule_id, stream_name=self.stream_name, logs=self.logs) async def run_async(self): await self.async_client.upload( - rule_id=self.data_collection_rule_id, - stream_name=self.stream_name, - logs=self.logs + rule_id=self.data_collection_rule_id, stream_name=self.stream_name, logs=self.logs ) diff --git a/sdk/monitor/azure-monitor-ingestion/tests/test_helpers.py b/sdk/monitor/azure-monitor-ingestion/tests/test_helpers.py index c5cac5acb8f2..778f5dabde72 100644 --- a/sdk/monitor/azure-monitor-ingestion/tests/test_helpers.py +++ b/sdk/monitor/azure-monitor-ingestion/tests/test_helpers.py @@ -14,17 +14,17 @@ _create_gzip_requests, _split_chunks, MAX_CHUNK_SIZE_BYTES, - GZIP_MAGIC_NUMBER + GZIP_MAGIC_NUMBER, ) ALPHANUMERIC_CHARACTERS = string.ascii_letters + string.digits -random.seed(42) # For repeatibility +random.seed(42) # For repeatibility def _get_random_string(length: int): - return ''.join(random.choice(ALPHANUMERIC_CHARACTERS) for _ in range(length)) + return "".join(random.choice(ALPHANUMERIC_CHARACTERS) for _ in range(length)) class TestHelpers: @@ -39,10 +39,10 @@ def test_split_chunks(self, content): chunks = list(_split_chunks(logs, max_size_bytes=entry_size)) assert len(chunks) == 100 - chunks = list(_split_chunks(logs, max_size_bytes=entry_size*2)) + chunks = list(_split_chunks(logs, max_size_bytes=entry_size * 2)) assert len(chunks) == 50 - chunks = list(_split_chunks(logs, max_size_bytes=entry_size*100)) + chunks = list(_split_chunks(logs, max_size_bytes=entry_size * 100)) assert len(chunks) == 1 def test_split_chunks_larger_than_max(self): @@ -58,4 +58,4 @@ def test_create_gzip_requests(self, num_entries): for compressed_bytes, raw_data in _create_gzip_requests(logs): assert len(compressed_bytes) < MAX_CHUNK_SIZE_BYTES assert compressed_bytes[:2] == GZIP_MAGIC_NUMBER - assert zlib.decompress(compressed_bytes, 16+zlib.MAX_WBITS) == json.dumps(raw_data).encode("utf-8") + assert zlib.decompress(compressed_bytes, 16 + zlib.MAX_WBITS) == json.dumps(raw_data).encode("utf-8") diff --git a/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion.py b/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion.py index d194c7921233..6a921511a41f 100644 --- a/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion.py +++ b/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion.py @@ -21,19 +21,13 @@ { "Time": "2021-12-08T23:51:14.1104269Z", "Computer": "Computer1", - "AdditionalContext": { - "testContextKey": 3, - "CounterName": "AppMetric1" - } + "AdditionalContext": {"testContextKey": 3, "CounterName": "AppMetric1"}, }, { "Time": "2021-12-08T23:51:14.1104269Z", "Computer": "Computer2", - "AdditionalContext": { - "testContextKey": 2, - "CounterName": "AppMetric1" - } - } + "AdditionalContext": {"testContextKey": 2, "CounterName": "AppMetric1"}, + }, ] @@ -41,27 +35,31 @@ class TestLogsIngestionClient(LogsIngestionClientTestCase): def test_send_logs(self, recorded_test, monitor_info): client = self.get_client( - LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce']) + LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info["dce"] + ) with client: - client.upload(rule_id=monitor_info['dcr_id'], stream_name=monitor_info['stream_name'], logs=LOGS_BODY) + 
client.upload(rule_id=monitor_info["dcr_id"], stream_name=monitor_info["stream_name"], logs=LOGS_BODY) def test_send_logs_large(self, recorded_test, monitor_info, large_data): client = self.get_client( - LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce']) + LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info["dce"] + ) with client: - client.upload(rule_id=monitor_info['dcr_id'], stream_name=monitor_info['stream_name'], logs=large_data) + client.upload(rule_id=monitor_info["dcr_id"], stream_name=monitor_info["stream_name"], logs=large_data) def test_send_logs_error(self, recorded_test, monitor_info): client = self.get_client( - LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce']) + LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info["dce"] + ) body = [{"foo": "bar"}] with pytest.raises(HttpResponseError) as ex: - client.upload(rule_id='bad-rule', stream_name=monitor_info['stream_name'], logs=body) + client.upload(rule_id="bad-rule", stream_name=monitor_info["stream_name"], logs=body) def test_send_logs_error_custom(self, recorded_test, monitor_info): client = self.get_client( - LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce']) + LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info["dce"] + ) body = [{"foo": "bar"}] def on_error(e): @@ -71,39 +69,41 @@ def on_error(e): on_error.called = False - client.upload( - rule_id='bad-rule', stream_name=monitor_info['stream_name'], logs=body, on_error=on_error) + client.upload(rule_id="bad-rule", stream_name=monitor_info["stream_name"], logs=body, on_error=on_error) assert on_error.called def test_send_logs_json_file(self, recorded_test, monitor_info): client = self.get_client( - LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce']) + LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info["dce"] + ) - temp_file = str(uuid.uuid4()) + '.json' - with open(temp_file, 'w') as f: + temp_file = str(uuid.uuid4()) + ".json" + with open(temp_file, "w") as f: json.dump(LOGS_BODY, f) with client: - with open(temp_file, 'r') as f: - client.upload(rule_id=monitor_info['dcr_id'], stream_name=monitor_info['stream_name'], logs=f) + with open(temp_file, "r") as f: + client.upload(rule_id=monitor_info["dcr_id"], stream_name=monitor_info["stream_name"], logs=f) os.remove(temp_file) @pytest.mark.live_test_only("Issues recording binary streams with test-proxy") def test_send_logs_gzip_file(self, monitor_info): client = self.get_client( - LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce']) + LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info["dce"] + ) - temp_file = str(uuid.uuid4()) + '.json.gz' - with gzip.open(temp_file, 'wb') as f: - f.write(json.dumps(LOGS_BODY).encode('utf-8')) + temp_file = str(uuid.uuid4()) + ".json.gz" + with gzip.open(temp_file, "wb") as f: + f.write(json.dumps(LOGS_BODY).encode("utf-8")) - with open(temp_file, 'rb') as f: - client.upload(rule_id=monitor_info['dcr_id'], stream_name=monitor_info['stream_name'], logs=f) + with open(temp_file, "rb") as f: + client.upload(rule_id=monitor_info["dcr_id"], stream_name=monitor_info["stream_name"], logs=f) os.remove(temp_file) def test_abort_error_handler(self, monitor_info): client = self.get_client( - LogsIngestionClient, 
self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce']) + LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info["dce"] + ) class TestException(Exception): pass @@ -118,33 +118,38 @@ def on_error(e): with client: # No exception should be raised - with mock.patch("azure.monitor.ingestion._operations._patch.GeneratedOps._upload", - side_effect=ConnectionError): + with mock.patch( + "azure.monitor.ingestion._operations._patch.GeneratedOps._upload", side_effect=ConnectionError + ): client.upload( - rule_id=monitor_info['dcr_id'], - stream_name=monitor_info['stream_name'], + rule_id=monitor_info["dcr_id"], + stream_name=monitor_info["stream_name"], logs=LOGS_BODY, - on_error=on_error) + on_error=on_error, + ) assert on_error.called on_error.called = False # Exception should now be raised since error handler checked for RuntimeError. - with mock.patch("azure.monitor.ingestion._operations._patch.GeneratedOps._upload", - side_effect=RuntimeError): + with mock.patch( + "azure.monitor.ingestion._operations._patch.GeneratedOps._upload", side_effect=RuntimeError + ): with pytest.raises(TestException): client.upload( - rule_id=monitor_info['dcr_id'], - stream_name=monitor_info['stream_name'], + rule_id=monitor_info["dcr_id"], + stream_name=monitor_info["stream_name"], logs=LOGS_BODY, - on_error=on_error) + on_error=on_error, + ) assert on_error.called @pytest.mark.parametrize("logs", ['[{"foo": "bar"}]', "foo", {"foo": "bar"}, None]) def test_invalid_logs_format(self, monitor_info, logs): client = self.get_client( - LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info['dce']) + LogsIngestionClient, self.get_credential(LogsIngestionClient), endpoint=monitor_info["dce"] + ) with pytest.raises(ValueError): client.upload(rule_id="rule", stream_name="stream", logs=logs) diff --git a/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion_async.py b/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion_async.py index 154fd2762282..4df72825e83e 100644 --- a/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion_async.py +++ b/sdk/monitor/azure-monitor-ingestion/tests/test_logs_ingestion_async.py @@ -21,19 +21,13 @@ { "Time": "2021-12-08T23:51:14.1104269Z", "Computer": "Computer1", - "AdditionalContext": { - "testContextKey": 3, - "CounterName": "AppMetric1" - } + "AdditionalContext": {"testContextKey": 3, "CounterName": "AppMetric1"}, }, { "Time": "2021-12-08T23:51:14.1104269Z", "Computer": "Computer2", - "AdditionalContext": { - "testContextKey": 2, - "CounterName": "AppMetric1" - } - } + "AdditionalContext": {"testContextKey": 2, "CounterName": "AppMetric1"}, + }, ] @@ -42,39 +36,36 @@ class TestLogsIngestionClientAsync(LogsIngestionClientTestCase): @pytest.mark.asyncio async def test_send_logs_async(self, recorded_test, monitor_info): credential = self.get_credential(LogsIngestionClient, is_async=True) - client = self.get_client( - LogsIngestionClient, credential, endpoint=monitor_info['dce']) + client = self.get_client(LogsIngestionClient, credential, endpoint=monitor_info["dce"]) async with client: - await client.upload(rule_id=monitor_info['dcr_id'], stream_name=monitor_info['stream_name'], logs=LOGS_BODY) + await client.upload(rule_id=monitor_info["dcr_id"], stream_name=monitor_info["stream_name"], logs=LOGS_BODY) await credential.close() @pytest.mark.asyncio async def test_send_logs_large(self, recorded_test, monitor_info, large_data): credential = self.get_credential(LogsIngestionClient, is_async=True) 
- client = self.get_client( - LogsIngestionClient, credential, endpoint=monitor_info['dce']) + client = self.get_client(LogsIngestionClient, credential, endpoint=monitor_info["dce"]) async with client: await client.upload( - rule_id=monitor_info['dcr_id'], stream_name=monitor_info['stream_name'], logs=large_data) + rule_id=monitor_info["dcr_id"], stream_name=monitor_info["stream_name"], logs=large_data + ) await credential.close() @pytest.mark.asyncio async def test_send_logs_error(self, recorded_test, monitor_info): credential = self.get_credential(LogsIngestionClient, is_async=True) - client = self.get_client( - LogsIngestionClient, credential, endpoint=monitor_info['dce']) + client = self.get_client(LogsIngestionClient, credential, endpoint=monitor_info["dce"]) body = [{"foo": "bar"}] with pytest.raises(HttpResponseError) as ex: async with client: - await client.upload(rule_id='bad-rule', stream_name=monitor_info['stream_name'], logs=body) + await client.upload(rule_id="bad-rule", stream_name=monitor_info["stream_name"], logs=body) await credential.close() @pytest.mark.asyncio async def test_send_logs_error_custom(self, recorded_test, monitor_info): credential = self.get_credential(LogsIngestionClient, is_async=True) - client = self.get_client( - LogsIngestionClient, credential, endpoint=monitor_info['dce']) + client = self.get_client(LogsIngestionClient, credential, endpoint=monitor_info["dce"]) body = [{"foo": "bar"}] async def on_error(e): @@ -86,23 +77,23 @@ async def on_error(e): async with client: await client.upload( - rule_id='bad-rule', stream_name=monitor_info['stream_name'], logs=body, on_error=on_error) + rule_id="bad-rule", stream_name=monitor_info["stream_name"], logs=body, on_error=on_error + ) assert on_error.called await credential.close() @pytest.mark.asyncio async def test_send_logs_json_file(self, recorded_test, monitor_info): credential = self.get_credential(LogsIngestionClient, is_async=True) - client = self.get_client( - LogsIngestionClient, credential, endpoint=monitor_info['dce']) + client = self.get_client(LogsIngestionClient, credential, endpoint=monitor_info["dce"]) - temp_file = str(uuid.uuid4()) + '.json' - with open(temp_file, 'w') as f: + temp_file = str(uuid.uuid4()) + ".json" + with open(temp_file, "w") as f: json.dump(LOGS_BODY, f) async with client: - with open(temp_file, 'r') as f: - await client.upload(rule_id=monitor_info['dcr_id'], stream_name=monitor_info['stream_name'], logs=f) + with open(temp_file, "r") as f: + await client.upload(rule_id=monitor_info["dcr_id"], stream_name=monitor_info["stream_name"], logs=f) os.remove(temp_file) await credential.close() @@ -110,24 +101,22 @@ async def test_send_logs_json_file(self, recorded_test, monitor_info): @pytest.mark.live_test_only("Issues recording binary streams with test-proxy") async def test_send_logs_gzip_file(self, monitor_info): credential = self.get_credential(LogsIngestionClient, is_async=True) - client = self.get_client( - LogsIngestionClient, credential, endpoint=monitor_info['dce']) + client = self.get_client(LogsIngestionClient, credential, endpoint=monitor_info["dce"]) - temp_file = str(uuid.uuid4()) + '.json.gz' - with gzip.open(temp_file, 'wb') as f: - f.write(json.dumps(LOGS_BODY).encode('utf-8')) + temp_file = str(uuid.uuid4()) + ".json.gz" + with gzip.open(temp_file, "wb") as f: + f.write(json.dumps(LOGS_BODY).encode("utf-8")) async with client: - with open(temp_file, 'rb') as f: - await client.upload(rule_id=monitor_info['dcr_id'], stream_name=monitor_info['stream_name'], logs=f) 
+ with open(temp_file, "rb") as f: + await client.upload(rule_id=monitor_info["dcr_id"], stream_name=monitor_info["stream_name"], logs=f) os.remove(temp_file) await credential.close() @pytest.mark.asyncio async def test_abort_error_handler(self, monitor_info): credential = self.get_credential(LogsIngestionClient, is_async=True) - client = self.get_client( - LogsIngestionClient, credential, endpoint=monitor_info['dce']) + client = self.get_client(LogsIngestionClient, credential, endpoint=monitor_info["dce"]) class TestException(Exception): pass @@ -142,26 +131,30 @@ async def on_error(e): async with client: # No exception should be raised - with mock.patch("azure.monitor.ingestion.aio._operations._patch.GeneratedOps._upload", - side_effect=ConnectionError): + with mock.patch( + "azure.monitor.ingestion.aio._operations._patch.GeneratedOps._upload", side_effect=ConnectionError + ): await client.upload( - rule_id=monitor_info['dcr_id'], - stream_name=monitor_info['stream_name'], + rule_id=monitor_info["dcr_id"], + stream_name=monitor_info["stream_name"], logs=LOGS_BODY, - on_error=on_error) + on_error=on_error, + ) assert on_error.called on_error.called = False # Exception should now be raised since error handler checked for RuntimeError. - with mock.patch("azure.monitor.ingestion.aio._operations._patch.GeneratedOps._upload", - side_effect=RuntimeError): + with mock.patch( + "azure.monitor.ingestion.aio._operations._patch.GeneratedOps._upload", side_effect=RuntimeError + ): with pytest.raises(TestException): await client.upload( - rule_id=monitor_info['dcr_id'], - stream_name=monitor_info['stream_name'], + rule_id=monitor_info["dcr_id"], + stream_name=monitor_info["stream_name"], logs=LOGS_BODY, - on_error=on_error) + on_error=on_error, + ) assert on_error.called await credential.close() @@ -170,7 +163,7 @@ async def on_error(e): @pytest.mark.parametrize("logs", ['[{"foo": "bar"}]', "foo", {"foo": "bar"}, None]) async def test_invalid_logs_format(self, monitor_info, logs): credential = self.get_credential(LogsIngestionClient, is_async=True) - client = self.get_client(LogsIngestionClient, credential, endpoint=monitor_info['dce']) + client = self.get_client(LogsIngestionClient, credential, endpoint=monitor_info["dce"]) async with client: with pytest.raises(ValueError): diff --git a/sdk/monitor/azure-monitor-ingestion/tsp-location.yaml b/sdk/monitor/azure-monitor-ingestion/tsp-location.yaml new file mode 100644 index 000000000000..a1a74f58b8aa --- /dev/null +++ b/sdk/monitor/azure-monitor-ingestion/tsp-location.yaml @@ -0,0 +1,4 @@ +directory: specification/monitor/Ingestion +commit: 20b8ba31ffb3814d9001ad9618dc48bf3def925d +repo: Azure/azure-rest-api-specs +additionalDirectories:
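Taken together, the samples and tests above exercise the on_error callback path of upload. A minimal end-to-end sketch of collecting failed chunks while aborting on authorization errors; the endpoint, rule ID, and stream name are placeholders:

    from typing import Any, Dict, List

    from azure.core.exceptions import HttpResponseError
    from azure.identity import DefaultAzureCredential
    from azure.monitor.ingestion import LogsIngestionClient, LogsUploadError

    failed_logs: List[Dict[str, Any]] = []

    def on_error(error: LogsUploadError) -> None:
        # error.error holds the exception; error.failed_logs holds the chunk that failed.
        if isinstance(error.error, HttpResponseError) and error.error.status_code == 403:
            raise error.error  # abort the remaining upload on authorization failures
        failed_logs.extend(error.failed_logs)

    client = LogsIngestionClient(
        endpoint="https://example-dce.eastus-1.ingest.monitor.azure.com",
        credential=DefaultAzureCredential(),
    )
    with client:
        client.upload(
            rule_id="dcr-immutable-id-placeholder",
            stream_name="Custom-MyTableRawData",
            logs=[{"Time": "2021-12-08T23:51:14.1104269Z", "Computer": "Computer1"}],
            on_error=on_error,
        )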