diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/README.md b/sdk/streamanalytics/azure-mgmt-streamanalytics/README.md index e7ce68dba24e..686a90135e26 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/README.md +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/README.md @@ -1,7 +1,7 @@ # Microsoft Azure SDK for Python This is the Microsoft Azure Stream Analytics Management Client Library. -This package has been tested with Python 3.7+. +This package has been tested with Python 3.8+. For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). ## _Disclaimer_ @@ -12,7 +12,7 @@ _Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For ### Prerequisites -- Python 3.7+ is required to use this package. +- Python 3.8+ is required to use this package. - [Azure subscription](https://azure.microsoft.com/free/) ### Install the package diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json index e3c6d37615ba..dc5d2c0c4aaf 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json @@ -1,11 +1,11 @@ { - "commit": "4792bce7667477529991457890b4a6b670e70508", + "commit": "31fa09b45b4386dadd55b67eec58894d0f4a19ff", "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest": "3.9.7", + "autorest": "3.10.2", "use": [ - "@autorest/python@6.7.1", - "@autorest/modelerfour@4.26.2" + "@autorest/python@6.19.0", + "@autorest/modelerfour@4.27.0" ], - "autorest_command": "autorest specification/streamanalytics/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --tag=package-2021-10-preview --use=@autorest/python@6.7.1 --use=@autorest/modelerfour@4.26.2 --version=3.9.7 --version-tolerant=False", + 
"autorest_command": "autorest specification/streamanalytics/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.19.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", "readme": "specification/streamanalytics/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py index 099fe6b664b7..32b89fbe5fd1 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py @@ -8,7 +8,6 @@ from typing import Any, TYPE_CHECKING -from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy @@ -19,7 +18,7 @@ from azure.core.credentials import TokenCredential -class StreamAnalyticsManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes +class StreamAnalyticsManagementClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long """Configuration for StreamAnalyticsManagementClient. Note that all parameters used to create this instance are saved as instance @@ -29,10 +28,14 @@ class StreamAnalyticsManagementClientConfiguration(Configuration): # pylint: di :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str + :keyword api_version: Api Version. Default value is "2020-03-01". Note that overriding this + default value may result in unsupported behavior. 
+ :paramtype api_version: str """ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: - super(StreamAnalyticsManagementClientConfiguration, self).__init__(**kwargs) + api_version: str = kwargs.pop("api_version", "2020-03-01") + if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: @@ -40,8 +43,10 @@ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs self.credential = credential self.subscription_id = subscription_id + self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "mgmt-streamanalytics/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) def _configure(self, **kwargs: Any) -> None: @@ -50,9 +55,9 @@ def _configure(self, **kwargs: Any) -> None: self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: self.authentication_policy = ARMChallengeAuthenticationPolicy( diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_patch.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_patch.py index f99e77fef986..17dbc073e01b 100644 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_patch.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_patch.py @@ -25,6 +25,7 @@ # # -------------------------------------------------------------------------- + # This file is used for handwritten extensions to the generated code. Example: # https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md def patch_sdk(): diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_serialization.py index 4bae2292227b..8139854b97bb 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_serialization.py @@ -63,8 +63,8 @@ import isodate # type: ignore -from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback -from azure.core.serialization import NULL as AzureCoreNull +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull _BOM = codecs.BOM_UTF8.decode(encoding="utf-8") @@ -124,7 +124,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: pass return ET.fromstring(data_as_str) # nosec - except ET.ParseError: + except ET.ParseError as err: # It might be because the server has an issue, and returned JSON with # content-type XML.... # So let's try a JSON load, and if it's still broken @@ -143,7 +143,9 @@ def _json_attemp(data): # The function hack is because Py2.7 messes up with exception # context otherwise. 
_LOGGER.critical("Wasn't XML not JSON, failing") - raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod @@ -170,13 +172,6 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], return None -try: - basestring # type: ignore - unicode_str = unicode # type: ignore -except NameError: - basestring = str - unicode_str = str - _LOGGER = logging.getLogger(__name__) try: @@ -295,7 +290,7 @@ class Model(object): _validation: Dict[str, Dict[str, Any]] = {} def __init__(self, **kwargs: Any) -> None: - self.additional_properties: Dict[str, Any] = {} + self.additional_properties: Optional[Dict[str, Any]] = {} for k in kwargs: if k not in self._attribute_map: _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) @@ -340,7 +335,7 @@ def _create_xml_node(cls): return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: - """Return the JSON that would be sent to azure from this model. + """Return the JSON that would be sent to server from this model. This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. 
@@ -351,7 +346,7 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) + return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore def as_dict( self, @@ -390,7 +385,7 @@ def my_key_transformer(key, attr_desc, value): :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) + return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore @classmethod def _infer_class_models(cls): @@ -415,7 +410,7 @@ def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = N :raises: DeserializationError if something went wrong """ deserializer = Deserializer(cls._infer_class_models()) - return deserializer(cls.__name__, data, content_type=content_type) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def from_dict( @@ -445,7 +440,7 @@ def from_dict( if key_extractors is None else key_extractors ) - return deserializer(cls.__name__, data, content_type=content_type) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def _flatten_subtype(cls, key, objects): @@ -545,7 +540,7 @@ class Serializer(object): "multiple": lambda x, y: x % y != 0, } - def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): + def __init__(self, classes: Optional[Mapping[str, type]] = None): self.serialize_type = { "iso-8601": Serializer.serialize_iso, "rfc-1123": Serializer.serialize_rfc, @@ -561,7 +556,7 @@ def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): "[]": self.serialize_iter, "{}": self.serialize_dict, } - self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if 
classes else {} + self.dependencies: Dict[str, type] = dict(classes) if classes else {} self.key_transformer = full_restapi_key_transformer self.client_side_validation = True @@ -649,7 +644,7 @@ def _serialize(self, target_obj, data_type=None, **kwargs): else: # That's a basic type # Integrate namespace if necessary local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) - local_node.text = unicode_str(new_attr) + local_node.text = str(new_attr) serialized.append(local_node) # type: ignore else: # JSON for k in reversed(keys): # type: ignore @@ -668,7 +663,7 @@ def _serialize(self, target_obj, data_type=None, **kwargs): except (AttributeError, KeyError, TypeError) as err: msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) - raise_with_traceback(SerializationError, msg, err) + raise SerializationError(msg) from err else: return serialized @@ -710,7 +705,7 @@ def body(self, data, data_type, **kwargs): ] data = deserializer._deserialize(data_type, data) except DeserializationError as err: - raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err) + raise SerializationError("Unable to build a model: " + str(err)) from err return self._serialize(data, data_type, **kwargs) @@ -730,6 +725,7 @@ def url(self, name, data, data_type, **kwargs): if kwargs.get("skip_quote") is True: output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) else: output = quote(str(output), safe="") except SerializationError: @@ -744,7 +740,7 @@ def query(self, name, data, data_type, **kwargs): :param str data_type: The type to be serialized from. :keyword bool skip_quote: Whether to skip quote the serialized result. Defaults to False. - :rtype: str + :rtype: str, list :raises: TypeError if serialization fails. 
:raises: ValueError if data is None """ @@ -753,7 +749,7 @@ def query(self, name, data, data_type, **kwargs): if data_type.startswith("["): internal_data_type = data_type[1:-1] do_quote = not kwargs.get("skip_quote", False) - return str(self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) # Not a list, regular serialization output = self.serialize_data(data, data_type, **kwargs) @@ -804,7 +800,7 @@ def serialize_data(self, data, data_type, **kwargs): raise ValueError("No value for given attribute") try: - if data is AzureCoreNull: + if data is CoreNull: return None if data_type in self.basic_types.values(): return self.serialize_basic(data, data_type, **kwargs) @@ -824,7 +820,7 @@ def serialize_data(self, data, data_type, **kwargs): except (ValueError, TypeError) as err: msg = "Unable to serialize value: {!r} as type: {!r}." - raise_with_traceback(SerializationError, msg.format(data, data_type), err) + raise SerializationError(msg.format(data, data_type)) from err else: return self._serialize(data, **kwargs) @@ -993,7 +989,7 @@ def serialize_object(self, attr, **kwargs): return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) if obj_type is _long_type: return self.serialize_long(attr) - if obj_type is unicode_str: + if obj_type is str: return self.serialize_unicode(attr) if obj_type is datetime.datetime: return self.serialize_iso(attr) @@ -1170,10 +1166,10 @@ def serialize_iso(attr, **kwargs): return date + microseconds + "Z" except (ValueError, OverflowError) as err: msg = "Unable to serialize datetime object." - raise_with_traceback(SerializationError, msg, err) + raise SerializationError(msg) from err except AttributeError as err: msg = "ISO-8601 object must be valid Datetime object." 
- raise_with_traceback(TypeError, msg, err) + raise TypeError(msg) from err @staticmethod def serialize_unix(attr, **kwargs): @@ -1209,7 +1205,6 @@ def rest_key_extractor(attr, attr_desc, data): if working_data is None: # If at any point while following flatten JSON path see None, it means # that all properties under are None as well - # https://github.com/Azure/msrest-for-python/issues/197 return None key = ".".join(dict_keys[1:]) @@ -1230,7 +1225,6 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data): if working_data is None: # If at any point while following flatten JSON path see None, it means # that all properties under are None as well - # https://github.com/Azure/msrest-for-python/issues/197 return None key = ".".join(dict_keys[1:]) @@ -1371,7 +1365,7 @@ class Deserializer(object): valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): + def __init__(self, classes: Optional[Mapping[str, type]] = None): self.deserialize_type = { "iso-8601": Deserializer.deserialize_iso, "rfc-1123": Deserializer.deserialize_rfc, @@ -1391,7 +1385,7 @@ def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): "duration": (isodate.Duration, datetime.timedelta), "iso-8601": (datetime.datetime), } - self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {} + self.dependencies: Dict[str, type] = dict(classes) if classes else {} self.key_extractors = [rest_key_extractor, xml_key_extractor] # Additional properties only works if the "rest_key_extractor" is used to # extract the keys. 
Making it to work whatever the key extractor is too much @@ -1444,12 +1438,12 @@ def _deserialize(self, target_obj, data): response, class_name = self._classify_target(target_obj, data) - if isinstance(response, basestring): + if isinstance(response, str): return self.deserialize_data(data, response) elif isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) - if data is None: + if data is None or data is CoreNull: return data try: attributes = response._attribute_map # type: ignore @@ -1481,7 +1475,7 @@ def _deserialize(self, target_obj, data): d_attrs[attr] = value except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name # type: ignore - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: additional_properties = self._build_additional_properties(attributes, data) return self._instantiate_model(response, d_attrs, additional_properties) @@ -1515,14 +1509,14 @@ def _classify_target(self, target, data): if target is None: return None, None - if isinstance(target, basestring): + if isinstance(target, str): try: target = self.dependencies[target] except KeyError: return target, target try: - target = target._classify(data, self.dependencies) + target = target._classify(data, self.dependencies) # type: ignore except AttributeError: pass # Target is not a Model, no classify return target, target.__class__.__name__ # type: ignore @@ -1578,7 +1572,7 @@ def _unpack_content(raw_data, content_type=None): if hasattr(raw_data, "_content_consumed"): return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) - if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore return raw_data @@ -1652,7 +1646,7 @@ def deserialize_data(self, data, 
data_type): except (ValueError, TypeError, AttributeError) as err: msg = "Unable to deserialize response data." msg += " Data: {}, {}".format(data, data_type) - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return self._deserialize(obj_type, data) @@ -1700,7 +1694,7 @@ def deserialize_object(self, attr, **kwargs): if isinstance(attr, ET.Element): # Do no recurse on XML, just return the tree as-is return attr - if isinstance(attr, basestring): + if isinstance(attr, str): return self.deserialize_basic(attr, "str") obj_type = type(attr) if obj_type in self.basic_types: @@ -1757,7 +1751,7 @@ def deserialize_basic(self, attr, data_type): if data_type == "bool": if attr in [True, False, 1, 0]: return bool(attr) - elif isinstance(attr, basestring): + elif isinstance(attr, str): if attr.lower() in ["true", "1"]: return True elif attr.lower() in ["false", "0"]: @@ -1808,7 +1802,6 @@ def deserialize_enum(data, enum_obj): data = data.value if isinstance(data, int): # Workaround. We might consider remove it in the future. - # https://github.com/Azure/azure-rest-api-specs/issues/141 try: return list(enum_obj.__members__.values())[data] except IndexError: @@ -1862,10 +1855,10 @@ def deserialize_decimal(attr): if isinstance(attr, ET.Element): attr = attr.text try: - return decimal.Decimal(attr) # type: ignore + return decimal.Decimal(str(attr)) # type: ignore except decimal.DecimalException as err: msg = "Invalid decimal {}".format(attr) - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err @staticmethod def deserialize_long(attr): @@ -1893,7 +1886,7 @@ def deserialize_duration(attr): duration = isodate.parse_duration(attr) except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." 
- raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return duration @@ -1910,7 +1903,7 @@ def deserialize_date(attr): if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore raise DeserializationError("Date must have only digits and -. Received: %s" % attr) # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. - return isodate.parse_date(attr, defaultmonth=None, defaultday=None) + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) @staticmethod def deserialize_time(attr): @@ -1945,7 +1938,7 @@ def deserialize_rfc(attr): date_obj = date_obj.astimezone(tz=TZ_UTC) except ValueError as err: msg = "Cannot deserialize to rfc datetime object." - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return date_obj @@ -1982,7 +1975,7 @@ def deserialize_iso(attr): raise OverflowError("Hit max or min date") except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." - raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return date_obj @@ -1998,9 +1991,10 @@ def deserialize_unix(attr): if isinstance(attr, ET.Element): attr = int(attr.text) # type: ignore try: + attr = int(attr) date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) except ValueError as err: msg = "Cannot deserialize to unix datetime object." 
- raise_with_traceback(DeserializationError, msg, err) + raise DeserializationError(msg) from err else: return date_obj diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py index 93363ad9128d..e228f8cdd11f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py @@ -8,9 +8,12 @@ from copy import deepcopy from typing import Any, TYPE_CHECKING +from typing_extensions import Self +from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse from azure.mgmt.core import ARMPipelineClient +from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy from . import models as _models from ._configuration import StreamAnalyticsManagementClientConfiguration @@ -22,7 +25,6 @@ Operations, OutputsOperations, PrivateEndpointsOperations, - SkuOperations, StreamingJobsOperations, SubscriptionsOperations, TransformationsOperations, @@ -36,22 +38,20 @@ class StreamAnalyticsManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """Stream Analytics Client. 
- :ivar functions: FunctionsOperations operations - :vartype functions: azure.mgmt.streamanalytics.operations.FunctionsOperations - :ivar inputs: InputsOperations operations - :vartype inputs: azure.mgmt.streamanalytics.operations.InputsOperations - :ivar outputs: OutputsOperations operations - :vartype outputs: azure.mgmt.streamanalytics.operations.OutputsOperations :ivar operations: Operations operations :vartype operations: azure.mgmt.streamanalytics.operations.Operations :ivar streaming_jobs: StreamingJobsOperations operations :vartype streaming_jobs: azure.mgmt.streamanalytics.operations.StreamingJobsOperations - :ivar sku: SkuOperations operations - :vartype sku: azure.mgmt.streamanalytics.operations.SkuOperations - :ivar subscriptions: SubscriptionsOperations operations - :vartype subscriptions: azure.mgmt.streamanalytics.operations.SubscriptionsOperations + :ivar inputs: InputsOperations operations + :vartype inputs: azure.mgmt.streamanalytics.operations.InputsOperations + :ivar outputs: OutputsOperations operations + :vartype outputs: azure.mgmt.streamanalytics.operations.OutputsOperations :ivar transformations: TransformationsOperations operations :vartype transformations: azure.mgmt.streamanalytics.operations.TransformationsOperations + :ivar functions: FunctionsOperations operations + :vartype functions: azure.mgmt.streamanalytics.operations.FunctionsOperations + :ivar subscriptions: SubscriptionsOperations operations + :vartype subscriptions: azure.mgmt.streamanalytics.operations.SubscriptionsOperations :ivar clusters: ClustersOperations operations :vartype clusters: azure.mgmt.streamanalytics.operations.ClustersOperations :ivar private_endpoints: PrivateEndpointsOperations operations @@ -62,6 +62,9 @@ class StreamAnalyticsManagementClient: # pylint: disable=client-accepts-api-ver :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str + :keyword api_version: Api Version. 
Default value is "2020-03-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ @@ -76,26 +79,43 @@ def __init__( self._config = StreamAnalyticsManagementClientConfiguration( credential=credential, subscription_id=subscription_id, **kwargs ) - self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + ARMAutoResourceProviderRegistrationPolicy(), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, policies=_policies, **kwargs) client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False - self.functions = FunctionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.inputs = InputsOperations(self._client, self._config, self._serialize, self._deserialize) - self.outputs = OutputsOperations(self._client, self._config, self._serialize, self._deserialize) self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.streaming_jobs = StreamingJobsOperations(self._client, self._config, self._serialize, 
self._deserialize) - self.sku = SkuOperations(self._client, self._config, self._serialize, self._deserialize) - self.subscriptions = SubscriptionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.inputs = InputsOperations(self._client, self._config, self._serialize, self._deserialize) + self.outputs = OutputsOperations(self._client, self._config, self._serialize, self._deserialize) self.transformations = TransformationsOperations(self._client, self._config, self._serialize, self._deserialize) + self.functions = FunctionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.subscriptions = SubscriptionsOperations(self._client, self._config, self._serialize, self._deserialize) self.clusters = ClustersOperations(self._client, self._config, self._serialize, self._deserialize) self.private_endpoints = PrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize ) - def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: + def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. 
>>> from azure.core.rest import HttpRequest @@ -115,12 +135,12 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: request_copy = deepcopy(request) request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request(request_copy, **kwargs) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore def close(self) -> None: self._client.close() - def __enter__(self) -> "StreamAnalyticsManagementClient": + def __enter__(self) -> Self: self._client.__enter__() return self diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_vendor.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_vendor.py deleted file mode 100644 index 0dafe0e287ff..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_vendor.py +++ /dev/null @@ -1,16 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.core.pipeline.transport import HttpRequest - - -def _convert_request(request, files=None): - data = request.content if not files else None - request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) - if files: - request.set_formdata_body(files) - return request diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py index 2eda20789583..e5754a47ce68 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "2.0.0b2" +VERSION = "1.0.0b1" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py index 53f46a4c1964..d915bfbfc402 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py @@ -8,7 +8,6 @@ from typing import Any, TYPE_CHECKING -from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy @@ -19,7 +18,7 @@ from azure.core.credentials_async import AsyncTokenCredential -class StreamAnalyticsManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes +class StreamAnalyticsManagementClientConfiguration: # pylint: 
disable=too-many-instance-attributes,name-too-long """Configuration for StreamAnalyticsManagementClient. Note that all parameters used to create this instance are saved as instance @@ -29,10 +28,14 @@ class StreamAnalyticsManagementClientConfiguration(Configuration): # pylint: di :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str + :keyword api_version: Api Version. Default value is "2020-03-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str """ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: - super(StreamAnalyticsManagementClientConfiguration, self).__init__(**kwargs) + api_version: str = kwargs.pop("api_version", "2020-03-01") + if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: @@ -40,8 +43,10 @@ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **k self.credential = credential self.subscription_id = subscription_id + self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "mgmt-streamanalytics/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) def _configure(self, **kwargs: Any) -> None: @@ -50,9 +55,9 @@ def _configure(self, **kwargs: Any) -> None: self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get("custom_hook_policy") or 
policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_patch.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_patch.py index f99e77fef986..17dbc073e01b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_patch.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_patch.py @@ -25,6 +25,7 @@ # # -------------------------------------------------------------------------- + # This file is used for handwritten extensions to the generated code. Example: # https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md def patch_sdk(): diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py index 73b17edb2232..31de0a8eb7c0 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py @@ -8,9 +8,12 @@ from copy import deepcopy from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self +from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient +from azure.mgmt.core.policies import 
AsyncARMAutoResourceProviderRegistrationPolicy from .. import models as _models from .._serialization import Deserializer, Serializer @@ -22,7 +25,6 @@ Operations, OutputsOperations, PrivateEndpointsOperations, - SkuOperations, StreamingJobsOperations, SubscriptionsOperations, TransformationsOperations, @@ -36,22 +38,20 @@ class StreamAnalyticsManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """Stream Analytics Client. - :ivar functions: FunctionsOperations operations - :vartype functions: azure.mgmt.streamanalytics.aio.operations.FunctionsOperations - :ivar inputs: InputsOperations operations - :vartype inputs: azure.mgmt.streamanalytics.aio.operations.InputsOperations - :ivar outputs: OutputsOperations operations - :vartype outputs: azure.mgmt.streamanalytics.aio.operations.OutputsOperations :ivar operations: Operations operations :vartype operations: azure.mgmt.streamanalytics.aio.operations.Operations :ivar streaming_jobs: StreamingJobsOperations operations :vartype streaming_jobs: azure.mgmt.streamanalytics.aio.operations.StreamingJobsOperations - :ivar sku: SkuOperations operations - :vartype sku: azure.mgmt.streamanalytics.aio.operations.SkuOperations - :ivar subscriptions: SubscriptionsOperations operations - :vartype subscriptions: azure.mgmt.streamanalytics.aio.operations.SubscriptionsOperations + :ivar inputs: InputsOperations operations + :vartype inputs: azure.mgmt.streamanalytics.aio.operations.InputsOperations + :ivar outputs: OutputsOperations operations + :vartype outputs: azure.mgmt.streamanalytics.aio.operations.OutputsOperations :ivar transformations: TransformationsOperations operations :vartype transformations: azure.mgmt.streamanalytics.aio.operations.TransformationsOperations + :ivar functions: FunctionsOperations operations + :vartype functions: azure.mgmt.streamanalytics.aio.operations.FunctionsOperations + :ivar subscriptions: SubscriptionsOperations operations + :vartype subscriptions: 
azure.mgmt.streamanalytics.aio.operations.SubscriptionsOperations :ivar clusters: ClustersOperations operations :vartype clusters: azure.mgmt.streamanalytics.aio.operations.ClustersOperations :ivar private_endpoints: PrivateEndpointsOperations operations @@ -63,6 +63,9 @@ class StreamAnalyticsManagementClient: # pylint: disable=client-accepts-api-ver :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str + :keyword api_version: Api Version. Default value is "2020-03-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ @@ -77,26 +80,45 @@ def __init__( self._config = StreamAnalyticsManagementClientConfiguration( credential=credential, subscription_id=subscription_id, **kwargs ) - self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + AsyncARMAutoResourceProviderRegistrationPolicy(), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, policies=_policies, **kwargs) client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = 
Deserializer(client_models) self._serialize.client_side_validation = False - self.functions = FunctionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.inputs = InputsOperations(self._client, self._config, self._serialize, self._deserialize) - self.outputs = OutputsOperations(self._client, self._config, self._serialize, self._deserialize) self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.streaming_jobs = StreamingJobsOperations(self._client, self._config, self._serialize, self._deserialize) - self.sku = SkuOperations(self._client, self._config, self._serialize, self._deserialize) - self.subscriptions = SubscriptionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.inputs = InputsOperations(self._client, self._config, self._serialize, self._deserialize) + self.outputs = OutputsOperations(self._client, self._config, self._serialize, self._deserialize) self.transformations = TransformationsOperations(self._client, self._config, self._serialize, self._deserialize) + self.functions = FunctionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.subscriptions = SubscriptionsOperations(self._client, self._config, self._serialize, self._deserialize) self.clusters = ClustersOperations(self._client, self._config, self._serialize, self._deserialize) self.private_endpoints = PrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize ) - def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]: + def _send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. 
>>> from azure.core.rest import HttpRequest @@ -116,12 +138,12 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncH request_copy = deepcopy(request) request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request(request_copy, **kwargs) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "StreamAnalyticsManagementClient": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py index 729eeb5cd6df..59ddeae83d21 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py @@ -6,14 +6,13 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._functions_operations import FunctionsOperations -from ._inputs_operations import InputsOperations -from ._outputs_operations import OutputsOperations from ._operations import Operations from ._streaming_jobs_operations import StreamingJobsOperations -from ._sku_operations import SkuOperations -from ._subscriptions_operations import SubscriptionsOperations +from ._inputs_operations import InputsOperations +from ._outputs_operations import OutputsOperations from ._transformations_operations import TransformationsOperations +from ._functions_operations import FunctionsOperations +from ._subscriptions_operations import SubscriptionsOperations from ._clusters_operations import ClustersOperations from ._private_endpoints_operations import PrivateEndpointsOperations @@ -22,14 +21,13 @@ from ._patch import patch_sdk as _patch_sdk __all__ = [ - "FunctionsOperations", - "InputsOperations", - "OutputsOperations", "Operations", "StreamingJobsOperations", - "SkuOperations", - "SubscriptionsOperations", + "InputsOperations", + "OutputsOperations", "TransformationsOperations", + "FunctionsOperations", + "SubscriptionsOperations", "ClustersOperations", "PrivateEndpointsOperations", ] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py index f1e30725f336..2da8a562b5ef 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright 
(c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -16,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -29,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._clusters_operations import ( build_create_or_update_request, build_delete_request, @@ -40,6 +42,10 @@ build_update_request, ) +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -67,12 +73,12 @@ async def _create_or_update_initial( self, resource_group_name: str, cluster_name: str, - cluster: Union[_models.Cluster, IO], + cluster: Union[_models.Cluster, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any - ) -> _models.Cluster: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -83,9 +89,9 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -95,7 +101,7 @@ async def _create_or_update_initial( else: _json = self._serialize.body(cluster, "Cluster") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, @@ -105,40 +111,35 @@ async 
def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("Cluster", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } - @overload async def begin_create_or_update( self, @@ -172,14 +173,6 @@ async def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.Cluster] @@ -191,7 +184,7 @@ async def begin_create_or_update( self, resource_group_name: str, cluster_name: str, - cluster: IO, + cluster: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -207,7 +200,7 @@ async def begin_create_or_update( :type cluster_name: str :param cluster: The definition of the cluster that will be used to create a new cluster or replace the existing one. Required. - :type cluster: IO + :type cluster: IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -219,14 +212,6 @@ async def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.Cluster] @@ -238,7 +223,7 @@ async def begin_create_or_update( self, resource_group_name: str, cluster_name: str, - cluster: Union[_models.Cluster, IO], + cluster: Union[_models.Cluster, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -251,8 +236,8 @@ async def begin_create_or_update( :param cluster_name: The name of the cluster. Required. :type cluster_name: str :param cluster: The definition of the cluster that will be used to create a new cluster or - replace the existing one. Is either a Cluster type or a IO type. Required. - :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO + replace the existing one. Is either a Cluster type or a IO[bytes] type. Required. + :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -261,17 +246,6 @@ async def begin_create_or_update( an existing record set. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.Cluster] @@ -280,7 +254,7 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -300,12 +274,13 @@ async def begin_create_or_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -315,27 +290,25 @@ def 
get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.Cluster].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return AsyncLROPoller[_models.Cluster]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) async def _update_initial( self, resource_group_name: str, cluster_name: str, - cluster: Union[_models.Cluster, IO], + cluster: Union[_models.Cluster, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any - ) -> Optional[_models.Cluster]: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -346,9 +319,9 @@ async def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.Cluster]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -358,7 +331,7 @@ async def _update_initial( else: _json = self._serialize.body(cluster, "Cluster") - request = build_update_request( + 
_request = build_update_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, @@ -367,37 +340,34 @@ async def _update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return deserialized # type: ignore @overload async def begin_update( @@ -428,14 +398,6 @@ async def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.Cluster] @@ -447,7 +409,7 @@ async def begin_update( self, resource_group_name: str, cluster_name: str, - cluster: IO, + cluster: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -463,7 +425,7 @@ async def begin_update( :type cluster_name: str :param cluster: The properties specified here will overwrite the corresponding properties in the existing cluster (ie. Those properties will be updated). Required. - :type cluster: IO + :type cluster: IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -471,14 +433,6 @@ async def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.Cluster] @@ -490,7 +444,7 @@ async def begin_update( self, resource_group_name: str, cluster_name: str, - cluster: Union[_models.Cluster, IO], + cluster: Union[_models.Cluster, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> AsyncLROPoller[_models.Cluster]: @@ -503,24 +457,13 @@ async def begin_update( :param cluster_name: The name of the cluster. Required. :type cluster_name: str :param cluster: The properties specified here will overwrite the corresponding properties in - the existing cluster (ie. Those properties will be updated). Is either a Cluster type or a IO - type. Required. - :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO + the existing cluster (ie. Those properties will be updated). Is either a Cluster type or a + IO[bytes] type. Required. + :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.Cluster] @@ -529,7 +472,7 @@ async def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -548,12 +491,13 @@ async def begin_update( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -563,17 +507,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.Cluster].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, 
client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return AsyncLROPoller[_models.Cluster]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace_async async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _models.Cluster: @@ -584,12 +526,11 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) :type resource_group_name: str :param cluster_name: The name of the cluster. Required. :type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Cluster or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Cluster :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -600,24 +541,22 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = 
_convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -627,21 +566,15 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return deserialized # type: ignore - async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, cluster_name: str, **kwargs: Any - ) -> None: - error_map = { + async def _delete_initial(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -652,39 +585,42 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return deserialized # type: ignore @distributed_trace_async async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> AsyncLROPoller[None]: @@ -695,14 +631,6 @@ async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwar :type resource_group_name: str :param cluster_name: The name of the 
cluster. Required. :type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -710,13 +638,13 @@ async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwar _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, api_version=api_version, @@ -725,11 +653,12 @@ async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwar params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: 
disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -738,23 +667,18 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Cluster"]: """Lists all of the clusters in the given subscription. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Cluster or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -762,10 +686,10 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Cluster" _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -776,22 +700,30 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Cluster" def prepare_request(next_link=None): if not next_link: - request = build_list_by_subscription_request( + _request = build_list_by_subscription_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_subscription.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) @@ -801,11 +733,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -818,10 +750,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_subscription.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters" - } - @distributed_trace def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Cluster"]: """Lists all of the clusters in the given resource group. @@ -829,7 +757,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy :param resource_group_name: The name of the resource group. The name is case insensitive. Required. 
:type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Cluster or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -837,10 +764,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -851,23 +778,31 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for 
key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) @@ -877,11 +812,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -894,10 +829,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters" - } - @distributed_trace def list_streaming_jobs( self, resource_group_name: str, cluster_name: str, **kwargs: Any @@ -909,7 +840,6 @@ def list_streaming_jobs( :type resource_group_name: str :param cluster_name: The name of the cluster. Required. 
:type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ClusterJob or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.ClusterJob] :raises ~azure.core.exceptions.HttpResponseError: @@ -917,10 +847,10 @@ def list_streaming_jobs( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ClusterJobListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -931,24 +861,32 @@ def list_streaming_jobs( def prepare_request(next_link=None): if not next_link: - request = build_list_streaming_jobs_request( + _request = build_list_streaming_jobs_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_streaming_jobs.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("ClusterJobListResult", pipeline_response) @@ -958,11 +896,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -974,7 +912,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list_streaming_jobs.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs" - } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py index f67e7786e309..9bb7ce4d1546 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -16,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -29,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._functions_operations import ( build_create_or_replace_request, build_delete_request, @@ -40,6 +42,10 @@ build_update_request, ) +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -99,7 +105,6 @@ async def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -111,7 +116,7 @@ async def create_or_replace( resource_group_name: str, job_name: str, function_name: str, - function: IO, + function: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -129,7 +134,7 @@ async def create_or_replace( :type function_name: str :param function: The definition of the function that will be used to create a new function or replace the existing one under the streaming job. Required. - :type function: IO + :type function: IO[bytes] :param if_match: The ETag of the function. Omit this value to always overwrite the current function. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -141,7 +146,6 @@ async def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -153,7 +157,7 @@ async def create_or_replace( resource_group_name: str, job_name: str, function_name: str, - function: Union[_models.Function, IO], + function: Union[_models.Function, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -168,9 +172,9 @@ async def create_or_replace( :param function_name: The name of the function. Required. :type function_name: str :param function: The definition of the function that will be used to create a new function or - replace the existing one under the streaming job. 
Is either a Function type or a IO type. - Required. - :type function: ~azure.mgmt.streamanalytics.models.Function or IO + replace the existing one under the streaming job. Is either a Function type or a IO[bytes] + type. Required. + :type function: ~azure.mgmt.streamanalytics.models.Function or IO[bytes] :param if_match: The ETag of the function. Omit this value to always overwrite the current function. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -179,15 +183,11 @@ async def create_or_replace( an existing function. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -198,7 +198,7 @@ async def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Function] = kwargs.pop("cls", None) @@ -210,7 +210,7 @@ async def create_or_replace( else: _json = self._serialize.body(function, "Function") - request = build_create_or_replace_request( + _request = build_create_or_replace_request( 
resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, @@ -221,16 +221,14 @@ async def create_or_replace( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_replace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -241,25 +239,15 @@ async def create_or_replace( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} - if response.status_code == 200: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("Function", pipeline_response) - - if response.status_code == 201: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Function", pipeline_response) + deserialized = self._deserialize("Function", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - create_or_replace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" - } - @overload async def update( self, @@ -295,7 +283,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -307,7 +294,7 @@ async def update( resource_group_name: str, job_name: str, function_name: str, - function: IO, + function: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -328,7 +315,7 @@ async def update( corresponding properties in the existing function (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing function will remain the same and not change as a result of this PATCH operation. Required. - :type function: IO + :type function: IO[bytes] :param if_match: The ETag of the function. Omit this value to always overwrite the current function. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -336,7 +323,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -348,7 +334,7 @@ async def update( resource_group_name: str, job_name: str, function_name: str, - function: Union[_models.Function, IO], + function: Union[_models.Function, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> _models.Function: @@ -367,21 +353,17 @@ async def update( corresponding properties in the existing function (ie. Those properties will be updated). 
Any properties that are set to null here will mean that the corresponding property in the existing function will remain the same and not change as a result of this PATCH operation. Is either a - Function type or a IO type. Required. - :type function: ~azure.mgmt.streamanalytics.models.Function or IO + Function type or a IO[bytes] type. Required. + :type function: ~azure.mgmt.streamanalytics.models.Function or IO[bytes] :param if_match: The ETag of the function. Omit this value to always overwrite the current function. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -392,7 +374,7 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Function] = kwargs.pop("cls", None) @@ -404,7 +386,7 @@ async def update( else: _json = self._serialize.body(function, "Function") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, @@ -414,16 
+396,14 @@ async def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -436,16 +416,12 @@ async def update( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Function", pipeline_response) + deserialized = self._deserialize("Function", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" - } + return deserialized # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -460,12 +436,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :type job_name: str :param function_name: The name of the function. Required. 
:type function_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -476,25 +451,23 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -505,11 +478,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" - } + return cls(pipeline_response, None, {}) # type: ignore 
@distributed_trace_async async def get(self, resource_group_name: str, job_name: str, function_name: str, **kwargs: Any) -> _models.Function: @@ -522,12 +491,11 @@ async def get(self, resource_group_name: str, job_name: str, function_name: str, :type job_name: str :param function_name: The name of the function. Required. :type function_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -538,25 +506,23 @@ async def get(self, resource_group_name: str, job_name: str, function_name: str, _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Function] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -569,16 +535,12 @@ async def get(self, resource_group_name: str, job_name: str, 
function_name: str, response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Function", pipeline_response) + deserialized = self._deserialize("Function", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" - } + return deserialized # type: ignore @distributed_trace def list_by_streaming_job( @@ -592,11 +554,10 @@ def list_by_streaming_job( :param job_name: The name of the streaming job. Required. :type job_name: str :param select: The $select OData query parameter. This is a comma-separated list of structural - properties to include in the response, or "\ *" to include all properties. By default, all - properties are returned except diagnostics. Currently only accepts '*\ ' as a valid value. + properties to include in the response, or "\\ *" to include all properties. By default, all + properties are returned except diagnostics. Currently only accepts '*\\ ' as a valid value. Default value is None. 
:type select: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Function or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.Function] :raises ~azure.core.exceptions.HttpResponseError: @@ -604,10 +565,10 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FunctionListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -618,25 +579,33 @@ def list_by_streaming_job( def prepare_request(next_link=None): if not next_link: - request = build_list_by_streaming_job_request( + _request = build_list_by_streaming_job_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, select=select, api_version=api_version, - template_url=self.list_by_streaming_job.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("FunctionListResult", pipeline_response) @@ -646,11 +615,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -663,19 +632,15 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions" - } - async def _test_initial( self, resource_group_name: str, job_name: str, function_name: str, - function: Optional[Union[_models.Function, IO]] = None, + function: Optional[Union[_models.Function, IO[bytes]]] = None, **kwargs: Any - ) -> Optional[_models.ResourceTestStatus]: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -686,9 +651,9 @@ async def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -701,7 +666,7 @@ async def _test_initial( else: _json = None - request = build_test_request( + _request = build_test_request( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, @@ -710,37 +675,34 @@ async def _test_initial( content_type=content_type, json=_json, content=_content, - template_url=self._test_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - 
_test_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test" - } + return deserialized # type: ignore @overload async def begin_test( @@ -774,14 +736,6 @@ async def begin_test( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: @@ -795,7 +749,7 @@ async def begin_test( resource_group_name: str, job_name: str, function_name: str, - function: Optional[IO] = None, + function: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -817,18 +771,10 @@ async def begin_test( properties specified will overwrite the corresponding properties in the existing function (exactly like a PATCH operation) and the resulting function will be tested. Default value is None. - :type function: IO + :type function: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: @@ -842,7 +788,7 @@ async def begin_test( resource_group_name: str, job_name: str, function_name: str, - function: Optional[Union[_models.Function, IO]] = None, + function: Optional[Union[_models.Function, IO[bytes]]] = None, **kwargs: Any ) -> AsyncLROPoller[_models.ResourceTestStatus]: """Tests if the information provided for a function is valid. This can range from testing the @@ -861,19 +807,8 @@ async def begin_test( this parameter can be left null to test the existing function as is or if specified, the properties specified will overwrite the corresponding properties in the existing function (exactly like a PATCH operation) and the resulting function will be tested. Is either a - Function type or a IO type. Default value is None. - :type function: ~azure.mgmt.streamanalytics.models.Function or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + Function type or a IO[bytes] type. Default value is None. + :type function: ~azure.mgmt.streamanalytics.models.Function or IO[bytes] :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: @@ -883,7 +818,7 @@ async def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -902,12 +837,13 @@ async def begin_test( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = self._deserialize("ResourceTestStatus", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -917,17 +853,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.ResourceTestStatus].from_continuation_token( polling_method=polling_method, 
continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_test.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test" - } + return AsyncLROPoller[_models.ResourceTestStatus]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @overload async def retrieve_default_definition( @@ -958,7 +892,6 @@ async def retrieve_default_definition( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -970,7 +903,7 @@ async def retrieve_default_definition( resource_group_name: str, job_name: str, function_name: str, - function_retrieve_default_definition_parameters: Optional[IO] = None, + function_retrieve_default_definition_parameters: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -986,11 +919,10 @@ async def retrieve_default_definition( :type function_name: str :param function_retrieve_default_definition_parameters: Parameters used to specify the type of function to retrieve the default definition for. Default value is None. - :type function_retrieve_default_definition_parameters: IO + :type function_retrieve_default_definition_parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -1003,7 +935,7 @@ async def retrieve_default_definition( job_name: str, function_name: str, function_retrieve_default_definition_parameters: Optional[ - Union[_models.FunctionRetrieveDefaultDefinitionParameters, IO] + Union[_models.FunctionRetrieveDefaultDefinitionParameters, IO[bytes]] ] = None, **kwargs: Any ) -> _models.Function: @@ -1018,18 +950,14 @@ async def retrieve_default_definition( :type function_name: str :param function_retrieve_default_definition_parameters: Parameters used to specify the type of function to retrieve the default definition for. Is either a - FunctionRetrieveDefaultDefinitionParameters type or a IO type. Default value is None. + FunctionRetrieveDefaultDefinitionParameters type or a IO[bytes] type. Default value is None. :type function_retrieve_default_definition_parameters: - ~azure.mgmt.streamanalytics.models.FunctionRetrieveDefaultDefinitionParameters or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + ~azure.mgmt.streamanalytics.models.FunctionRetrieveDefaultDefinitionParameters or IO[bytes] :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1040,7 +968,7 @@ async def retrieve_default_definition( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Function] = kwargs.pop("cls", None) @@ -1057,7 +985,7 @@ async def retrieve_default_definition( else: _json = None - request = build_retrieve_default_definition_request( + _request = build_retrieve_default_definition_request( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, @@ -1066,16 +994,14 @@ async def retrieve_default_definition( content_type=content_type, json=_json, content=_content, - template_url=self.retrieve_default_definition.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1085,13 +1011,9 @@ async def 
retrieve_default_definition( error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Function", pipeline_response) + deserialized = self._deserialize("Function", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - retrieve_default_definition.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition" - } + return deserialized # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py index 62121e245db4..e2e896b9ca69 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from io import IOBase -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -16,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -29,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._inputs_operations import ( build_create_or_replace_request, build_delete_request, @@ -39,6 +41,10 @@ build_update_request, ) +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -98,7 +104,6 @@ async def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: @@ -110,7 +115,7 @@ async def create_or_replace( resource_group_name: str, job_name: str, input_name: str, - input: IO, + input: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -128,7 +133,7 @@ async def create_or_replace( :type input_name: str :param input: The definition of the input that will be used to create a new input or replace the existing one under the streaming job. Required. - :type input: IO + :type input: IO[bytes] :param if_match: The ETag of the input. Omit this value to always overwrite the current input. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -140,7 +145,6 @@ async def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: @@ -152,7 +156,7 @@ async def create_or_replace( resource_group_name: str, job_name: str, input_name: str, - input: Union[_models.Input, IO], + input: Union[_models.Input, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -167,8 +171,8 @@ async def create_or_replace( :param input_name: The name of the input. Required. :type input_name: str :param input: The definition of the input that will be used to create a new input or replace - the existing one under the streaming job. Is either a Input type or a IO type. Required. 
- :type input: ~azure.mgmt.streamanalytics.models.Input or IO + the existing one under the streaming job. Is either a Input type or a IO[bytes] type. Required. + :type input: ~azure.mgmt.streamanalytics.models.Input or IO[bytes] :param if_match: The ETag of the input. Omit this value to always overwrite the current input. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -177,15 +181,11 @@ async def create_or_replace( existing input. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -196,7 +196,7 @@ async def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Input] = kwargs.pop("cls", None) @@ -208,7 +208,7 @@ async def create_or_replace( else: _json = self._serialize.body(input, "Input") - request = build_create_or_replace_request( + _request = build_create_or_replace_request( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, @@ -219,16 +219,14 @@ async def 
create_or_replace( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_replace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -239,25 +237,15 @@ async def create_or_replace( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} - if response.status_code == 200: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("Input", pipeline_response) - - if response.status_code == 201: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Input", pipeline_response) + deserialized = self._deserialize("Input", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - create_or_replace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" - } - @overload async def update( self, @@ -293,7 +281,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: @@ -305,7 +292,7 @@ async def update( resource_group_name: str, job_name: str, input_name: str, - input: IO, + input: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -326,7 +313,7 @@ async def update( properties in the existing input (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing input will remain the same and not change as a result of this PATCH operation. Required. - :type input: IO + :type input: IO[bytes] :param if_match: The ETag of the input. Omit this value to always overwrite the current input. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -334,7 +321,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: @@ -346,7 +332,7 @@ async def update( resource_group_name: str, job_name: str, input_name: str, - input: Union[_models.Input, IO], + input: Union[_models.Input, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> _models.Input: @@ -365,21 +351,17 @@ async def update( properties in the existing input (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing input will remain the same and not change as a result of this PATCH operation. 
Is either a Input type or a - IO type. Required. - :type input: ~azure.mgmt.streamanalytics.models.Input or IO + IO[bytes] type. Required. + :type input: ~azure.mgmt.streamanalytics.models.Input or IO[bytes] :param if_match: The ETag of the input. Omit this value to always overwrite the current input. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -390,7 +372,7 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Input] = kwargs.pop("cls", None) @@ -402,7 +384,7 @@ async def update( else: _json = self._serialize.body(input, "Input") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, @@ -412,16 +394,14 @@ async def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = 
self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -434,16 +414,12 @@ async def update( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Input", pipeline_response) + deserialized = self._deserialize("Input", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" - } + return deserialized # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -458,12 +434,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :type job_name: str :param input_name: The name of the input. Required. 
:type input_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -474,25 +449,23 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -503,11 +476,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" - } + return cls(pipeline_response, None, {}) # type: ignore 
@distributed_trace_async async def get(self, resource_group_name: str, job_name: str, input_name: str, **kwargs: Any) -> _models.Input: @@ -520,12 +489,11 @@ async def get(self, resource_group_name: str, job_name: str, input_name: str, ** :type job_name: str :param input_name: The name of the input. Required. :type input_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -536,25 +504,23 @@ async def get(self, resource_group_name: str, job_name: str, input_name: str, ** _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Input] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -567,16 +533,12 @@ async def get(self, resource_group_name: str, job_name: str, input_name: str, ** response_headers 
= {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Input", pipeline_response) + deserialized = self._deserialize("Input", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" - } + return deserialized # type: ignore @distributed_trace def list_by_streaming_job( @@ -590,11 +552,10 @@ def list_by_streaming_job( :param job_name: The name of the streaming job. Required. :type job_name: str :param select: The $select OData query parameter. This is a comma-separated list of structural - properties to include in the response, or "\ *" to include all properties. By default, all - properties are returned except diagnostics. Currently only accepts '*\ ' as a valid value. + properties to include in the response, or "\\ *" to include all properties. By default, all + properties are returned except diagnostics. Currently only accepts '*\\ ' as a valid value. Default value is None. 
:type select: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Input or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.Input] :raises ~azure.core.exceptions.HttpResponseError: @@ -602,10 +563,10 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.InputListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -616,25 +577,33 @@ def list_by_streaming_job( def prepare_request(next_link=None): if not next_link: - request = build_list_by_streaming_job_request( + _request = build_list_by_streaming_job_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, select=select, api_version=api_version, - template_url=self.list_by_streaming_job.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("InputListResult", pipeline_response) @@ -644,11 +613,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -661,19 +630,15 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs" - } - async def _test_initial( self, resource_group_name: str, job_name: str, input_name: str, - input: Optional[Union[_models.Input, IO]] = None, + input: Optional[Union[_models.Input, IO[bytes]]] = None, **kwargs: Any - ) -> Optional[_models.ResourceTestStatus]: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -684,9 +649,9 @@ async def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -699,7 +664,7 @@ async def _test_initial( else: _json = None - request = build_test_request( + _request = build_test_request( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, @@ -708,37 +673,34 @@ async def _test_initial( content_type=content_type, json=_json, content=_content, - template_url=self._test_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - 
_test_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test" - } + return deserialized # type: ignore @overload async def begin_test( @@ -770,14 +732,6 @@ async def begin_test( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: @@ -791,7 +745,7 @@ async def begin_test( resource_group_name: str, job_name: str, input_name: str, - input: Optional[IO] = None, + input: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -811,18 +765,10 @@ async def begin_test( parameter can be left null to test the existing input as is or if specified, the properties specified will overwrite the corresponding properties in the existing input (exactly like a PATCH operation) and the resulting input will be tested. Default value is None. - :type input: IO + :type input: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: @@ -836,7 +782,7 @@ async def begin_test( resource_group_name: str, job_name: str, input_name: str, - input: Optional[Union[_models.Input, IO]] = None, + input: Optional[Union[_models.Input, IO[bytes]]] = None, **kwargs: Any ) -> AsyncLROPoller[_models.ResourceTestStatus]: """Tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics @@ -853,20 +799,9 @@ async def begin_test( full input definition intended to be tested. If the input specified already exists, this parameter can be left null to test the existing input as is or if specified, the properties specified will overwrite the corresponding properties in the existing input (exactly like a - PATCH operation) and the resulting input will be tested. Is either a Input type or a IO type. - Default value is None. - :type input: ~azure.mgmt.streamanalytics.models.Input or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + PATCH operation) and the resulting input will be tested. Is either a Input type or a IO[bytes] + type. Default value is None. + :type input: ~azure.mgmt.streamanalytics.models.Input or IO[bytes] :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: @@ -876,7 +811,7 @@ async def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -895,12 +830,13 @@ async def begin_test( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = self._deserialize("ResourceTestStatus", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -910,14 +846,12 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return 
AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.ResourceTestStatus].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_test.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test" - } + return AsyncLROPoller[_models.ResourceTestStatus]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py index dfe7e72d6dd8..20cb3ffda772 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,7 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import sys +from typing import Any, AsyncIterable, Callable, Dict, Optional, Type, TypeVar +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -18,16 +20,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._operations import build_list_request +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -55,7 +59,6 @@ def __init__(self, *args, **kwargs) -> None: def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: """Lists all of the available Stream Analytics related operations. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Operation or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.Operation] :raises ~azure.core.exceptions.HttpResponseError: @@ -63,10 +66,10 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -77,21 +80,29 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("OperationListResult", pipeline_response) @@ -101,11 +112,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -117,5 +128,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list.metadata = {"url": "/providers/Microsoft.StreamAnalytics/operations"} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py index 8206ad908565..c91a77b9170a 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from io import IOBase -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -16,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -29,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._outputs_operations import ( build_create_or_replace_request, build_delete_request, @@ -39,6 +41,10 @@ build_update_request, ) +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -98,7 +104,6 @@ async def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: @@ -110,7 +115,7 @@ async def create_or_replace( resource_group_name: str, job_name: str, output_name: str, - output: IO, + output: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -128,7 +133,7 @@ async def create_or_replace( :type output_name: str :param output: The definition of the output that will be used to create a new output or replace the existing one under the streaming job. Required. - :type output: IO + :type output: IO[bytes] :param if_match: The ETag of the output. Omit this value to always overwrite the current output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -140,7 +145,6 @@ async def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: @@ -152,7 +156,7 @@ async def create_or_replace( resource_group_name: str, job_name: str, output_name: str, - output: Union[_models.Output, IO], + output: Union[_models.Output, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -167,8 +171,9 @@ async def create_or_replace( :param output_name: The name of the output. Required. :type output_name: str :param output: The definition of the output that will be used to create a new output or replace - the existing one under the streaming job. Is either a Output type or a IO type. Required. 
- :type output: ~azure.mgmt.streamanalytics.models.Output or IO + the existing one under the streaming job. Is either a Output type or a IO[bytes] type. + Required. + :type output: ~azure.mgmt.streamanalytics.models.Output or IO[bytes] :param if_match: The ETag of the output. Omit this value to always overwrite the current output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -177,15 +182,11 @@ async def create_or_replace( an existing output. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -196,7 +197,7 @@ async def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Output] = kwargs.pop("cls", None) @@ -208,7 +209,7 @@ async def create_or_replace( else: _json = self._serialize.body(output, "Output") - request = build_create_or_replace_request( + _request = build_create_or_replace_request( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, @@ -219,16 
+220,14 @@ async def create_or_replace( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_replace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -239,25 +238,15 @@ async def create_or_replace( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} - if response.status_code == 200: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("Output", pipeline_response) - - if response.status_code == 201: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Output", pipeline_response) + deserialized = self._deserialize("Output", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - create_or_replace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" - } - @overload async def update( self, @@ -293,7 +282,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: @@ -305,7 +293,7 @@ async def update( resource_group_name: str, job_name: str, output_name: str, - output: IO, + output: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -326,7 +314,7 @@ async def update( properties in the existing output (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing output will remain the same and not change as a result of this PATCH operation. Required. - :type output: IO + :type output: IO[bytes] :param if_match: The ETag of the output. Omit this value to always overwrite the current output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -334,7 +322,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: @@ -346,7 +333,7 @@ async def update( resource_group_name: str, job_name: str, output_name: str, - output: Union[_models.Output, IO], + output: Union[_models.Output, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> _models.Output: @@ -365,21 +352,17 @@ async def update( properties in the existing output (ie. Those properties will be updated). 
Any properties that are set to null here will mean that the corresponding property in the existing output will remain the same and not change as a result of this PATCH operation. Is either a Output type or - a IO type. Required. - :type output: ~azure.mgmt.streamanalytics.models.Output or IO + a IO[bytes] type. Required. + :type output: ~azure.mgmt.streamanalytics.models.Output or IO[bytes] :param if_match: The ETag of the output. Omit this value to always overwrite the current output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -390,7 +373,7 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Output] = kwargs.pop("cls", None) @@ -402,7 +385,7 @@ async def update( else: _json = self._serialize.body(output, "Output") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, @@ -412,16 +395,14 @@ async def update( 
content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -434,16 +415,12 @@ async def update( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Output", pipeline_response) + deserialized = self._deserialize("Output", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" - } + return deserialized # type: ignore @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -458,12 +435,11 @@ async def delete( # pylint: disable=inconsistent-return-statements :type job_name: str :param output_name: The name of the output. Required. 
:type output_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -474,25 +450,23 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -503,11 +477,7 @@ async def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" - } + return cls(pipeline_response, None, {}) # type: ignore 
@distributed_trace_async async def get(self, resource_group_name: str, job_name: str, output_name: str, **kwargs: Any) -> _models.Output: @@ -520,12 +490,11 @@ async def get(self, resource_group_name: str, job_name: str, output_name: str, * :type job_name: str :param output_name: The name of the output. Required. :type output_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -536,25 +505,23 @@ async def get(self, resource_group_name: str, job_name: str, output_name: str, * _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Output] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -567,16 +534,12 @@ async def get(self, resource_group_name: str, job_name: str, output_name: str, * 
response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Output", pipeline_response) + deserialized = self._deserialize("Output", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" - } + return deserialized # type: ignore @distributed_trace def list_by_streaming_job( @@ -590,11 +553,10 @@ def list_by_streaming_job( :param job_name: The name of the streaming job. Required. :type job_name: str :param select: The $select OData query parameter. This is a comma-separated list of structural - properties to include in the response, or "\ *" to include all properties. By default, all - properties are returned except diagnostics. Currently only accepts '*\ ' as a valid value. + properties to include in the response, or "\\ *" to include all properties. By default, all + properties are returned except diagnostics. Currently only accepts '*\\ ' as a valid value. Default value is None. 
:type select: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Output or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.Output] :raises ~azure.core.exceptions.HttpResponseError: @@ -602,10 +564,10 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OutputListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -616,25 +578,33 @@ def list_by_streaming_job( def prepare_request(next_link=None): if not next_link: - request = build_list_by_streaming_job_request( + _request = build_list_by_streaming_job_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, select=select, api_version=api_version, - template_url=self.list_by_streaming_job.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("OutputListResult", pipeline_response) @@ -644,11 +614,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -661,19 +631,15 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs" - } - async def _test_initial( self, resource_group_name: str, job_name: str, output_name: str, - output: Optional[Union[_models.Output, IO]] = None, + output: Optional[Union[_models.Output, IO[bytes]]] = None, **kwargs: Any - ) -> Optional[_models.ResourceTestStatus]: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -684,9 +650,9 @@ async def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -699,7 +665,7 @@ async def _test_initial( else: _json = None - request = build_test_request( + _request = build_test_request( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, @@ -708,37 +674,34 @@ async def _test_initial( content_type=content_type, json=_json, content=_content, - template_url=self._test_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - 
_test_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test" - } + return deserialized # type: ignore @overload async def begin_test( @@ -770,14 +733,6 @@ async def begin_test( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: @@ -791,7 +746,7 @@ async def begin_test( resource_group_name: str, job_name: str, output_name: str, - output: Optional[IO] = None, + output: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -811,18 +766,10 @@ async def begin_test( parameter can be left null to test the existing output as is or if specified, the properties specified will overwrite the corresponding properties in the existing output (exactly like a PATCH operation) and the resulting output will be tested. Default value is None. - :type output: IO + :type output: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: @@ -836,7 +783,7 @@ async def begin_test( resource_group_name: str, job_name: str, output_name: str, - output: Optional[Union[_models.Output, IO]] = None, + output: Optional[Union[_models.Output, IO[bytes]]] = None, **kwargs: Any ) -> AsyncLROPoller[_models.ResourceTestStatus]: """Tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics @@ -853,20 +800,9 @@ async def begin_test( full output definition intended to be tested. If the output specified already exists, this parameter can be left null to test the existing output as is or if specified, the properties specified will overwrite the corresponding properties in the existing output (exactly like a - PATCH operation) and the resulting output will be tested. Is either a Output type or a IO type. - Default value is None. - :type output: ~azure.mgmt.streamanalytics.models.Output or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + PATCH operation) and the resulting output will be tested. Is either a Output type or a + IO[bytes] type. Default value is None. + :type output: ~azure.mgmt.streamanalytics.models.Output or IO[bytes] :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: @@ -876,7 +812,7 @@ async def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -895,12 +831,13 @@ async def begin_test( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = self._deserialize("ResourceTestStatus", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -910,14 +847,12 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return 
AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.ResourceTestStatus].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_test.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test" - } + return AsyncLROPoller[_models.ResourceTestStatus]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py index 75f3f643a83b..c428af76a7a6 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from io import IOBase -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -16,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -29,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._private_endpoints_operations import ( build_create_or_update_request, build_delete_request, @@ -37,6 +39,10 @@ build_list_by_cluster_request, ) +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -96,7 +102,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpoint or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.PrivateEndpoint :raises ~azure.core.exceptions.HttpResponseError: @@ -108,7 +113,7 @@ async def create_or_update( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - private_endpoint: IO, + private_endpoint: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -126,7 +131,7 @@ async def create_or_update( :type private_endpoint_name: str :param private_endpoint: The definition of the private endpoint that will be used to create a new cluster or replace the existing one. Required. - :type private_endpoint: IO + :type private_endpoint: IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -138,7 +143,6 @@ async def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpoint or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.PrivateEndpoint :raises ~azure.core.exceptions.HttpResponseError: @@ -150,7 +154,7 @@ async def create_or_update( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - private_endpoint: Union[_models.PrivateEndpoint, IO], + private_endpoint: Union[_models.PrivateEndpoint, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -165,9 +169,9 @@ async def create_or_update( :param private_endpoint_name: The name of the private endpoint. Required. 
:type private_endpoint_name: str :param private_endpoint: The definition of the private endpoint that will be used to create a - new cluster or replace the existing one. Is either a PrivateEndpoint type or a IO type. + new cluster or replace the existing one. Is either a PrivateEndpoint type or a IO[bytes] type. Required. - :type private_endpoint: ~azure.mgmt.streamanalytics.models.PrivateEndpoint or IO + :type private_endpoint: ~azure.mgmt.streamanalytics.models.PrivateEndpoint or IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -176,15 +180,11 @@ async def create_or_update( an existing record set. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpoint or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.PrivateEndpoint :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -195,7 +195,7 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.PrivateEndpoint] = kwargs.pop("cls", None) @@ -207,7 +207,7 @@ async def create_or_update( else: _json = self._serialize.body(private_endpoint, "PrivateEndpoint") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, cluster_name=cluster_name, private_endpoint_name=private_endpoint_name, @@ -218,16 +218,14 @@ async def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -237,21 +235,13 @@ async def create_or_update( error = 
self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("PrivateEndpoint", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("PrivateEndpoint", pipeline_response) + deserialized = self._deserialize("PrivateEndpoint", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" - } - @distributed_trace_async async def get( self, resource_group_name: str, cluster_name: str, private_endpoint_name: str, **kwargs: Any @@ -265,12 +255,11 @@ async def get( :type cluster_name: str :param private_endpoint_name: The name of the private endpoint. Required. 
:type private_endpoint_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpoint or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.PrivateEndpoint :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -281,25 +270,23 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PrivateEndpoint] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, cluster_name=cluster_name, private_endpoint_name=private_endpoint_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -309,21 +296,17 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpoint", pipeline_response) + deserialized = self._deserialize("PrivateEndpoint", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - 
return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" - } + return deserialized # type: ignore - async def _delete_initial( # pylint: disable=inconsistent-return-statements + async def _delete_initial( self, resource_group_name: str, cluster_name: str, private_endpoint_name: str, **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -334,40 +317,43 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, cluster_name=cluster_name, private_endpoint_name=private_endpoint_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) 
response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" - } + return deserialized # type: ignore @distributed_trace_async async def begin_delete( @@ -382,14 +368,6 @@ async def begin_delete( :type cluster_name: str :param private_endpoint_name: The name of the private endpoint. Required. :type private_endpoint_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -397,13 +375,13 @@ async def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, private_endpoint_name=private_endpoint_name, @@ -413,11 +391,12 @@ async def begin_delete( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -426,17 +405,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) 
# type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace def list_by_cluster( @@ -449,7 +424,6 @@ def list_by_cluster( :type resource_group_name: str :param cluster_name: The name of the cluster. Required. :type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PrivateEndpoint or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.PrivateEndpoint] @@ -458,10 +432,10 @@ def list_by_cluster( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PrivateEndpointListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -472,24 +446,32 @@ def list_by_cluster( def prepare_request(next_link=None): if not next_link: - request = build_list_by_cluster_request( + _request = build_list_by_cluster_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_cluster.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", 
next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("PrivateEndpointListResult", pipeline_response) @@ -499,11 +481,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -515,7 +497,3 @@ async def get_next(next_link=None): return pipeline_response return AsyncItemPaged(get_next, extract_data) - - list_by_cluster.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints" - } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_sku_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_sku_operations.py deleted file mode 100644 index 17dec3141d05..000000000000 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_sku_operations.py +++ /dev/null @@ -1,135 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._vendor import _convert_request -from ...operations._sku_operations import build_list_request - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class SkuOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.streamanalytics.aio.StreamAnalyticsManagementClient`'s - :attr:`sku` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, job_name: str, **kwargs: Any - ) -> AsyncIterable["_models.GetStreamingJobSkuResult"]: - """Gets a list of available SKUs about the specified streaming job. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param job_name: The name of the streaming job. Required. - :type job_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either GetStreamingJobSkuResult or the result of - cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.GetStreamingJobSkuResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - cls: ClsType[_models.GetStreamingJobSkuResults] = kwargs.pop("cls", None) - - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_request( - resource_group_name=resource_group_name, - job_name=job_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list.metadata["url"], - 
headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("GetStreamingJobSkuResults", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/skus" - } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py index e5f246f37a20..e095db66f533 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py @@ -1,4 
+1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -16,12 +18,13 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -29,7 +32,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._streaming_jobs_operations import ( build_create_or_replace_request, build_delete_request, @@ -42,6 +44,10 @@ build_update_request, ) +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -69,12 +75,12 @@ async def _create_or_replace_initial( self, resource_group_name: str, job_name: str, - streaming_job: Union[_models.StreamingJob, IO], + streaming_job: Union[_models.StreamingJob, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any - ) -> _models.StreamingJob: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -85,9 +91,9 @@ async def _create_or_replace_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -97,7 +103,7 @@ async def _create_or_replace_initial( else: _json = self._serialize.body(streaming_job, "StreamingJob") - request = build_create_or_replace_request( + _request = build_create_or_replace_request( resource_group_name=resource_group_name, job_name=job_name, 
subscription_id=self._config.subscription_id, @@ -107,45 +113,38 @@ async def _create_or_replace_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_replace_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} - if response.status_code == 200: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("StreamingJob", pipeline_response) - - if response.status_code == 201: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("StreamingJob", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - _create_or_replace_initial.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } - @overload async def begin_create_or_replace( self, @@ -179,14 +178,6 @@ async def begin_create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StreamingJob or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.StreamingJob] @@ -198,7 +189,7 @@ async def begin_create_or_replace( self, resource_group_name: str, job_name: str, - streaming_job: IO, + streaming_job: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -214,7 +205,7 @@ async def begin_create_or_replace( :type job_name: str :param streaming_job: The definition of the streaming job that will be used to create a new streaming job or replace the existing one. Required. - :type streaming_job: IO + :type streaming_job: IO[bytes] :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. 
@@ -226,14 +217,6 @@ async def begin_create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either StreamingJob or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.StreamingJob] @@ -245,7 +228,7 @@ async def begin_create_or_replace( self, resource_group_name: str, job_name: str, - streaming_job: Union[_models.StreamingJob, IO], + streaming_job: Union[_models.StreamingJob, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -258,9 +241,9 @@ async def begin_create_or_replace( :param job_name: The name of the streaming job. Required. :type job_name: str :param streaming_job: The definition of the streaming job that will be used to create a new - streaming job or replace the existing one. Is either a StreamingJob type or a IO type. + streaming job or replace the existing one. Is either a StreamingJob type or a IO[bytes] type. Required. - :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO + :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO[bytes] :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current record set. 
Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -269,17 +252,6 @@ async def begin_create_or_replace( updating an existing record set. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either StreamingJob or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.StreamingJob] @@ -288,7 +260,7 @@ async def begin_create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -308,6 +280,7 @@ async def begin_create_or_replace( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): @@ -315,9 +288,9 @@ def get_long_running_output(pipeline_response): response = pipeline_response.http_response response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("StreamingJob", pipeline_response) + deserialized = self._deserialize("StreamingJob", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized if polling is True: @@ -327,17 +300,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[_models.StreamingJob].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, 
get_long_running_output, polling_method) # type: ignore - - begin_create_or_replace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } + return AsyncLROPoller[_models.StreamingJob]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @overload async def update( @@ -371,7 +342,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StreamingJob or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.StreamingJob :raises ~azure.core.exceptions.HttpResponseError: @@ -382,7 +352,7 @@ async def update( self, resource_group_name: str, job_name: str, - streaming_job: IO, + streaming_job: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -401,7 +371,7 @@ async def update( Any properties that are set to null here will mean that the corresponding property in the existing input will remain the same and not change as a result of this PATCH operation. Required. - :type streaming_job: IO + :type streaming_job: IO[bytes] :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -409,7 +379,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StreamingJob or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.StreamingJob :raises ~azure.core.exceptions.HttpResponseError: @@ -420,7 +389,7 @@ async def update( self, resource_group_name: str, job_name: str, - streaming_job: Union[_models.StreamingJob, IO], + streaming_job: Union[_models.StreamingJob, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> _models.StreamingJob: @@ -436,21 +405,17 @@ async def update( corresponding properties in the existing streaming job (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing input will remain the same and not change as a result of this PATCH operation. Is - either a StreamingJob type or a IO type. Required. - :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO + either a StreamingJob type or a IO[bytes] type. Required. + :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO[bytes] :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StreamingJob or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.StreamingJob :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -461,7 +426,7 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) @@ -473,7 +438,7 @@ async def update( else: _json = self._serialize.body(streaming_job, "StreamingJob") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, @@ -482,16 +447,14 @@ async def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -504,21 +467,15 @@ async def update( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = 
self._deserialize("StreamingJob", pipeline_response) + deserialized = self._deserialize("StreamingJob", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } + return deserialized # type: ignore - async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, job_name: str, **kwargs: Any - ) -> None: - error_map = { + async def _delete_initial(self, resource_group_name: str, job_name: str, **kwargs: Any) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -529,39 +486,42 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } + return deserialized # type: ignore @distributed_trace_async async def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: Any) -> AsyncLROPoller[None]: @@ -572,14 +532,6 @@ async def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: :type resource_group_name: str :param job_name: The name of the streaming job. Required. :type job_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -587,13 +539,13 @@ async def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( # type: ignore + raw_result = await self._delete_initial( resource_group_name=resource_group_name, job_name=job_name, api_version=api_version, @@ -602,11 +554,12 @@ async def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -615,17 +568,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - 
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace_async async def get( @@ -643,12 +592,11 @@ async def get( parameter is absent. The default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and 'functions'. Default value is None. :type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StreamingJob or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.StreamingJob :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -659,25 +607,23 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -690,16 +636,12 @@ async def get( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("StreamingJob", pipeline_response) + deserialized = self._deserialize("StreamingJob", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } + return deserialized # type: ignore @distributed_trace def list_by_resource_group( @@ -715,7 +657,6 @@ def list_by_resource_group( parameter is absent. The default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and 'functions'. Default value is None. 
:type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either StreamingJob or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.StreamingJob] @@ -724,10 +665,10 @@ def list_by_resource_group( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.StreamingJobListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -738,24 +679,32 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, expand=expand, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version 
+ _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("StreamingJobListResult", pipeline_response) @@ -765,11 +714,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -782,10 +731,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs" - } - @distributed_trace def list(self, expand: Optional[str] = None, **kwargs: Any) -> AsyncIterable["_models.StreamingJob"]: """Lists all of the streaming jobs in the given subscription. @@ -795,7 +740,6 @@ def list(self, expand: Optional[str] = None, **kwargs: Any) -> AsyncIterable["_m parameter is absent. The default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and 'functions'. Default value is None. 
:type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either StreamingJob or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.StreamingJob] @@ -804,10 +748,10 @@ def list(self, expand: Optional[str] = None, **kwargs: Any) -> AsyncIterable["_m _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.StreamingJobListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -818,23 +762,31 @@ def list(self, expand: Optional[str] = None, **kwargs: Any) -> AsyncIterable["_m def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, expand=expand, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = 
self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request async def extract_data(pipeline_response): deserialized = self._deserialize("StreamingJobListResult", pipeline_response) @@ -844,11 +796,11 @@ async def extract_data(pipeline_response): return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -861,16 +813,14 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs"} - - async def _start_initial( # pylint: disable=inconsistent-return-statements + async def _start_initial( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional[Union[_models.StartStreamingJobParameters, IO]] = None, + start_job_parameters: Optional[Union[_models.StartStreamingJobParameters, IO[bytes]]] = None, **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -881,9 +831,9 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -896,7 +846,7 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements else: _json = None - request = build_start_request( + _request = build_start_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, @@ -904,31 +854,34 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._start_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _start_initial.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start" - } + return deserialized # type: ignore @overload async def begin_start( @@ -954,14 +907,6 @@ async def begin_start( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -972,7 +917,7 @@ async def begin_start( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional[IO] = None, + start_job_parameters: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -987,18 +932,10 @@ async def begin_start( :type job_name: str :param start_job_parameters: Parameters applicable to a start streaming job operation. Default value is None. - :type start_job_parameters: IO + :type start_job_parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1009,7 +946,7 @@ async def begin_start( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional[Union[_models.StartStreamingJobParameters, IO]] = None, + start_job_parameters: Optional[Union[_models.StartStreamingJobParameters, IO[bytes]]] = None, **kwargs: Any ) -> AsyncLROPoller[None]: """Starts a streaming job. Once a job is started it will start processing input events and produce @@ -1021,20 +958,9 @@ async def begin_start( :param job_name: The name of the streaming job. Required. :type job_name: str :param start_job_parameters: Parameters applicable to a start streaming job operation. Is - either a StartStreamingJobParameters type or a IO type. Default value is None. + either a StartStreamingJobParameters type or a IO[bytes] type. Default value is None. :type start_job_parameters: ~azure.mgmt.streamanalytics.models.StartStreamingJobParameters or - IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + IO[bytes] :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1042,14 +968,14 @@ async def begin_start( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._start_initial( # type: ignore + raw_result = await self._start_initial( resource_group_name=resource_group_name, job_name=job_name, start_job_parameters=start_job_parameters, @@ -1060,11 +986,12 @@ async def begin_start( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: 
disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -1073,22 +1000,16 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_start.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _stop_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, job_name: str, **kwargs: Any - ) -> None: - error_map = { + async def _stop_initial(self, resource_group_name: str, job_name: str, **kwargs: Any) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1099,39 +1020,42 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) - 
request = build_stop_request( + _request = build_stop_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._stop_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _stop_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop" - } + return deserialized # type: ignore @distributed_trace_async async def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: Any) -> AsyncLROPoller[None]: @@ -1143,14 +1067,6 @@ async def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: An :type resource_group_name: str :param job_name: The name of the streaming job. Required. 
:type job_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1158,13 +1074,13 @@ async def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: An _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._stop_initial( # type: ignore + raw_result = await self._stop_initial( resource_group_name=resource_group_name, job_name=job_name, api_version=api_version, @@ -1173,11 +1089,12 @@ async def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: An params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if 
cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -1186,26 +1103,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_stop.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _scale_initial( # pylint: disable=inconsistent-return-statements + async def _scale_initial( self, resource_group_name: str, job_name: str, - scale_job_parameters: Optional[Union[_models.ScaleStreamingJobParameters, IO]] = None, + scale_job_parameters: Optional[Union[_models.ScaleStreamingJobParameters, IO[bytes]]] = None, **kwargs: Any - ) -> None: - error_map = { + ) -> AsyncIterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1216,9 +1129,9 @@ async def _scale_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) 
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1231,7 +1144,7 @@ async def _scale_initial( # pylint: disable=inconsistent-return-statements else: _json = None - request = build_scale_request( + _request = build_scale_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, @@ -1239,31 +1152,34 @@ async def _scale_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._scale_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _scale_initial.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale" - } + return deserialized # type: ignore @overload async def begin_scale( @@ -1288,14 +1204,6 @@ async def begin_scale( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1306,7 +1214,7 @@ async def begin_scale( self, resource_group_name: str, job_name: str, - scale_job_parameters: Optional[IO] = None, + scale_job_parameters: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -1320,18 +1228,10 @@ async def begin_scale( :type job_name: str :param scale_job_parameters: Parameters applicable to a scale streaming job operation. Default value is None. - :type scale_job_parameters: IO + :type scale_job_parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1342,7 +1242,7 @@ async def begin_scale( self, resource_group_name: str, job_name: str, - scale_job_parameters: Optional[Union[_models.ScaleStreamingJobParameters, IO]] = None, + scale_job_parameters: Optional[Union[_models.ScaleStreamingJobParameters, IO[bytes]]] = None, **kwargs: Any ) -> AsyncLROPoller[None]: """Scales a streaming job when the job is running. @@ -1353,20 +1253,9 @@ async def begin_scale( :param job_name: The name of the streaming job. Required. :type job_name: str :param scale_job_parameters: Parameters applicable to a scale streaming job operation. Is - either a ScaleStreamingJobParameters type or a IO type. Default value is None. + either a ScaleStreamingJobParameters type or a IO[bytes] type. Default value is None. :type scale_job_parameters: ~azure.mgmt.streamanalytics.models.ScaleStreamingJobParameters or - IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + IO[bytes] :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1374,14 +1263,14 @@ async def begin_scale( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._scale_initial( # type: ignore + raw_result = await self._scale_initial( resource_group_name=resource_group_name, job_name=job_name, scale_job_parameters=scale_job_parameters, @@ -1392,11 +1281,12 @@ async def begin_scale( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: 
disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) @@ -1405,14 +1295,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return AsyncLROPoller.from_continuation_token( + return AsyncLROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_scale.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale" - } + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py index 6445befedee5..ef916de73648 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,8 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from io import IOBase -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, Callable, Dict, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,25 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request -from ...operations._subscriptions_operations import ( - build_compile_query_request, - build_list_quotas_request, - build_sample_input_request, - build_test_input_request, - build_test_output_request, - build_test_query_request, -) +from ...operations._subscriptions_operations import build_list_quotas_request +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -64,15 +57,15 @@ def __init__(self, *args, **kwargs) -> None: async def list_quotas(self, location: str, **kwargs: Any) -> _models.SubscriptionQuotasListResult: """Retrieves the subscription's current quota information in a particular region. - :param location: The region to which the request is sent. 
You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. + :param location: The region in which to retrieve the subscription's quota information. You can + find out which regions Azure Stream Analytics is supported in here: + https://azure.microsoft.com/en-us/regions/. Required. :type location: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SubscriptionQuotasListResult or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.SubscriptionQuotasListResult :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -83,353 +76,21 @@ async def list_quotas(self, location: str, **kwargs: Any) -> _models.Subscriptio _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SubscriptionQuotasListResult] = kwargs.pop("cls", None) - request = build_list_quotas_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list_quotas.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("SubscriptionQuotasListResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - list_quotas.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas" - } - - async def _test_query_initial( - self, location: str, test_query: Union[_models.TestQuery, IO], **kwargs: Any - ) -> Optional[_models.QueryTestingResult]: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.QueryTestingResult]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_query, (IOBase, bytes)): - _content = test_query - else: - _json = self._serialize.body(test_query, "TestQuery") - - request = build_test_query_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_query_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, 
stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("QueryTestingResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_query_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery" - } - - @overload - async def begin_test_query( - self, location: str, test_query: _models.TestQuery, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Required. - :type test_query: ~azure.mgmt.streamanalytics.models.TestQuery - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. 
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_test_query( - self, location: str, test_query: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Required. - :type test_query: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_test_query( - self, location: str, test_query: Union[_models.TestQuery, IO], **kwargs: Any - ) -> AsyncLROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Is either a TestQuery type or a IO type. Required. - :type test_query: ~azure.mgmt.streamanalytics.models.TestQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.QueryTestingResult] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._test_query_initial( - location=location, - test_query=test_query, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("QueryTestingResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - 
- begin_test_query.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery" - } - - @overload - async def compile_query( - self, - location: str, - compile_query: _models.CompileQuery, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Required. - :type compile_query: ~azure.mgmt.streamanalytics.models.CompileQuery - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def compile_query( - self, location: str, compile_query: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Required. - :type compile_query: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
- Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def compile_query( - self, location: str, compile_query: Union[_models.CompileQuery, IO], **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Is either a CompileQuery type or a IO type. Required. - :type compile_query: ~azure.mgmt.streamanalytics.models.CompileQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.QueryCompilationResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(compile_query, (IOBase, bytes)): - _content = compile_query - else: - _json = self._serialize.body(compile_query, "CompileQuery") - - request = build_compile_query_request( + _request = build_list_quotas_request( location=location, subscription_id=self._config.subscription_id, api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self.compile_query.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -439,616 +100,9 @@ async def compile_query( error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("QueryCompilationResult", pipeline_response) + deserialized = self._deserialize("SubscriptionQuotasListResult", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - compile_query.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/compileQuery" - } - - async def _sample_input_initial( - self, location: str, sample_input: Union[_models.SampleInput, IO], **kwargs: Any - ) -> _models.SampleInputResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.SampleInputResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(sample_input, (IOBase, bytes)): - _content = sample_input - else: - _json = self._serialize.body(sample_input, "SampleInput") - - request = build_sample_input_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._sample_input_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - 
request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("SampleInputResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _sample_input_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput" - } - - @overload - async def begin_sample_input( - self, location: str, sample_input: _models.SampleInput, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Required. - :type sample_input: ~azure.mgmt.streamanalytics.models.SampleInput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. 
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_sample_input( - self, location: str, sample_input: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Required. - :type sample_input: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_sample_input( - self, location: str, sample_input: Union[_models.SampleInput, IO], **kwargs: Any - ) -> AsyncLROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Is either a SampleInput type or a IO type. Required. - :type sample_input: ~azure.mgmt.streamanalytics.models.SampleInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.SampleInputResult] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._sample_input_initial( - location=location, - sample_input=sample_input, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SampleInputResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore 
- - begin_sample_input.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput" - } - - async def _test_input_initial( - self, location: str, test_input: Union[_models.TestInput, IO], **kwargs: Any - ) -> _models.TestDatasourceResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.TestDatasourceResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_input, (IOBase, bytes)): - _content = test_input - else: - _json = self._serialize.body(test_input, "TestInput") - - request = build_test_input_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_input_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) - - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_input_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput" - } - - @overload - async def begin_test_input( - self, location: str, test_input: _models.TestInput, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. - Required. - :type test_input: ~azure.mgmt.streamanalytics.models.TestInput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_test_input( - self, location: str, test_input: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. - Required. - :type test_input: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_test_input( - self, location: str, test_input: Union[_models.TestInput, IO], **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. Is - either a TestInput type or a IO type. Required. - :type test_input: ~azure.mgmt.streamanalytics.models.TestInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.TestDatasourceResult] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._test_input_initial( - location=location, - test_input=test_input, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: 
ignore - - begin_test_input.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput" - } - - async def _test_output_initial( - self, location: str, test_output: Union[_models.TestOutput, IO], **kwargs: Any - ) -> _models.TestDatasourceResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.TestDatasourceResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_output, (IOBase, bytes)): - _content = test_output - else: - _json = self._serialize.body(test_output, "TestOutput") - - request = build_test_output_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_output_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_output_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput" - } - - @overload - async def begin_test_output( - self, location: str, test_output: _models.TestOutput, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Required. - :type test_output: ~azure.mgmt.streamanalytics.models.TestOutput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_test_output( - self, location: str, test_output: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Required. - :type test_output: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_test_output( - self, location: str, test_output: Union[_models.TestOutput, IO], **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Is either a TestOutput type or a IO type. Required. - :type test_output: ~azure.mgmt.streamanalytics.models.TestOutput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.TestDatasourceResult] = kwargs.pop("cls", None) - polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = await self._test_output_initial( - location=location, - test_output=test_output, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method: AsyncPollingMethod = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # 
type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore - begin_test_output.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput" - } + return deserialized # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py index 61a869a41341..b5b37aaf0c63 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,8 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload +import sys +from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,20 +19,22 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._transformations_operations import ( build_create_or_replace_request, build_get_request, build_update_request, ) +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -92,7 +95,6 @@ async def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: @@ -104,7 +106,7 @@ async def create_or_replace( resource_group_name: str, job_name: str, transformation_name: str, - transformation: IO, + transformation: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -123,7 +125,7 @@ async def create_or_replace( :type transformation_name: str :param transformation: The definition of the transformation that will be used to create a new transformation or replace the existing one under the streaming job. Required. - :type transformation: IO + :type transformation: IO[bytes] :param if_match: The ETag of the transformation. Omit this value to always overwrite the current transformation. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -135,7 +137,6 @@ async def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: @@ -147,7 +148,7 @@ async def create_or_replace( resource_group_name: str, job_name: str, transformation_name: str, - transformation: Union[_models.Transformation, IO], + transformation: Union[_models.Transformation, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -164,8 +165,8 @@ async def create_or_replace( :type transformation_name: str :param transformation: The definition of the transformation that will be used to create a new transformation or replace the existing one under the streaming job. Is either a Transformation - type or a IO type. Required. - :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO + type or a IO[bytes] type. Required. + :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO[bytes] :param if_match: The ETag of the transformation. Omit this value to always overwrite the current transformation. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -174,15 +175,11 @@ async def create_or_replace( updating an existing transformation. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -193,7 +190,7 @@ async def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Transformation] = kwargs.pop("cls", None) @@ -205,7 +202,7 @@ async def create_or_replace( else: _json = self._serialize.body(transformation, "Transformation") - request = build_create_or_replace_request( + _request = build_create_or_replace_request( resource_group_name=resource_group_name, job_name=job_name, transformation_name=transformation_name, @@ -216,16 +213,14 @@ async def create_or_replace( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_replace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -236,25 +231,15 @@ async def create_or_replace( raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) response_headers = {} - if response.status_code == 200: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("Transformation", pipeline_response) - - if response.status_code == 201: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Transformation", pipeline_response) + deserialized = self._deserialize("Transformation", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - create_or_replace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}" - } - @overload async def update( self, @@ -291,7 +276,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: @@ -303,7 +287,7 @@ async def update( resource_group_name: str, job_name: str, transformation_name: str, - transformation: IO, + transformation: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -325,7 +309,7 @@ async def update( updated). Any properties that are set to null here will mean that the corresponding property in the existing transformation will remain the same and not change as a result of this PATCH operation. Required. 
- :type transformation: IO + :type transformation: IO[bytes] :param if_match: The ETag of the transformation. Omit this value to always overwrite the current transformation. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -333,7 +317,6 @@ async def update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: @@ -345,7 +328,7 @@ async def update( resource_group_name: str, job_name: str, transformation_name: str, - transformation: Union[_models.Transformation, IO], + transformation: Union[_models.Transformation, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> _models.Transformation: @@ -364,21 +347,17 @@ async def update( the corresponding properties in the existing transformation (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing transformation will remain the same and not change as a result of this PATCH - operation. Is either a Transformation type or a IO type. Required. - :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO + operation. Is either a Transformation type or a IO[bytes] type. Required. + :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO[bytes] :param if_match: The ETag of the transformation. Omit this value to always overwrite the current transformation. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. 
- Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -389,7 +368,7 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Transformation] = kwargs.pop("cls", None) @@ -401,7 +380,7 @@ async def update( else: _json = self._serialize.body(transformation, "Transformation") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, job_name=job_name, transformation_name=transformation_name, @@ -411,16 +390,14 @@ async def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -433,16 +410,12 @@ async def update( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized 
= self._deserialize("Transformation", pipeline_response) + deserialized = self._deserialize("Transformation", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}" - } + return deserialized # type: ignore @distributed_trace_async async def get( @@ -457,12 +430,11 @@ async def get( :type job_name: str :param transformation_name: The name of the transformation. Required. :type transformation_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -473,25 +445,23 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Transformation] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, job_name=job_name, transformation_name=transformation_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url 
= self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -504,13 +474,9 @@ async def get( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Transformation", pipeline_response) + deserialized = self._deserialize("Transformation", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}" - } + return deserialized # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py index 0ae4229170c1..7873203fda0d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py @@ -8,20 +8,14 @@ from ._models_py3 import AggregateFunctionProperties from ._models_py3 import AvroSerialization -from ._models_py3 import AzureDataExplorerOutputDataSource from ._models_py3 import AzureDataLakeStoreOutputDataSource from ._models_py3 import AzureDataLakeStoreOutputDataSourceProperties from ._models_py3 import AzureFunctionOutputDataSource -from ._models_py3 import AzureMachineLearningServiceFunctionBinding -from ._models_py3 import AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters -from ._models_py3 import 
AzureMachineLearningServiceInputColumn -from ._models_py3 import AzureMachineLearningServiceInputs -from ._models_py3 import AzureMachineLearningServiceOutputColumn -from ._models_py3 import AzureMachineLearningStudioFunctionBinding -from ._models_py3 import AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters -from ._models_py3 import AzureMachineLearningStudioInputColumn -from ._models_py3 import AzureMachineLearningStudioInputs -from ._models_py3 import AzureMachineLearningStudioOutputColumn +from ._models_py3 import AzureMachineLearningWebServiceFunctionBinding +from ._models_py3 import AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters +from ._models_py3 import AzureMachineLearningWebServiceInputColumn +from ._models_py3 import AzureMachineLearningWebServiceInputs +from ._models_py3 import AzureMachineLearningWebServiceOutputColumn from ._models_py3 import AzureSqlDatabaseDataSourceProperties from ._models_py3 import AzureSqlDatabaseOutputDataSource from ._models_py3 import AzureSqlDatabaseOutputDataSourceProperties @@ -37,20 +31,14 @@ from ._models_py3 import BlobReferenceInputDataSourceProperties from ._models_py3 import BlobStreamInputDataSource from ._models_py3 import BlobStreamInputDataSourceProperties -from ._models_py3 import CSharpFunctionBinding -from ._models_py3 import CSharpFunctionRetrieveDefaultDefinitionParameters from ._models_py3 import Cluster from ._models_py3 import ClusterInfo from ._models_py3 import ClusterJob from ._models_py3 import ClusterJobListResult from ._models_py3 import ClusterListResult -from ._models_py3 import ClusterProperties from ._models_py3 import ClusterSku -from ._models_py3 import CompileQuery from ._models_py3 import Compression from ._models_py3 import CsvSerialization -from ._models_py3 import CustomClrSerialization -from ._models_py3 import DeltaSerialization from ._models_py3 import DiagnosticCondition from ._models_py3 import Diagnostics from ._models_py3 import 
DocumentDbOutputDataSource @@ -58,7 +46,6 @@ from ._models_py3 import ErrorDetails from ._models_py3 import ErrorError from ._models_py3 import ErrorResponse -from ._models_py3 import EventGridStreamInputDataSource from ._models_py3 import EventHubDataSourceProperties from ._models_py3 import EventHubOutputDataSource from ._models_py3 import EventHubOutputDataSourceProperties @@ -66,7 +53,6 @@ from ._models_py3 import EventHubStreamInputDataSourceProperties from ._models_py3 import EventHubV2OutputDataSource from ._models_py3 import EventHubV2StreamInputDataSource -from ._models_py3 import External from ._models_py3 import FileReferenceInputDataSource from ._models_py3 import Function from ._models_py3 import FunctionBinding @@ -80,20 +66,15 @@ from ._models_py3 import GatewayMessageBusSourceProperties from ._models_py3 import GatewayMessageBusStreamInputDataSource from ._models_py3 import GatewayMessageBusStreamInputDataSourceProperties -from ._models_py3 import GetStreamingJobSkuResult -from ._models_py3 import GetStreamingJobSkuResultSku -from ._models_py3 import GetStreamingJobSkuResults from ._models_py3 import Identity from ._models_py3 import Input from ._models_py3 import InputListResult from ._models_py3 import InputProperties -from ._models_py3 import InputWatermarkProperties from ._models_py3 import IoTHubStreamInputDataSource from ._models_py3 import JavaScriptFunctionBinding from ._models_py3 import JavaScriptFunctionRetrieveDefaultDefinitionParameters from ._models_py3 import JobStorageAccount from ._models_py3 import JsonSerialization -from ._models_py3 import LastOutputEventTimestamp from ._models_py3 import OAuthBasedDataSourceProperties from ._models_py3 import Operation from ._models_py3 import OperationDisplay @@ -101,34 +82,18 @@ from ._models_py3 import Output from ._models_py3 import OutputDataSource from ._models_py3 import OutputListResult -from ._models_py3 import OutputWatermarkProperties from ._models_py3 import ParquetSerialization 
-from ._models_py3 import PostgreSQLDataSourceProperties -from ._models_py3 import PostgreSQLOutputDataSource -from ._models_py3 import PostgreSQLOutputDataSourceProperties from ._models_py3 import PowerBIOutputDataSource from ._models_py3 import PowerBIOutputDataSourceProperties from ._models_py3 import PrivateEndpoint from ._models_py3 import PrivateEndpointListResult -from ._models_py3 import PrivateEndpointProperties from ._models_py3 import PrivateLinkConnectionState from ._models_py3 import PrivateLinkServiceConnection from ._models_py3 import ProxyResource -from ._models_py3 import QueryCompilationError -from ._models_py3 import QueryCompilationResult -from ._models_py3 import QueryFunction -from ._models_py3 import QueryInput -from ._models_py3 import QueryTestingResult -from ._models_py3 import RawOutputDatasource -from ._models_py3 import RawReferenceInputDataSource -from ._models_py3 import RawStreamInputDataSource from ._models_py3 import ReferenceInputDataSource from ._models_py3 import ReferenceInputProperties -from ._models_py3 import RefreshConfiguration from ._models_py3 import Resource from ._models_py3 import ResourceTestStatus -from ._models_py3 import SampleInput -from ._models_py3 import SampleInputResult from ._models_py3 import ScalarFunctionProperties from ._models_py3 import ScaleStreamingJobParameters from ._models_py3 import Serialization @@ -138,7 +103,6 @@ from ._models_py3 import ServiceBusTopicOutputDataSource from ._models_py3 import ServiceBusTopicOutputDataSourceProperties from ._models_py3 import Sku -from ._models_py3 import SkuCapacity from ._models_py3 import StartStreamingJobParameters from ._models_py3 import StorageAccount from ._models_py3 import StreamInputDataSource @@ -148,41 +112,25 @@ from ._models_py3 import SubResource from ._models_py3 import SubscriptionQuota from ._models_py3 import SubscriptionQuotasListResult -from ._models_py3 import TestDatasourceResult -from ._models_py3 import TestInput -from ._models_py3 
import TestOutput -from ._models_py3 import TestQuery -from ._models_py3 import TestQueryDiagnostics from ._models_py3 import TrackedResource from ._models_py3 import Transformation from ._stream_analytics_management_client_enums import AuthenticationMode -from ._stream_analytics_management_client_enums import BlobWriteMode from ._stream_analytics_management_client_enums import ClusterProvisioningState from ._stream_analytics_management_client_enums import ClusterSkuName from ._stream_analytics_management_client_enums import CompatibilityLevel from ._stream_analytics_management_client_enums import CompressionType from ._stream_analytics_management_client_enums import ContentStoragePolicy from ._stream_analytics_management_client_enums import Encoding -from ._stream_analytics_management_client_enums import EventGridEventSchemaType from ._stream_analytics_management_client_enums import EventSerializationType from ._stream_analytics_management_client_enums import EventsOutOfOrderPolicy -from ._stream_analytics_management_client_enums import InputWatermarkMode from ._stream_analytics_management_client_enums import JobState from ._stream_analytics_management_client_enums import JobType from ._stream_analytics_management_client_enums import JsonOutputSerializationFormat from ._stream_analytics_management_client_enums import OutputErrorPolicy from ._stream_analytics_management_client_enums import OutputStartMode -from ._stream_analytics_management_client_enums import OutputWatermarkMode -from ._stream_analytics_management_client_enums import QueryTestingResultStatus from ._stream_analytics_management_client_enums import RefreshType -from ._stream_analytics_management_client_enums import ResourceType -from ._stream_analytics_management_client_enums import SampleInputResultStatus -from ._stream_analytics_management_client_enums import SkuCapacityScaleType from ._stream_analytics_management_client_enums import SkuName -from ._stream_analytics_management_client_enums import 
TestDatasourceResultStatus -from ._stream_analytics_management_client_enums import UpdatableUdfRefreshType -from ._stream_analytics_management_client_enums import UpdateMode from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk @@ -190,20 +138,14 @@ __all__ = [ "AggregateFunctionProperties", "AvroSerialization", - "AzureDataExplorerOutputDataSource", "AzureDataLakeStoreOutputDataSource", "AzureDataLakeStoreOutputDataSourceProperties", "AzureFunctionOutputDataSource", - "AzureMachineLearningServiceFunctionBinding", - "AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters", - "AzureMachineLearningServiceInputColumn", - "AzureMachineLearningServiceInputs", - "AzureMachineLearningServiceOutputColumn", - "AzureMachineLearningStudioFunctionBinding", - "AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters", - "AzureMachineLearningStudioInputColumn", - "AzureMachineLearningStudioInputs", - "AzureMachineLearningStudioOutputColumn", + "AzureMachineLearningWebServiceFunctionBinding", + "AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters", + "AzureMachineLearningWebServiceInputColumn", + "AzureMachineLearningWebServiceInputs", + "AzureMachineLearningWebServiceOutputColumn", "AzureSqlDatabaseDataSourceProperties", "AzureSqlDatabaseOutputDataSource", "AzureSqlDatabaseOutputDataSourceProperties", @@ -219,20 +161,14 @@ "BlobReferenceInputDataSourceProperties", "BlobStreamInputDataSource", "BlobStreamInputDataSourceProperties", - "CSharpFunctionBinding", - "CSharpFunctionRetrieveDefaultDefinitionParameters", "Cluster", "ClusterInfo", "ClusterJob", "ClusterJobListResult", "ClusterListResult", - "ClusterProperties", "ClusterSku", - "CompileQuery", "Compression", "CsvSerialization", - "CustomClrSerialization", - "DeltaSerialization", "DiagnosticCondition", "Diagnostics", "DocumentDbOutputDataSource", @@ -240,7 +176,6 @@ "ErrorDetails", 
"ErrorError", "ErrorResponse", - "EventGridStreamInputDataSource", "EventHubDataSourceProperties", "EventHubOutputDataSource", "EventHubOutputDataSourceProperties", @@ -248,7 +183,6 @@ "EventHubStreamInputDataSourceProperties", "EventHubV2OutputDataSource", "EventHubV2StreamInputDataSource", - "External", "FileReferenceInputDataSource", "Function", "FunctionBinding", @@ -262,20 +196,15 @@ "GatewayMessageBusSourceProperties", "GatewayMessageBusStreamInputDataSource", "GatewayMessageBusStreamInputDataSourceProperties", - "GetStreamingJobSkuResult", - "GetStreamingJobSkuResultSku", - "GetStreamingJobSkuResults", "Identity", "Input", "InputListResult", "InputProperties", - "InputWatermarkProperties", "IoTHubStreamInputDataSource", "JavaScriptFunctionBinding", "JavaScriptFunctionRetrieveDefaultDefinitionParameters", "JobStorageAccount", "JsonSerialization", - "LastOutputEventTimestamp", "OAuthBasedDataSourceProperties", "Operation", "OperationDisplay", @@ -283,34 +212,18 @@ "Output", "OutputDataSource", "OutputListResult", - "OutputWatermarkProperties", "ParquetSerialization", - "PostgreSQLDataSourceProperties", - "PostgreSQLOutputDataSource", - "PostgreSQLOutputDataSourceProperties", "PowerBIOutputDataSource", "PowerBIOutputDataSourceProperties", "PrivateEndpoint", "PrivateEndpointListResult", - "PrivateEndpointProperties", "PrivateLinkConnectionState", "PrivateLinkServiceConnection", "ProxyResource", - "QueryCompilationError", - "QueryCompilationResult", - "QueryFunction", - "QueryInput", - "QueryTestingResult", - "RawOutputDatasource", - "RawReferenceInputDataSource", - "RawStreamInputDataSource", "ReferenceInputDataSource", "ReferenceInputProperties", - "RefreshConfiguration", "Resource", "ResourceTestStatus", - "SampleInput", - "SampleInputResult", "ScalarFunctionProperties", "ScaleStreamingJobParameters", "Serialization", @@ -320,7 +233,6 @@ "ServiceBusTopicOutputDataSource", "ServiceBusTopicOutputDataSourceProperties", "Sku", - "SkuCapacity", 
"StartStreamingJobParameters", "StorageAccount", "StreamInputDataSource", @@ -330,40 +242,24 @@ "SubResource", "SubscriptionQuota", "SubscriptionQuotasListResult", - "TestDatasourceResult", - "TestInput", - "TestOutput", - "TestQuery", - "TestQueryDiagnostics", "TrackedResource", "Transformation", "AuthenticationMode", - "BlobWriteMode", "ClusterProvisioningState", "ClusterSkuName", "CompatibilityLevel", "CompressionType", "ContentStoragePolicy", "Encoding", - "EventGridEventSchemaType", "EventSerializationType", "EventsOutOfOrderPolicy", - "InputWatermarkMode", "JobState", "JobType", "JsonOutputSerializationFormat", "OutputErrorPolicy", "OutputStartMode", - "OutputWatermarkMode", - "QueryTestingResultStatus", "RefreshType", - "ResourceType", - "SampleInputResultStatus", - "SkuCapacityScaleType", "SkuName", - "TestDatasourceResultStatus", - "UpdatableUdfRefreshType", - "UpdateMode", ] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py index 225d59c1993c..eae86174f87b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py @@ -9,7 +9,7 @@ import datetime import sys -from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union +from typing import Any, Dict, List, Literal, Optional, TYPE_CHECKING, Union from .. 
import _serialization @@ -17,10 +17,6 @@ from collections.abc import MutableMapping else: from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -36,7 +32,7 @@ class FunctionProperties(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of function. Required. :vartype type: str @@ -98,7 +94,7 @@ class AggregateFunctionProperties(FunctionProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of function. Required. :vartype type: str @@ -154,14 +150,12 @@ class Serialization(_serialization.Model): output. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AvroSerialization, CsvSerialization, CustomClrSerialization, DeltaSerialization, - JsonSerialization, ParquetSerialization + AvroSerialization, CsvSerialization, JsonSerialization, ParquetSerialization - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", - "Parquet", and "Delta". + (CreateOrReplace) requests. Required. 
Known values are: "Csv", "Avro", "Json", and "Parquet". :vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType """ @@ -177,8 +171,6 @@ class Serialization(_serialization.Model): "type": { "Avro": "AvroSerialization", "Csv": "CsvSerialization", - "CustomClr": "CustomClrSerialization", - "Delta": "DeltaSerialization", "Json": "JsonSerialization", "Parquet": "ParquetSerialization", } @@ -194,11 +186,10 @@ class AvroSerialization(Serialization): """Describes how data from an input is serialized or how data is serialized when written to an output in Avro format. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", - "Parquet", and "Delta". + (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", and "Parquet". :vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType :ivar properties: The properties that are associated with the Avro serialization type. Required on PUT (CreateOrReplace) requests. @@ -229,14 +220,13 @@ class OutputDataSource(_serialization.Model): """Describes the data source that output will be written to. You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: - GatewayMessageBusOutputDataSource, AzureFunctionOutputDataSource, PostgreSQLOutputDataSource, - AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, - AzureDataExplorerOutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, - ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, + GatewayMessageBusOutputDataSource, AzureFunctionOutputDataSource, + AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, + ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, - AzureTableOutputDataSource, PowerBIOutputDataSource, RawOutputDatasource + AzureTableOutputDataSource, PowerBIOutputDataSource - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. 
@@ -255,10 +245,8 @@ class OutputDataSource(_serialization.Model): "type": { "GatewayMessageBus": "GatewayMessageBusOutputDataSource", "Microsoft.AzureFunction": "AzureFunctionOutputDataSource", - "Microsoft.DBForPostgreSQL/servers/databases": "PostgreSQLOutputDataSource", "Microsoft.DataLake/Accounts": "AzureDataLakeStoreOutputDataSource", "Microsoft.EventHub/EventHub": "EventHubV2OutputDataSource", - "Microsoft.Kusto/clusters/databases": "AzureDataExplorerOutputDataSource", "Microsoft.ServiceBus/EventHub": "EventHubOutputDataSource", "Microsoft.ServiceBus/Queue": "ServiceBusQueueOutputDataSource", "Microsoft.ServiceBus/Topic": "ServiceBusTopicOutputDataSource", @@ -268,7 +256,6 @@ class OutputDataSource(_serialization.Model): "Microsoft.Storage/DocumentDB": "DocumentDbOutputDataSource", "Microsoft.Storage/Table": "AzureTableOutputDataSource", "PowerBI": "PowerBIOutputDataSource", - "Raw": "RawOutputDatasource", } } @@ -278,73 +265,10 @@ def __init__(self, **kwargs: Any) -> None: self.type: Optional[str] = None -class AzureDataExplorerOutputDataSource(OutputDataSource): - """Describes an Azure Data Explorer output data source. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of data source output will be written to. Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str - :ivar cluster: The name of the Azure Data Explorer cluster. Required on PUT (CreateOrReplace) - requests. - :vartype cluster: str - :ivar database: The name of the Azure Data Explorer database. Required on PUT (CreateOrReplace) - requests. - :vartype database: str - :ivar table: The name of the Azure Table. Required on PUT (CreateOrReplace) requests. - :vartype table: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". 
- :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "cluster": {"key": "properties.cluster", "type": "str"}, - "database": {"key": "properties.database", "type": "str"}, - "table": {"key": "properties.table", "type": "str"}, - "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, - } - - def __init__( - self, - *, - cluster: Optional[str] = None, - database: Optional[str] = None, - table: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs: Any - ) -> None: - """ - :keyword cluster: The name of the Azure Data Explorer cluster. Required on PUT - (CreateOrReplace) requests. - :paramtype cluster: str - :keyword database: The name of the Azure Data Explorer database. Required on PUT - (CreateOrReplace) requests. - :paramtype database: str - :keyword table: The name of the Azure Table. Required on PUT (CreateOrReplace) requests. - :paramtype table: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - super().__init__(**kwargs) - self.type: str = "Microsoft.Kusto/clusters/databases" - self.cluster = cluster - self.database = database - self.table = table - self.authentication_mode = authentication_mode - - class AzureDataLakeStoreOutputDataSource(OutputDataSource): """Describes an Azure Data Lake Store output data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. 
@@ -521,7 +445,7 @@ def __init__( self.token_user_display_name = token_user_display_name -class AzureDataLakeStoreOutputDataSourceProperties(OAuthBasedDataSourceProperties): +class AzureDataLakeStoreOutputDataSourceProperties(OAuthBasedDataSourceProperties): # pylint: disable=name-too-long """The properties that are associated with an Azure Data Lake Store. :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can @@ -637,7 +561,7 @@ def __init__( class AzureFunctionOutputDataSource(OutputDataSource): """Defines the metadata of AzureFunctionOutputDataSource. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. @@ -711,10 +635,9 @@ class FunctionBinding(_serialization.Model): case, this describes the endpoint. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AzureMachineLearningStudioFunctionBinding, AzureMachineLearningServiceFunctionBinding, - CSharpFunctionBinding, JavaScriptFunctionBinding + AzureMachineLearningWebServiceFunctionBinding, JavaScriptFunctionBinding - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the function binding type. Required. 
:vartype type: str @@ -730,9 +653,7 @@ class FunctionBinding(_serialization.Model): _subtype_map = { "type": { - "Microsoft.MachineLearning/WebService": "AzureMachineLearningStudioFunctionBinding", - "Microsoft.MachineLearningServices": "AzureMachineLearningServiceFunctionBinding", - "Microsoft.StreamAnalytics/CLRUdf": "CSharpFunctionBinding", + "Microsoft.MachineLearning/WebService": "AzureMachineLearningWebServiceFunctionBinding", "Microsoft.StreamAnalytics/JavascriptUdf": "JavaScriptFunctionBinding", } } @@ -743,35 +664,28 @@ def __init__(self, **kwargs: Any) -> None: self.type: Optional[str] = None -class AzureMachineLearningServiceFunctionBinding(FunctionBinding): +class AzureMachineLearningWebServiceFunctionBinding(FunctionBinding): # pylint: disable=name-too-long """The binding to an Azure Machine Learning web service. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the function binding type. Required. :vartype type: str :ivar endpoint: The Request-Response execute endpoint of the Azure Machine Learning web - service. + service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. # pylint: disable=line-too-long :vartype endpoint: str :ivar api_key: The API key used to authenticate with Request-Response endpoint. :vartype api_key: str :ivar inputs: The inputs for the Azure Machine Learning web service endpoint. - :vartype inputs: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn] + :vartype inputs: ~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceInputs :ivar outputs: A list of outputs from the Azure Machine Learning web service endpoint execution. 
:vartype outputs: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceOutputColumn] + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceOutputColumn] :ivar batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure ML RRS execute request. Default is 1000. :vartype batch_size: int - :ivar number_of_parallel_requests: The number of parallel requests that will be sent per - partition of your job to the machine learning service. Default is 1. - :vartype number_of_parallel_requests: int - :ivar input_request_name: Label for the input request object. - :vartype input_request_name: str - :ivar output_response_name: Label for the output request object. - :vartype output_response_name: str """ _validation = { @@ -782,12 +696,9 @@ class AzureMachineLearningServiceFunctionBinding(FunctionBinding): "type": {"key": "type", "type": "str"}, "endpoint": {"key": "properties.endpoint", "type": "str"}, "api_key": {"key": "properties.apiKey", "type": "str"}, - "inputs": {"key": "properties.inputs", "type": "[AzureMachineLearningServiceInputColumn]"}, - "outputs": {"key": "properties.outputs", "type": "[AzureMachineLearningServiceOutputColumn]"}, + "inputs": {"key": "properties.inputs", "type": "AzureMachineLearningWebServiceInputs"}, + "outputs": {"key": "properties.outputs", "type": "[AzureMachineLearningWebServiceOutputColumn]"}, "batch_size": {"key": "properties.batchSize", "type": "int"}, - "number_of_parallel_requests": {"key": "properties.numberOfParallelRequests", "type": "int"}, - "input_request_name": {"key": "properties.inputRequestName", "type": "str"}, - "output_response_name": {"key": "properties.outputResponseName", "type": "str"}, } def __init__( @@ -795,60 +706,45 @@ def __init__( *, endpoint: Optional[str] = None, api_key: Optional[str] = None, - inputs: Optional[List["_models.AzureMachineLearningServiceInputColumn"]] = None, - outputs: Optional[List["_models.AzureMachineLearningServiceOutputColumn"]] 
= None, + inputs: Optional["_models.AzureMachineLearningWebServiceInputs"] = None, + outputs: Optional[List["_models.AzureMachineLearningWebServiceOutputColumn"]] = None, batch_size: Optional[int] = None, - number_of_parallel_requests: Optional[int] = None, - input_request_name: Optional[str] = None, - output_response_name: Optional[str] = None, **kwargs: Any ) -> None: """ :keyword endpoint: The Request-Response execute endpoint of the Azure Machine Learning web - service. + service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. # pylint: disable=line-too-long :paramtype endpoint: str :keyword api_key: The API key used to authenticate with Request-Response endpoint. :paramtype api_key: str :keyword inputs: The inputs for the Azure Machine Learning web service endpoint. - :paramtype inputs: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn] + :paramtype inputs: ~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceInputs :keyword outputs: A list of outputs from the Azure Machine Learning web service endpoint execution. :paramtype outputs: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceOutputColumn] + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceOutputColumn] :keyword batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure ML RRS execute request. Default is 1000. :paramtype batch_size: int - :keyword number_of_parallel_requests: The number of parallel requests that will be sent per - partition of your job to the machine learning service. Default is 1. - :paramtype number_of_parallel_requests: int - :keyword input_request_name: Label for the input request object. - :paramtype input_request_name: str - :keyword output_response_name: Label for the output request object. 
- :paramtype output_response_name: str """ super().__init__(**kwargs) - self.type: str = "Microsoft.MachineLearningServices" + self.type: str = "Microsoft.MachineLearning/WebService" self.endpoint = endpoint self.api_key = api_key self.inputs = inputs self.outputs = outputs self.batch_size = batch_size - self.number_of_parallel_requests = number_of_parallel_requests - self.input_request_name = input_request_name - self.output_response_name = output_response_name -class FunctionRetrieveDefaultDefinitionParameters(_serialization.Model): +class FunctionRetrieveDefaultDefinitionParameters(_serialization.Model): # pylint: disable=name-too-long """Parameters used to specify the type of function to retrieve the default definition for. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, - AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, - CSharpFunctionRetrieveDefaultDefinitionParameters, + AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, JavaScriptFunctionRetrieveDefaultDefinitionParameters - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar binding_type: Indicates the function binding type. Required. 
:vartype binding_type: str @@ -864,9 +760,7 @@ class FunctionRetrieveDefaultDefinitionParameters(_serialization.Model): _subtype_map = { "binding_type": { - "Microsoft.MachineLearning/WebService": "AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters", - "Microsoft.MachineLearningServices": "AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters", - "Microsoft.StreamAnalytics/CLRUdf": "CSharpFunctionRetrieveDefaultDefinitionParameters", + "Microsoft.MachineLearning/WebService": "AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters", "Microsoft.StreamAnalytics/JavascriptUdf": "JavaScriptFunctionRetrieveDefaultDefinitionParameters", } } @@ -877,247 +771,19 @@ def __init__(self, **kwargs: Any) -> None: self.binding_type: Optional[str] = None -class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters( +class AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters( FunctionRetrieveDefaultDefinitionParameters -): +): # pylint: disable=name-too-long """The parameters needed to retrieve the default function definition for an Azure Machine Learning web service function. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar binding_type: Indicates the function binding type. Required. :vartype binding_type: str :ivar execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning web - service. - :vartype execute_endpoint: str - :ivar udf_type: The function type. Default value is "Scalar". 
- :vartype udf_type: str - """ - - _validation = { - "binding_type": {"required": True}, - } - - _attribute_map = { - "binding_type": {"key": "bindingType", "type": "str"}, - "execute_endpoint": {"key": "bindingRetrievalProperties.executeEndpoint", "type": "str"}, - "udf_type": {"key": "bindingRetrievalProperties.udfType", "type": "str"}, - } - - def __init__( - self, *, execute_endpoint: Optional[str] = None, udf_type: Optional[Literal["Scalar"]] = None, **kwargs: Any - ) -> None: - """ - :keyword execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - web service. - :paramtype execute_endpoint: str - :keyword udf_type: The function type. Default value is "Scalar". - :paramtype udf_type: str - """ - super().__init__(**kwargs) - self.binding_type: str = "Microsoft.MachineLearningServices" - self.execute_endpoint = execute_endpoint - self.udf_type = udf_type - - -class AzureMachineLearningServiceInputColumn(_serialization.Model): - """Describes an input column for the Azure Machine Learning web service endpoint. - - :ivar name: The name of the input column. - :vartype name: str - :ivar data_type: The (Azure Machine Learning supported) data type of the input column. - :vartype data_type: str - :ivar map_to: The zero based index of the function parameter this input maps to. - :vartype map_to: int - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "data_type": {"key": "dataType", "type": "str"}, - "map_to": {"key": "mapTo", "type": "int"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - data_type: Optional[str] = None, - map_to: Optional[int] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: The name of the input column. - :paramtype name: str - :keyword data_type: The (Azure Machine Learning supported) data type of the input column. - :paramtype data_type: str - :keyword map_to: The zero based index of the function parameter this input maps to. 
- :paramtype map_to: int - """ - super().__init__(**kwargs) - self.name = name - self.data_type = data_type - self.map_to = map_to - - -class AzureMachineLearningServiceInputs(_serialization.Model): - """The inputs for the Azure Machine Learning web service endpoint. - - :ivar name: The name of the input. This is the name provided while authoring the endpoint. - :vartype name: str - :ivar column_names: A list of input columns for the Azure Machine Learning web service - endpoint. - :vartype column_names: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn] - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "column_names": {"key": "columnNames", "type": "[AzureMachineLearningServiceInputColumn]"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - column_names: Optional[List["_models.AzureMachineLearningServiceInputColumn"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: The name of the input. This is the name provided while authoring the endpoint. - :paramtype name: str - :keyword column_names: A list of input columns for the Azure Machine Learning web service - endpoint. - :paramtype column_names: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn] - """ - super().__init__(**kwargs) - self.name = name - self.column_names = column_names - - -class AzureMachineLearningServiceOutputColumn(_serialization.Model): - """Describes an output column for the Azure Machine Learning web service endpoint. - - :ivar name: The name of the output column. - :vartype name: str - :ivar data_type: The (Azure Machine Learning supported) data type of the output column. - :vartype data_type: str - :ivar map_to: The zero based index of the function parameter this input maps to. 
- :vartype map_to: int - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "data_type": {"key": "dataType", "type": "str"}, - "map_to": {"key": "mapTo", "type": "int"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - data_type: Optional[str] = None, - map_to: Optional[int] = None, - **kwargs: Any - ) -> None: - """ - :keyword name: The name of the output column. - :paramtype name: str - :keyword data_type: The (Azure Machine Learning supported) data type of the output column. - :paramtype data_type: str - :keyword map_to: The zero based index of the function parameter this input maps to. - :paramtype map_to: int - """ - super().__init__(**kwargs) - self.name = name - self.data_type = data_type - self.map_to = map_to - - -class AzureMachineLearningStudioFunctionBinding(FunctionBinding): - """The binding to an Azure Machine Learning Studio. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the function binding type. Required. - :vartype type: str - :ivar endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio. - Find out more here: - https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. - :vartype endpoint: str - :ivar api_key: The API key used to authenticate with Request-Response endpoint. - :vartype api_key: str - :ivar inputs: The inputs for the Azure Machine Learning Studio endpoint. - :vartype inputs: ~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputs - :ivar outputs: A list of outputs from the Azure Machine Learning Studio endpoint execution. - :vartype outputs: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioOutputColumn] - :ivar batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure - ML RRS execute request. Default is 1000. 
- :vartype batch_size: int - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "endpoint": {"key": "properties.endpoint", "type": "str"}, - "api_key": {"key": "properties.apiKey", "type": "str"}, - "inputs": {"key": "properties.inputs", "type": "AzureMachineLearningStudioInputs"}, - "outputs": {"key": "properties.outputs", "type": "[AzureMachineLearningStudioOutputColumn]"}, - "batch_size": {"key": "properties.batchSize", "type": "int"}, - } - - def __init__( - self, - *, - endpoint: Optional[str] = None, - api_key: Optional[str] = None, - inputs: Optional["_models.AzureMachineLearningStudioInputs"] = None, - outputs: Optional[List["_models.AzureMachineLearningStudioOutputColumn"]] = None, - batch_size: Optional[int] = None, - **kwargs: Any - ) -> None: - """ - :keyword endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio. - Find out more here: - https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. - :paramtype endpoint: str - :keyword api_key: The API key used to authenticate with Request-Response endpoint. - :paramtype api_key: str - :keyword inputs: The inputs for the Azure Machine Learning Studio endpoint. - :paramtype inputs: ~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputs - :keyword outputs: A list of outputs from the Azure Machine Learning Studio endpoint execution. - :paramtype outputs: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioOutputColumn] - :keyword batch_size: Number between 1 and 10000 describing maximum number of rows for every - Azure ML RRS execute request. Default is 1000. 
- :paramtype batch_size: int - """ - super().__init__(**kwargs) - self.type: str = "Microsoft.MachineLearning/WebService" - self.endpoint = endpoint - self.api_key = api_key - self.inputs = inputs - self.outputs = outputs - self.batch_size = batch_size - - -class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters( - FunctionRetrieveDefaultDefinitionParameters -): - """The parameters needed to retrieve the default function definition for an Azure Machine Learning - Studio function. - - All required parameters must be populated in order to send to Azure. - - :ivar binding_type: Indicates the function binding type. Required. - :vartype binding_type: str - :ivar execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - Studio. Find out more here: - https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. + service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. # pylint: disable=line-too-long :vartype execute_endpoint: str :ivar udf_type: The function type. Default value is "Scalar". :vartype udf_type: str @@ -1138,8 +804,8 @@ def __init__( ) -> None: """ :keyword execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - Studio. Find out more here: - https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. + web service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. # pylint: disable=line-too-long :paramtype execute_endpoint: str :keyword udf_type: The function type. Default value is "Scalar". 
:paramtype udf_type: str @@ -1150,8 +816,8 @@ def __init__( self.udf_type = udf_type -class AzureMachineLearningStudioInputColumn(_serialization.Model): - """Describes an input column for the Azure Machine Learning Studio endpoint. +class AzureMachineLearningWebServiceInputColumn(_serialization.Model): # pylint: disable=name-too-long + """Describes an input column for the Azure Machine Learning web service endpoint. :ivar name: The name of the input column. :vartype name: str @@ -1193,42 +859,44 @@ def __init__( self.map_to = map_to -class AzureMachineLearningStudioInputs(_serialization.Model): - """The inputs for the Azure Machine Learning Studio endpoint. +class AzureMachineLearningWebServiceInputs(_serialization.Model): + """The inputs for the Azure Machine Learning web service endpoint. :ivar name: The name of the input. This is the name provided while authoring the endpoint. :vartype name: str - :ivar column_names: A list of input columns for the Azure Machine Learning Studio endpoint. + :ivar column_names: A list of input columns for the Azure Machine Learning web service + endpoint. :vartype column_names: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputColumn] + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceInputColumn] """ _attribute_map = { "name": {"key": "name", "type": "str"}, - "column_names": {"key": "columnNames", "type": "[AzureMachineLearningStudioInputColumn]"}, + "column_names": {"key": "columnNames", "type": "[AzureMachineLearningWebServiceInputColumn]"}, } def __init__( self, *, name: Optional[str] = None, - column_names: Optional[List["_models.AzureMachineLearningStudioInputColumn"]] = None, + column_names: Optional[List["_models.AzureMachineLearningWebServiceInputColumn"]] = None, **kwargs: Any ) -> None: """ :keyword name: The name of the input. This is the name provided while authoring the endpoint. 
:paramtype name: str - :keyword column_names: A list of input columns for the Azure Machine Learning Studio endpoint. + :keyword column_names: A list of input columns for the Azure Machine Learning web service + endpoint. :paramtype column_names: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputColumn] + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceInputColumn] """ super().__init__(**kwargs) self.name = name self.column_names = column_names -class AzureMachineLearningStudioOutputColumn(_serialization.Model): - """Describes an output column for the Azure Machine Learning Studio endpoint. +class AzureMachineLearningWebServiceOutputColumn(_serialization.Model): # pylint: disable=name-too-long + """Describes an output column for the Azure Machine Learning web service endpoint. :ivar name: The name of the output column. :vartype name: str @@ -1349,7 +1017,7 @@ def __init__( class AzureSqlDatabaseOutputDataSource(OutputDataSource): """Describes an Azure SQL database output data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. @@ -1446,7 +1114,7 @@ def __init__( self.authentication_mode = authentication_mode -class AzureSqlDatabaseOutputDataSourceProperties(AzureSqlDatabaseDataSourceProperties): +class AzureSqlDatabaseOutputDataSourceProperties(AzureSqlDatabaseDataSourceProperties): # pylint: disable=name-too-long """The properties that are associated with an Azure SQL database output. :ivar server: The name of the SQL server containing the Azure SQL database. 
Required on PUT @@ -1474,77 +1142,14 @@ class AzureSqlDatabaseOutputDataSourceProperties(AzureSqlDatabaseDataSourcePrope :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ - _attribute_map = { - "server": {"key": "server", "type": "str"}, - "database": {"key": "database", "type": "str"}, - "user": {"key": "user", "type": "str"}, - "password": {"key": "password", "type": "str"}, - "table": {"key": "table", "type": "str"}, - "max_batch_count": {"key": "maxBatchCount", "type": "float"}, - "max_writer_count": {"key": "maxWriterCount", "type": "float"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, - } - - def __init__( - self, - *, - server: Optional[str] = None, - database: Optional[str] = None, - user: Optional[str] = None, - password: Optional[str] = None, - table: Optional[str] = None, - max_batch_count: Optional[float] = None, - max_writer_count: Optional[float] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs: Any - ) -> None: - """ - :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype server: str - :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :paramtype database: str - :keyword user: The user name that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :paramtype user: str - :keyword password: The password that will be used to connect to the Azure SQL database. - Required on PUT (CreateOrReplace) requests. - :paramtype password: str - :keyword table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype table: str - :keyword max_batch_count: Max Batch count for write to Sql database, the default value is - 10,000. Optional on PUT requests. 
- :paramtype max_batch_count: float - :keyword max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. - :paramtype max_writer_count: float - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - super().__init__( - server=server, - database=database, - user=user, - password=password, - table=table, - max_batch_count=max_batch_count, - max_writer_count=max_writer_count, - authentication_mode=authentication_mode, - **kwargs - ) - class ReferenceInputDataSource(_serialization.Model): """Describes an input data source that contains reference data. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - FileReferenceInputDataSource, AzureSqlReferenceInputDataSource, BlobReferenceInputDataSource, - RawReferenceInputDataSource + FileReferenceInputDataSource, AzureSqlReferenceInputDataSource, BlobReferenceInputDataSource - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of input data source containing reference data. Required on PUT (CreateOrReplace) requests. Required. @@ -1564,7 +1169,6 @@ class ReferenceInputDataSource(_serialization.Model): "File": "FileReferenceInputDataSource", "Microsoft.Sql/Server/Database": "AzureSqlReferenceInputDataSource", "Microsoft.Storage/Blob": "BlobReferenceInputDataSource", - "Raw": "RawReferenceInputDataSource", } } @@ -1577,7 +1181,7 @@ def __init__(self, **kwargs: Any) -> None: class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): """Describes an Azure SQL database reference input data source. - All required parameters must be populated in order to send to Azure. 
+ All required parameters must be populated in order to send to server. :ivar type: Indicates the type of input data source containing reference data. Required on PUT (CreateOrReplace) requests. Required. @@ -1594,6 +1198,9 @@ class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): :ivar password: This element is associated with the datasource element. This is the password that will be used to connect to the SQL Database instance. :vartype password: str + :ivar table: This element is associated with the datasource element. The name of the table in + the Azure SQL database.. + :vartype table: str :ivar refresh_type: Indicates the type of data refresh option. Known values are: "Static", "RefreshPeriodicallyWithFull", and "RefreshPeriodicallyWithDelta". :vartype refresh_type: str or ~azure.mgmt.streamanalytics.models.RefreshType @@ -1607,9 +1214,6 @@ class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): is used to fetch incremental changes from the SQL database. To use this option, we recommend using temporal tables in Azure SQL Database. :vartype delta_snapshot_query: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". 
- :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ _validation = { @@ -1622,11 +1226,11 @@ class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): "database": {"key": "properties.database", "type": "str"}, "user": {"key": "properties.user", "type": "str"}, "password": {"key": "properties.password", "type": "str"}, + "table": {"key": "properties.table", "type": "str"}, "refresh_type": {"key": "properties.refreshType", "type": "str"}, "refresh_rate": {"key": "properties.refreshRate", "type": "str"}, "full_snapshot_query": {"key": "properties.fullSnapshotQuery", "type": "str"}, "delta_snapshot_query": {"key": "properties.deltaSnapshotQuery", "type": "str"}, - "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, } def __init__( @@ -1636,11 +1240,11 @@ def __init__( database: Optional[str] = None, user: Optional[str] = None, password: Optional[str] = None, + table: Optional[str] = None, refresh_type: Optional[Union[str, "_models.RefreshType"]] = None, refresh_rate: Optional[str] = None, full_snapshot_query: Optional[str] = None, delta_snapshot_query: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", **kwargs: Any ) -> None: """ @@ -1656,6 +1260,9 @@ def __init__( :keyword password: This element is associated with the datasource element. This is the password that will be used to connect to the SQL Database instance. :paramtype password: str + :keyword table: This element is associated with the datasource element. The name of the table + in the Azure SQL database.. + :paramtype table: str :keyword refresh_type: Indicates the type of data refresh option. Known values are: "Static", "RefreshPeriodicallyWithFull", and "RefreshPeriodicallyWithDelta". :paramtype refresh_type: str or ~azure.mgmt.streamanalytics.models.RefreshType @@ -1669,9 +1276,6 @@ def __init__( query is used to fetch incremental changes from the SQL database. 
To use this option, we recommend using temporal tables in Azure SQL Database. :paramtype delta_snapshot_query: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) self.type: str = "Microsoft.Sql/Server/Database" @@ -1679,11 +1283,11 @@ def __init__( self.database = database self.user = user self.password = password + self.table = table self.refresh_type = refresh_type self.refresh_rate = refresh_rate self.full_snapshot_query = full_snapshot_query self.delta_snapshot_query = delta_snapshot_query - self.authentication_mode = authentication_mode class AzureSynapseDataSourceProperties(_serialization.Model): @@ -1703,9 +1307,6 @@ class AzureSynapseDataSourceProperties(_serialization.Model): :ivar password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. :vartype password: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ _attribute_map = { @@ -1714,7 +1315,6 @@ class AzureSynapseDataSourceProperties(_serialization.Model): "table": {"key": "table", "type": "str"}, "user": {"key": "user", "type": "str"}, "password": {"key": "password", "type": "str"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, } def __init__( @@ -1725,7 +1325,6 @@ def __init__( table: Optional[str] = None, user: Optional[str] = None, password: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", **kwargs: Any ) -> None: """ @@ -1744,9 +1343,6 @@ def __init__( :keyword password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. 
:paramtype password: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) self.server = server @@ -1754,13 +1350,12 @@ def __init__( self.table = table self.user = user self.password = password - self.authentication_mode = authentication_mode class AzureSynapseOutputDataSource(OutputDataSource): """Describes an Azure Synapse output data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. @@ -1779,9 +1374,6 @@ class AzureSynapseOutputDataSource(OutputDataSource): :ivar password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. :vartype password: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ _validation = { @@ -1795,7 +1387,6 @@ class AzureSynapseOutputDataSource(OutputDataSource): "table": {"key": "properties.table", "type": "str"}, "user": {"key": "properties.user", "type": "str"}, "password": {"key": "properties.password", "type": "str"}, - "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, } def __init__( @@ -1806,7 +1397,6 @@ def __init__( table: Optional[str] = None, user: Optional[str] = None, password: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", **kwargs: Any ) -> None: """ @@ -1825,9 +1415,6 @@ def __init__( :keyword password: The password that will be used to connect to the Azure SQL database. 
Required on PUT (CreateOrReplace) requests. :paramtype password: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) self.type: str = "Microsoft.Sql/Server/DataWarehouse" @@ -1836,7 +1423,6 @@ def __init__( self.table = table self.user = user self.password = password - self.authentication_mode = authentication_mode class AzureSynapseOutputDataSourceProperties(AzureSynapseDataSourceProperties): @@ -1856,66 +1442,13 @@ class AzureSynapseOutputDataSourceProperties(AzureSynapseDataSourceProperties): :ivar password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. :vartype password: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ - _attribute_map = { - "server": {"key": "server", "type": "str"}, - "database": {"key": "database", "type": "str"}, - "table": {"key": "table", "type": "str"}, - "user": {"key": "user", "type": "str"}, - "password": {"key": "password", "type": "str"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, - } - - def __init__( - self, - *, - server: Optional[str] = None, - database: Optional[str] = None, - table: Optional[str] = None, - user: Optional[str] = None, - password: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs: Any - ) -> None: - """ - :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype server: str - :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) - requests. 
- :paramtype database: str - :keyword table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype table: str - :keyword user: The user name that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :paramtype user: str - :keyword password: The password that will be used to connect to the Azure SQL database. - Required on PUT (CreateOrReplace) requests. - :paramtype password: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - super().__init__( - server=server, - database=database, - table=table, - user=user, - password=password, - authentication_mode=authentication_mode, - **kwargs - ) - class AzureTableOutputDataSource(OutputDataSource): """Describes an Azure Table output data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. @@ -2089,7 +1622,7 @@ def __init__( class BlobOutputDataSource(OutputDataSource): """Describes a blob output data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. @@ -2119,8 +1652,6 @@ class BlobOutputDataSource(OutputDataSource): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar blob_path_prefix: Blob path prefix. :vartype blob_path_prefix: str - :ivar blob_write_mode: Blob write mode. Known values are: "Append" and "Once". 
- :vartype blob_write_mode: str or ~azure.mgmt.streamanalytics.models.BlobWriteMode """ _validation = { @@ -2136,7 +1667,6 @@ class BlobOutputDataSource(OutputDataSource): "time_format": {"key": "properties.timeFormat", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, "blob_path_prefix": {"key": "properties.blobPathPrefix", "type": "str"}, - "blob_write_mode": {"key": "properties.blobWriteMode", "type": "str"}, } def __init__( @@ -2149,7 +1679,6 @@ def __init__( time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", blob_path_prefix: Optional[str] = None, - blob_write_mode: Optional[Union[str, "_models.BlobWriteMode"]] = None, **kwargs: Any ) -> None: """ @@ -2178,8 +1707,6 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword blob_path_prefix: Blob path prefix. :paramtype blob_path_prefix: str - :keyword blob_write_mode: Blob write mode. Known values are: "Append" and "Once". - :paramtype blob_write_mode: str or ~azure.mgmt.streamanalytics.models.BlobWriteMode """ super().__init__(**kwargs) self.type: str = "Microsoft.Storage/Blob" @@ -2190,7 +1717,6 @@ def __init__( self.time_format = time_format self.authentication_mode = authentication_mode self.blob_path_prefix = blob_path_prefix - self.blob_write_mode = blob_write_mode class BlobOutputDataSourceProperties(BlobDataSourceProperties): @@ -2221,8 +1747,6 @@ class BlobOutputDataSourceProperties(BlobDataSourceProperties): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar blob_path_prefix: Blob path prefix. :vartype blob_path_prefix: str - :ivar blob_write_mode: Blob write mode. Known values are: "Append" and "Once". 
- :vartype blob_write_mode: str or ~azure.mgmt.streamanalytics.models.BlobWriteMode """ _attribute_map = { @@ -2233,7 +1757,6 @@ class BlobOutputDataSourceProperties(BlobDataSourceProperties): "time_format": {"key": "timeFormat", "type": "str"}, "authentication_mode": {"key": "authenticationMode", "type": "str"}, "blob_path_prefix": {"key": "blobPathPrefix", "type": "str"}, - "blob_write_mode": {"key": "blobWriteMode", "type": "str"}, } def __init__( @@ -2246,7 +1769,6 @@ def __init__( time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", blob_path_prefix: Optional[str] = None, - blob_write_mode: Optional[Union[str, "_models.BlobWriteMode"]] = None, **kwargs: Any ) -> None: """ @@ -2275,8 +1797,6 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword blob_path_prefix: Blob path prefix. :paramtype blob_path_prefix: str - :keyword blob_write_mode: Blob write mode. Known values are: "Append" and "Once". - :paramtype blob_write_mode: str or ~azure.mgmt.streamanalytics.models.BlobWriteMode """ super().__init__( storage_accounts=storage_accounts, @@ -2288,13 +1808,12 @@ def __init__( **kwargs ) self.blob_path_prefix = blob_path_prefix - self.blob_write_mode = blob_write_mode -class BlobReferenceInputDataSource(ReferenceInputDataSource): # pylint: disable=too-many-instance-attributes +class BlobReferenceInputDataSource(ReferenceInputDataSource): """Describes a blob input data source that contains reference data. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of input data source containing reference data. Required on PUT (CreateOrReplace) requests. Required. @@ -2322,17 +1841,6 @@ class BlobReferenceInputDataSource(ReferenceInputDataSource): # pylint: disable :ivar authentication_mode: Authentication Mode. 
Known values are: "Msi", "UserToken", and "ConnectionString". :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - :ivar blob_name: The name of the blob input. - :vartype blob_name: str - :ivar delta_path_pattern: The path pattern of the delta snapshot. - :vartype delta_path_pattern: str - :ivar source_partition_count: The partition count of the blob input data source. Range 1 - 256. - :vartype source_partition_count: int - :ivar full_snapshot_refresh_rate: The refresh interval of the blob input data source. - :vartype full_snapshot_refresh_rate: str - :ivar delta_snapshot_refresh_rate: The interval that the user generates a delta snapshot of - this reference blob input data source. - :vartype delta_snapshot_refresh_rate: str """ _validation = { @@ -2347,11 +1855,6 @@ class BlobReferenceInputDataSource(ReferenceInputDataSource): # pylint: disable "date_format": {"key": "properties.dateFormat", "type": "str"}, "time_format": {"key": "properties.timeFormat", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, - "blob_name": {"key": "properties.blobName", "type": "str"}, - "delta_path_pattern": {"key": "properties.deltaPathPattern", "type": "str"}, - "source_partition_count": {"key": "properties.sourcePartitionCount", "type": "int"}, - "full_snapshot_refresh_rate": {"key": "properties.fullSnapshotRefreshRate", "type": "str"}, - "delta_snapshot_refresh_rate": {"key": "properties.deltaSnapshotRefreshRate", "type": "str"}, } def __init__( @@ -2363,11 +1866,6 @@ def __init__( date_format: Optional[str] = None, time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - blob_name: Optional[str] = None, - delta_path_pattern: Optional[str] = None, - source_partition_count: Optional[int] = None, - full_snapshot_refresh_rate: Optional[str] = None, - delta_snapshot_refresh_rate: Optional[str] = None, **kwargs: Any ) -> None: """ @@ 
-2394,18 +1892,6 @@ def __init__( :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and "ConnectionString". :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - :keyword blob_name: The name of the blob input. - :paramtype blob_name: str - :keyword delta_path_pattern: The path pattern of the delta snapshot. - :paramtype delta_path_pattern: str - :keyword source_partition_count: The partition count of the blob input data source. Range 1 - - 256. - :paramtype source_partition_count: int - :keyword full_snapshot_refresh_rate: The refresh interval of the blob input data source. - :paramtype full_snapshot_refresh_rate: str - :keyword delta_snapshot_refresh_rate: The interval that the user generates a delta snapshot of - this reference blob input data source. - :paramtype delta_snapshot_refresh_rate: str """ super().__init__(**kwargs) self.type: str = "Microsoft.Storage/Blob" @@ -2415,14 +1901,9 @@ def __init__( self.date_format = date_format self.time_format = time_format self.authentication_mode = authentication_mode - self.blob_name = blob_name - self.delta_path_pattern = delta_path_pattern - self.source_partition_count = source_partition_count - self.full_snapshot_refresh_rate = full_snapshot_refresh_rate - self.delta_snapshot_refresh_rate = delta_snapshot_refresh_rate -class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): # pylint: disable=too-many-instance-attributes +class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): """The properties that are associated with a blob input containing reference data. :ivar storage_accounts: A list of one or more Azure Storage accounts. Required on PUT @@ -2448,111 +1929,17 @@ class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): # pylin :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and "ConnectionString". 
:vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - :ivar blob_name: The name of the blob input. - :vartype blob_name: str - :ivar delta_path_pattern: The path pattern of the delta snapshot. - :vartype delta_path_pattern: str - :ivar source_partition_count: The partition count of the blob input data source. Range 1 - 256. - :vartype source_partition_count: int - :ivar full_snapshot_refresh_rate: The refresh interval of the blob input data source. - :vartype full_snapshot_refresh_rate: str - :ivar delta_snapshot_refresh_rate: The interval that the user generates a delta snapshot of - this reference blob input data source. - :vartype delta_snapshot_refresh_rate: str """ - _attribute_map = { - "storage_accounts": {"key": "storageAccounts", "type": "[StorageAccount]"}, - "container": {"key": "container", "type": "str"}, - "path_pattern": {"key": "pathPattern", "type": "str"}, - "date_format": {"key": "dateFormat", "type": "str"}, - "time_format": {"key": "timeFormat", "type": "str"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, - "blob_name": {"key": "blobName", "type": "str"}, - "delta_path_pattern": {"key": "deltaPathPattern", "type": "str"}, - "source_partition_count": {"key": "sourcePartitionCount", "type": "int"}, - "full_snapshot_refresh_rate": {"key": "fullSnapshotRefreshRate", "type": "str"}, - "delta_snapshot_refresh_rate": {"key": "deltaSnapshotRefreshRate", "type": "str"}, - } - - def __init__( - self, - *, - storage_accounts: Optional[List["_models.StorageAccount"]] = None, - container: Optional[str] = None, - path_pattern: Optional[str] = None, - date_format: Optional[str] = None, - time_format: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - blob_name: Optional[str] = None, - delta_path_pattern: Optional[str] = None, - source_partition_count: Optional[int] = None, - full_snapshot_refresh_rate: Optional[str] = None, - 
delta_snapshot_refresh_rate: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT - (CreateOrReplace) requests. - :paramtype storage_accounts: list[~azure.mgmt.streamanalytics.models.StorageAccount] - :keyword container: The name of a container within the associated Storage account. This - container contains either the blob(s) to be read from or written to. Required on PUT - (CreateOrReplace) requests. - :paramtype container: str - :keyword path_pattern: The blob path pattern. Not a regular expression. It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See - https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or - https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more - detailed explanation and example. - :paramtype path_pattern: str - :keyword date_format: The date format. Wherever {date} appears in pathPattern, the value of - this property is used as the date format instead. - :paramtype date_format: str - :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of - this property is used as the time format instead. - :paramtype time_format: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - :keyword blob_name: The name of the blob input. - :paramtype blob_name: str - :keyword delta_path_pattern: The path pattern of the delta snapshot. - :paramtype delta_path_pattern: str - :keyword source_partition_count: The partition count of the blob input data source. Range 1 - - 256. - :paramtype source_partition_count: int - :keyword full_snapshot_refresh_rate: The refresh interval of the blob input data source. 
- :paramtype full_snapshot_refresh_rate: str - :keyword delta_snapshot_refresh_rate: The interval that the user generates a delta snapshot of - this reference blob input data source. - :paramtype delta_snapshot_refresh_rate: str - """ - super().__init__( - storage_accounts=storage_accounts, - container=container, - path_pattern=path_pattern, - date_format=date_format, - time_format=time_format, - authentication_mode=authentication_mode, - **kwargs - ) - self.blob_name = blob_name - self.delta_path_pattern = delta_path_pattern - self.source_partition_count = source_partition_count - self.full_snapshot_refresh_rate = full_snapshot_refresh_rate - self.delta_snapshot_refresh_rate = delta_snapshot_refresh_rate - class StreamInputDataSource(_serialization.Model): """Describes an input data source that contains stream data. You probably want to use the sub-classes and not this class directly. Known sub-classes are: GatewayMessageBusStreamInputDataSource, IoTHubStreamInputDataSource, - EventGridStreamInputDataSource, EventHubV2StreamInputDataSource, EventHubStreamInputDataSource, - BlobStreamInputDataSource, RawStreamInputDataSource + EventHubV2StreamInputDataSource, EventHubStreamInputDataSource, BlobStreamInputDataSource - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of input data source containing stream data. Required on PUT (CreateOrReplace) requests. Required. 
@@ -2571,11 +1958,9 @@ class StreamInputDataSource(_serialization.Model): "type": { "GatewayMessageBus": "GatewayMessageBusStreamInputDataSource", "Microsoft.Devices/IotHubs": "IoTHubStreamInputDataSource", - "Microsoft.EventGrid/EventSubscriptions": "EventGridStreamInputDataSource", "Microsoft.EventHub/EventHub": "EventHubV2StreamInputDataSource", "Microsoft.ServiceBus/EventHub": "EventHubStreamInputDataSource", "Microsoft.Storage/Blob": "BlobStreamInputDataSource", - "Raw": "RawStreamInputDataSource", } } @@ -2588,7 +1973,7 @@ def __init__(self, **kwargs: Any) -> None: class BlobStreamInputDataSource(StreamInputDataSource): """Describes a blob input data source that contains stream data. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of input data source containing stream data. Required on PUT (CreateOrReplace) requests. Required. @@ -2786,7 +2171,7 @@ class Resource(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -2821,7 +2206,7 @@ class TrackedResource(Resource): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. 
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -2860,13 +2245,13 @@ def __init__(self, *, tags: Optional[Dict[str, str]] = None, location: Optional[ self.location = location -class Cluster(TrackedResource): +class Cluster(TrackedResource): # pylint: disable=too-many-instance-attributes """A Stream Analytics Cluster object. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -2884,8 +2269,20 @@ class Cluster(TrackedResource): detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :ivar properties: The properties associated with a Stream Analytics cluster. - :vartype properties: ~azure.mgmt.streamanalytics.models.ClusterProperties + :ivar created_date: The date this cluster was created. + :vartype created_date: ~datetime.datetime + :ivar cluster_id: Unique identifier for the cluster. + :vartype cluster_id: str + :ivar provisioning_state: The status of the cluster provisioning. The three terminal states + are: Succeeded, Failed and Canceled. Known values are: "Succeeded", "Failed", "Canceled", and + "InProgress". 
+ :vartype provisioning_state: str or ~azure.mgmt.streamanalytics.models.ClusterProvisioningState + :ivar capacity_allocated: Represents the number of streaming units currently being used on the + cluster. + :vartype capacity_allocated: int + :ivar capacity_assigned: Represents the sum of the SUs of all streaming jobs associated with + the cluster. If all of the jobs were running, this would be the capacity allocated. + :vartype capacity_assigned: int """ _validation = { @@ -2893,6 +2290,11 @@ class Cluster(TrackedResource): "name": {"readonly": True}, "type": {"readonly": True}, "etag": {"readonly": True}, + "created_date": {"readonly": True}, + "cluster_id": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "capacity_allocated": {"readonly": True}, + "capacity_assigned": {"readonly": True}, } _attribute_map = { @@ -2903,7 +2305,11 @@ class Cluster(TrackedResource): "location": {"key": "location", "type": "str"}, "sku": {"key": "sku", "type": "ClusterSku"}, "etag": {"key": "etag", "type": "str"}, - "properties": {"key": "properties", "type": "ClusterProperties"}, + "created_date": {"key": "properties.createdDate", "type": "iso-8601"}, + "cluster_id": {"key": "properties.clusterId", "type": "str"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "capacity_allocated": {"key": "properties.capacityAllocated", "type": "int"}, + "capacity_assigned": {"key": "properties.capacityAssigned", "type": "int"}, } def __init__( @@ -2912,7 +2318,6 @@ def __init__( tags: Optional[Dict[str, str]] = None, location: Optional[str] = None, sku: Optional["_models.ClusterSku"] = None, - properties: Optional["_models.ClusterProperties"] = None, **kwargs: Any ) -> None: """ @@ -2923,13 +2328,15 @@ def __init__( :keyword sku: The SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT (CreateOrUpdate) requests. 
:paramtype sku: ~azure.mgmt.streamanalytics.models.ClusterSku - :keyword properties: The properties associated with a Stream Analytics cluster. - :paramtype properties: ~azure.mgmt.streamanalytics.models.ClusterProperties """ super().__init__(tags=tags, location=location, **kwargs) self.sku = sku self.etag = None - self.properties = properties + self.created_date = None + self.cluster_id = None + self.provisioning_state = None + self.capacity_allocated = None + self.capacity_assigned = None class ClusterInfo(_serialization.Model): @@ -3043,73 +2450,26 @@ def __init__(self, **kwargs: Any) -> None: self.next_link = None -class ClusterProperties(_serialization.Model): - """The properties associated with a Stream Analytics cluster. - - Variables are only populated by the server, and will be ignored when sending a request. +class ClusterSku(_serialization.Model): + """The SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT + (CreateOrUpdate) requests. - :ivar created_date: The date this cluster was created. - :vartype created_date: ~datetime.datetime - :ivar cluster_id: Unique identifier for the cluster. - :vartype cluster_id: str - :ivar provisioning_state: The status of the cluster provisioning. The three terminal states - are: Succeeded, Failed and Canceled. Known values are: "Succeeded", "Failed", "Canceled", and - "InProgress". - :vartype provisioning_state: str or ~azure.mgmt.streamanalytics.models.ClusterProvisioningState - :ivar capacity_allocated: Represents the number of streaming units currently being used on the - cluster. - :vartype capacity_allocated: int - :ivar capacity_assigned: Represents the sum of the SUs of all streaming jobs associated with - the cluster. If all of the jobs were running, this would be the capacity allocated. - :vartype capacity_assigned: int + :ivar name: Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. 
+ "Default" + :vartype name: str or ~azure.mgmt.streamanalytics.models.ClusterSkuName + :ivar capacity: Denotes the number of streaming units the cluster can support. Valid values for + this property are multiples of 36 with a minimum value of 36 and maximum value of 216. Required + on PUT (CreateOrUpdate) requests. + :vartype capacity: int """ _validation = { - "created_date": {"readonly": True}, - "cluster_id": {"readonly": True}, - "provisioning_state": {"readonly": True}, - "capacity_allocated": {"readonly": True}, - "capacity_assigned": {"readonly": True}, + "capacity": {"maximum": 396, "minimum": 36}, } _attribute_map = { - "created_date": {"key": "createdDate", "type": "iso-8601"}, - "cluster_id": {"key": "clusterId", "type": "str"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, - "capacity_allocated": {"key": "capacityAllocated", "type": "int"}, - "capacity_assigned": {"key": "capacityAssigned", "type": "int"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.created_date = None - self.cluster_id = None - self.provisioning_state = None - self.capacity_allocated = None - self.capacity_assigned = None - - -class ClusterSku(_serialization.Model): - """The SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT - (CreateOrUpdate) requests. - - :ivar name: Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. - "Default" - :vartype name: str or ~azure.mgmt.streamanalytics.models.ClusterSkuName - :ivar capacity: Denotes the number of streaming units the cluster can support. Valid values for - this property are multiples of 36 with a minimum value of 36 and maximum value of 216. Required - on PUT (CreateOrUpdate) requests. 
- :vartype capacity: int - """ - - _validation = { - "capacity": {"maximum": 396, "minimum": 36}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "capacity": {"key": "capacity", "type": "int"}, + "name": {"key": "name", "type": "str"}, + "capacity": {"key": "capacity", "type": "int"}, } def __init__( @@ -3133,73 +2493,10 @@ def __init__( self.capacity = capacity -class CompileQuery(_serialization.Model): - """The query compilation object which defines the input, output, and transformation for the query - compilation. - - All required parameters must be populated in order to send to Azure. - - :ivar query: The query to compile. Required. - :vartype query: str - :ivar inputs: The inputs for the query compilation. - :vartype inputs: list[~azure.mgmt.streamanalytics.models.QueryInput] - :ivar functions: The functions for the query compilation. - :vartype functions: list[~azure.mgmt.streamanalytics.models.QueryFunction] - :ivar job_type: Describes the type of the job. Valid values are ``Cloud`` and 'Edge'. Required. - Known values are: "Cloud" and "Edge". - :vartype job_type: str or ~azure.mgmt.streamanalytics.models.JobType - :ivar compatibility_level: The query to compile. Known values are: "1.0" and "1.2". 
- :vartype compatibility_level: str or ~azure.mgmt.streamanalytics.models.CompatibilityLevel - """ - - _validation = { - "query": {"required": True}, - "job_type": {"required": True}, - } - - _attribute_map = { - "query": {"key": "query", "type": "str"}, - "inputs": {"key": "inputs", "type": "[QueryInput]"}, - "functions": {"key": "functions", "type": "[QueryFunction]"}, - "job_type": {"key": "jobType", "type": "str"}, - "compatibility_level": {"key": "compatibilityLevel", "type": "str"}, - } - - def __init__( - self, - *, - query: str, - job_type: Union[str, "_models.JobType"], - inputs: Optional[List["_models.QueryInput"]] = None, - functions: Optional[List["_models.QueryFunction"]] = None, - compatibility_level: Optional[Union[str, "_models.CompatibilityLevel"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword query: The query to compile. Required. - :paramtype query: str - :keyword inputs: The inputs for the query compilation. - :paramtype inputs: list[~azure.mgmt.streamanalytics.models.QueryInput] - :keyword functions: The functions for the query compilation. - :paramtype functions: list[~azure.mgmt.streamanalytics.models.QueryFunction] - :keyword job_type: Describes the type of the job. Valid values are ``Cloud`` and 'Edge'. - Required. Known values are: "Cloud" and "Edge". - :paramtype job_type: str or ~azure.mgmt.streamanalytics.models.JobType - :keyword compatibility_level: The query to compile. Known values are: "1.0" and "1.2". - :paramtype compatibility_level: str or ~azure.mgmt.streamanalytics.models.CompatibilityLevel - """ - super().__init__(**kwargs) - self.query = query - self.inputs = inputs - self.functions = functions - self.job_type = job_type - self.compatibility_level = compatibility_level - - class Compression(_serialization.Model): """Describes how input data is compressed. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. 
:ivar type: Indicates the type of compression that the input uses. Required on PUT (CreateOrReplace) requests. Known values are: "None", "GZip", and "Deflate". @@ -3224,111 +2521,14 @@ def __init__(self, *, type: Union[str, "_models.CompressionType"] = "None", **kw self.type = type -class CSharpFunctionBinding(FunctionBinding): - """The binding to a CSharp function. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the function binding type. Required. - :vartype type: str - :ivar dll_path: The Csharp code containing a single function definition. - :vartype dll_path: str - :ivar class_property: The Csharp code containing a single function definition. - :vartype class_property: str - :ivar method: The Csharp code containing a single function definition. - :vartype method: str - :ivar update_mode: Refresh modes for Stream Analytics functions. Known values are: "Static" and - "Refreshable". - :vartype update_mode: str or ~azure.mgmt.streamanalytics.models.UpdateMode - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "dll_path": {"key": "properties.dllPath", "type": "str"}, - "class_property": {"key": "properties.class", "type": "str"}, - "method": {"key": "properties.method", "type": "str"}, - "update_mode": {"key": "properties.updateMode", "type": "str"}, - } - - def __init__( - self, - *, - dll_path: Optional[str] = None, - class_property: Optional[str] = None, - method: Optional[str] = None, - update_mode: Optional[Union[str, "_models.UpdateMode"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword dll_path: The Csharp code containing a single function definition. - :paramtype dll_path: str - :keyword class_property: The Csharp code containing a single function definition. - :paramtype class_property: str - :keyword method: The Csharp code containing a single function definition. 
- :paramtype method: str - :keyword update_mode: Refresh modes for Stream Analytics functions. Known values are: "Static" - and "Refreshable". - :paramtype update_mode: str or ~azure.mgmt.streamanalytics.models.UpdateMode - """ - super().__init__(**kwargs) - self.type: str = "Microsoft.StreamAnalytics/CLRUdf" - self.dll_path = dll_path - self.class_property = class_property - self.method = method - self.update_mode = update_mode - - -class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): - """The parameters needed to retrieve the default function definition for a CSharp function. - - All required parameters must be populated in order to send to Azure. - - :ivar binding_type: Indicates the function binding type. Required. - :vartype binding_type: str - :ivar script: The CSharp code containing a single function definition. - :vartype script: str - :ivar udf_type: The function type. Default value is "Scalar". - :vartype udf_type: str - """ - - _validation = { - "binding_type": {"required": True}, - } - - _attribute_map = { - "binding_type": {"key": "bindingType", "type": "str"}, - "script": {"key": "bindingRetrievalProperties.script", "type": "str"}, - "udf_type": {"key": "bindingRetrievalProperties.udfType", "type": "str"}, - } - - def __init__( - self, *, script: Optional[str] = None, udf_type: Optional[Literal["Scalar"]] = None, **kwargs: Any - ) -> None: - """ - :keyword script: The CSharp code containing a single function definition. - :paramtype script: str - :keyword udf_type: The function type. Default value is "Scalar". - :paramtype udf_type: str - """ - super().__init__(**kwargs) - self.binding_type: str = "Microsoft.StreamAnalytics/CLRUdf" - self.script = script - self.udf_type = udf_type - - class CsvSerialization(Serialization): """Describes how data from an input is serialized or how data is serialized when written to an output in CSV format. 
- All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", - "Parquet", and "Delta". + (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", and "Parquet". :vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType :ivar field_delimiter: Specifies the delimiter that will be used to separate comma-separated value (CSV) records. See @@ -3377,98 +2577,6 @@ def __init__( self.encoding = encoding -class CustomClrSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an - output in custom format. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", - "Parquet", and "Delta". - :vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType - :ivar serialization_dll_path: The serialization library path. - :vartype serialization_dll_path: str - :ivar serialization_class_name: The serialization class name. 
- :vartype serialization_class_name: str - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "serialization_dll_path": {"key": "properties.serializationDllPath", "type": "str"}, - "serialization_class_name": {"key": "properties.serializationClassName", "type": "str"}, - } - - def __init__( - self, - *, - serialization_dll_path: Optional[str] = None, - serialization_class_name: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword serialization_dll_path: The serialization library path. - :paramtype serialization_dll_path: str - :keyword serialization_class_name: The serialization class name. - :paramtype serialization_class_name: str - """ - super().__init__(**kwargs) - self.type: str = "CustomClr" - self.serialization_dll_path = serialization_dll_path - self.serialization_class_name = serialization_class_name - - -class DeltaSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an - output in Delta Lake format. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", - "Parquet", and "Delta". - :vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType - :ivar delta_table_path: Specifies the path of the Delta Lake table that the output will be - written to. - :vartype delta_table_path: str - :ivar partition_columns: Specifies the names of the columns for which the Delta Lake table will - be partitioned. We are only supporting 1 partition column, but keeping it as an array for - extensibility. 
- :vartype partition_columns: list[str] - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "delta_table_path": {"key": "properties.deltaTablePath", "type": "str"}, - "partition_columns": {"key": "properties.partitionColumns", "type": "[str]"}, - } - - def __init__( - self, *, delta_table_path: Optional[str] = None, partition_columns: Optional[List[str]] = None, **kwargs: Any - ) -> None: - """ - :keyword delta_table_path: Specifies the path of the Delta Lake table that the output will be - written to. - :paramtype delta_table_path: str - :keyword partition_columns: Specifies the names of the columns for which the Delta Lake table - will be partitioned. We are only supporting 1 partition column, but keeping it as an array for - extensibility. - :paramtype partition_columns: list[str] - """ - super().__init__(**kwargs) - self.type: str = "Delta" - self.delta_table_path = delta_table_path - self.partition_columns = partition_columns - - class DiagnosticCondition(_serialization.Model): """Condition applicable to the resource, or to the job overall, that warrant customer attention. @@ -3532,7 +2640,7 @@ def __init__(self, **kwargs: Any) -> None: class DocumentDbOutputDataSource(OutputDataSource): """Describes a DocumentDB output data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. @@ -3559,9 +2667,6 @@ class DocumentDbOutputDataSource(OutputDataSource): :ivar document_id: The name of the field in output events used to specify the primary key which insert or update operations are based on. :vartype document_id: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". 
- :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ _validation = { @@ -3576,7 +2681,6 @@ class DocumentDbOutputDataSource(OutputDataSource): "collection_name_pattern": {"key": "properties.collectionNamePattern", "type": "str"}, "partition_key": {"key": "properties.partitionKey", "type": "str"}, "document_id": {"key": "properties.documentId", "type": "str"}, - "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, } def __init__( @@ -3588,7 +2692,6 @@ def __init__( collection_name_pattern: Optional[str] = None, partition_key: Optional[str] = None, document_id: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", **kwargs: Any ) -> None: """ @@ -3614,9 +2717,6 @@ def __init__( :keyword document_id: The name of the field in output events used to specify the primary key which insert or update operations are based on. :paramtype document_id: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) self.type: str = "Microsoft.Storage/DocumentDB" @@ -3626,7 +2726,6 @@ def __init__( self.collection_name_pattern = collection_name_pattern self.partition_key = partition_key self.document_id = document_id - self.authentication_mode = authentication_mode class Error(_serialization.Model): @@ -3757,69 +2856,6 @@ def __init__(self, **kwargs: Any) -> None: self.message = None -class EventGridStreamInputDataSource(StreamInputDataSource): - """Describes an event grid input data source that contains stream data. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of input data source containing stream data. Required on PUT - (CreateOrReplace) requests. Required. 
- :vartype type: str - :ivar subscriber: Subscribers for the Event Grid. Currently only EventHub Subscriber is - supported. - :vartype subscriber: ~azure.mgmt.streamanalytics.models.EventHubV2StreamInputDataSource - :ivar schema: Indicates the Event Grid schema type. Known values are: "EventGridEventSchema" - and "CloudEventSchema". - :vartype schema: str or ~azure.mgmt.streamanalytics.models.EventGridEventSchemaType - :ivar storage_accounts: A list of one or more Azure Storage accounts. Required on PUT - (CreateOrReplace) requests. - :vartype storage_accounts: list[~azure.mgmt.streamanalytics.models.StorageAccount] - :ivar event_types: List of Event Types that are supported by the Event Grid adapter. - :vartype event_types: list[str] - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "subscriber": {"key": "properties.subscriber", "type": "EventHubV2StreamInputDataSource"}, - "schema": {"key": "properties.schema", "type": "str"}, - "storage_accounts": {"key": "properties.storageAccounts", "type": "[StorageAccount]"}, - "event_types": {"key": "properties.eventTypes", "type": "[str]"}, - } - - def __init__( - self, - *, - subscriber: Optional["_models.EventHubV2StreamInputDataSource"] = None, - schema: Optional[Union[str, "_models.EventGridEventSchemaType"]] = None, - storage_accounts: Optional[List["_models.StorageAccount"]] = None, - event_types: Optional[List[str]] = None, - **kwargs: Any - ) -> None: - """ - :keyword subscriber: Subscribers for the Event Grid. Currently only EventHub Subscriber is - supported. - :paramtype subscriber: ~azure.mgmt.streamanalytics.models.EventHubV2StreamInputDataSource - :keyword schema: Indicates the Event Grid schema type. Known values are: "EventGridEventSchema" - and "CloudEventSchema". - :paramtype schema: str or ~azure.mgmt.streamanalytics.models.EventGridEventSchemaType - :keyword storage_accounts: A list of one or more Azure Storage accounts. 
Required on PUT - (CreateOrReplace) requests. - :paramtype storage_accounts: list[~azure.mgmt.streamanalytics.models.StorageAccount] - :keyword event_types: List of Event Types that are supported by the Event Grid adapter. - :paramtype event_types: list[str] - """ - super().__init__(**kwargs) - self.type: str = "Microsoft.EventGrid/EventSubscriptions" - self.subscriber = subscriber - self.schema = schema - self.storage_accounts = storage_accounts - self.event_types = event_types - - class ServiceBusDataSourceProperties(_serialization.Model): """The common properties that are associated with Service Bus data sources (Queues, Topics, Event Hubs, etc.). @@ -3892,8 +2928,6 @@ class EventHubDataSourceProperties(ServiceBusDataSourceProperties): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. - :vartype partition_count: int """ _attribute_map = { @@ -3902,7 +2936,6 @@ class EventHubDataSourceProperties(ServiceBusDataSourceProperties): "shared_access_policy_key": {"key": "sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "authenticationMode", "type": "str"}, "event_hub_name": {"key": "eventHubName", "type": "str"}, - "partition_count": {"key": "partitionCount", "type": "int"}, } def __init__( @@ -3913,7 +2946,6 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, **kwargs: Any ) -> None: """ @@ -3931,8 +2963,6 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. 
:paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int """ super().__init__( service_bus_namespace=service_bus_namespace, @@ -3942,13 +2972,12 @@ def __init__( **kwargs ) self.event_hub_name = event_hub_name - self.partition_count = partition_count class EventHubOutputDataSource(OutputDataSource): """Describes an Event Hub output data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. @@ -3967,8 +2996,6 @@ class EventHubOutputDataSource(OutputDataSource): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. - :vartype partition_count: int :ivar partition_key: The key/column that is used to determine to which partition to send event data. 
:vartype partition_key: str @@ -3987,7 +3014,6 @@ class EventHubOutputDataSource(OutputDataSource): "shared_access_policy_key": {"key": "properties.sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, "event_hub_name": {"key": "properties.eventHubName", "type": "str"}, - "partition_count": {"key": "properties.partitionCount", "type": "int"}, "partition_key": {"key": "properties.partitionKey", "type": "str"}, "property_columns": {"key": "properties.propertyColumns", "type": "[str]"}, } @@ -4000,7 +3026,6 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, partition_key: Optional[str] = None, property_columns: Optional[List[str]] = None, **kwargs: Any @@ -4020,8 +3045,6 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int :keyword partition_key: The key/column that is used to determine to which partition to send event data. :paramtype partition_key: str @@ -4035,7 +3058,6 @@ def __init__( self.shared_access_policy_key = shared_access_policy_key self.authentication_mode = authentication_mode self.event_hub_name = event_hub_name - self.partition_count = partition_count self.partition_key = partition_key self.property_columns = property_columns @@ -4057,8 +3079,6 @@ class EventHubOutputDataSourceProperties(EventHubDataSourceProperties): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. 
:vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. - :vartype partition_count: int :ivar partition_key: The key/column that is used to determine to which partition to send event data. :vartype partition_key: str @@ -4072,7 +3092,6 @@ class EventHubOutputDataSourceProperties(EventHubDataSourceProperties): "shared_access_policy_key": {"key": "sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "authenticationMode", "type": "str"}, "event_hub_name": {"key": "eventHubName", "type": "str"}, - "partition_count": {"key": "partitionCount", "type": "int"}, "partition_key": {"key": "partitionKey", "type": "str"}, "property_columns": {"key": "propertyColumns", "type": "[str]"}, } @@ -4085,7 +3104,6 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, partition_key: Optional[str] = None, property_columns: Optional[List[str]] = None, **kwargs: Any @@ -4105,8 +3123,6 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int :keyword partition_key: The key/column that is used to determine to which partition to send event data. 
:paramtype partition_key: str @@ -4119,7 +3135,6 @@ def __init__( shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, event_hub_name=event_hub_name, - partition_count=partition_count, **kwargs ) self.partition_key = partition_key @@ -4129,7 +3144,7 @@ def __init__( class EventHubStreamInputDataSource(StreamInputDataSource): """Describes an Event Hub input data source that contains stream data. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of input data source containing stream data. Required on PUT (CreateOrReplace) requests. Required. @@ -4148,16 +3163,11 @@ class EventHubStreamInputDataSource(StreamInputDataSource): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. - :vartype partition_count: int :ivar consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :vartype consumer_group_name: str - :ivar prefetch_count: The number of messages that the message receiver can simultaneously - request. 
- :vartype prefetch_count: int """ _validation = { @@ -4171,9 +3181,7 @@ class EventHubStreamInputDataSource(StreamInputDataSource): "shared_access_policy_key": {"key": "properties.sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, "event_hub_name": {"key": "properties.eventHubName", "type": "str"}, - "partition_count": {"key": "properties.partitionCount", "type": "int"}, "consumer_group_name": {"key": "properties.consumerGroupName", "type": "str"}, - "prefetch_count": {"key": "properties.prefetchCount", "type": "int"}, } def __init__( @@ -4184,9 +3192,7 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, consumer_group_name: Optional[str] = None, - prefetch_count: Optional[int] = None, **kwargs: Any ) -> None: """ @@ -4204,16 +3210,11 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int :keyword consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :paramtype consumer_group_name: str - :keyword prefetch_count: The number of messages that the message receiver can simultaneously - request. 
- :paramtype prefetch_count: int """ super().__init__(**kwargs) self.type: str = "Microsoft.ServiceBus/EventHub" @@ -4222,9 +3223,7 @@ def __init__( self.shared_access_policy_key = shared_access_policy_key self.authentication_mode = authentication_mode self.event_hub_name = event_hub_name - self.partition_count = partition_count self.consumer_group_name = consumer_group_name - self.prefetch_count = prefetch_count class EventHubStreamInputDataSourceProperties(EventHubDataSourceProperties): @@ -4244,16 +3243,11 @@ class EventHubStreamInputDataSourceProperties(EventHubDataSourceProperties): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. - :vartype partition_count: int :ivar consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :vartype consumer_group_name: str - :ivar prefetch_count: The number of messages that the message receiver can simultaneously - request. 
- :vartype prefetch_count: int """ _attribute_map = { @@ -4262,9 +3256,7 @@ class EventHubStreamInputDataSourceProperties(EventHubDataSourceProperties): "shared_access_policy_key": {"key": "sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "authenticationMode", "type": "str"}, "event_hub_name": {"key": "eventHubName", "type": "str"}, - "partition_count": {"key": "partitionCount", "type": "int"}, "consumer_group_name": {"key": "consumerGroupName", "type": "str"}, - "prefetch_count": {"key": "prefetchCount", "type": "int"}, } def __init__( @@ -4275,9 +3267,7 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, consumer_group_name: Optional[str] = None, - prefetch_count: Optional[int] = None, **kwargs: Any ) -> None: """ @@ -4295,16 +3285,11 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int :keyword consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :paramtype consumer_group_name: str - :keyword prefetch_count: The number of messages that the message receiver can simultaneously - request. 
- :paramtype prefetch_count: int """ super().__init__( service_bus_namespace=service_bus_namespace, @@ -4312,17 +3297,15 @@ def __init__( shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, event_hub_name=event_hub_name, - partition_count=partition_count, **kwargs ) self.consumer_group_name = consumer_group_name - self.prefetch_count = prefetch_count class EventHubV2OutputDataSource(OutputDataSource): """Describes an Event Hub output data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. @@ -4341,8 +3324,6 @@ class EventHubV2OutputDataSource(OutputDataSource): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. - :vartype partition_count: int :ivar partition_key: The key/column that is used to determine to which partition to send event data. 
:vartype partition_key: str @@ -4361,7 +3342,6 @@ class EventHubV2OutputDataSource(OutputDataSource): "shared_access_policy_key": {"key": "properties.sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, "event_hub_name": {"key": "properties.eventHubName", "type": "str"}, - "partition_count": {"key": "properties.partitionCount", "type": "int"}, "partition_key": {"key": "properties.partitionKey", "type": "str"}, "property_columns": {"key": "properties.propertyColumns", "type": "[str]"}, } @@ -4374,7 +3354,6 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, partition_key: Optional[str] = None, property_columns: Optional[List[str]] = None, **kwargs: Any @@ -4394,8 +3373,6 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int :keyword partition_key: The key/column that is used to determine to which partition to send event data. :paramtype partition_key: str @@ -4409,7 +3386,6 @@ def __init__( self.shared_access_policy_key = shared_access_policy_key self.authentication_mode = authentication_mode self.event_hub_name = event_hub_name - self.partition_count = partition_count self.partition_key = partition_key self.property_columns = property_columns @@ -4417,7 +3393,7 @@ def __init__( class EventHubV2StreamInputDataSource(StreamInputDataSource): """Describes an Event Hub input data source that contains stream data. - All required parameters must be populated in order to send to Azure. 
+ All required parameters must be populated in order to send to server. :ivar type: Indicates the type of input data source containing stream data. Required on PUT (CreateOrReplace) requests. Required. @@ -4436,16 +3412,11 @@ class EventHubV2StreamInputDataSource(StreamInputDataSource): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. - :vartype partition_count: int :ivar consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :vartype consumer_group_name: str - :ivar prefetch_count: The number of messages that the message receiver can simultaneously - request. 
- :vartype prefetch_count: int """ _validation = { @@ -4459,9 +3430,7 @@ class EventHubV2StreamInputDataSource(StreamInputDataSource): "shared_access_policy_key": {"key": "properties.sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, "event_hub_name": {"key": "properties.eventHubName", "type": "str"}, - "partition_count": {"key": "properties.partitionCount", "type": "int"}, "consumer_group_name": {"key": "properties.consumerGroupName", "type": "str"}, - "prefetch_count": {"key": "properties.prefetchCount", "type": "int"}, } def __init__( @@ -4472,9 +3441,7 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, consumer_group_name: Optional[str] = None, - prefetch_count: Optional[int] = None, **kwargs: Any ) -> None: """ @@ -4492,16 +3459,11 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int :keyword consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :paramtype consumer_group_name: str - :keyword prefetch_count: The number of messages that the message receiver can simultaneously - request. 
- :paramtype prefetch_count: int """ super().__init__(**kwargs) self.type: str = "Microsoft.EventHub/EventHub" @@ -4510,63 +3472,13 @@ def __init__( self.shared_access_policy_key = shared_access_policy_key self.authentication_mode = authentication_mode self.event_hub_name = event_hub_name - self.partition_count = partition_count self.consumer_group_name = consumer_group_name - self.prefetch_count = prefetch_count - - -class External(_serialization.Model): - """The storage account where the custom code artifacts are located. - - :ivar storage_account: The properties that are associated with an Azure Storage account. - :vartype storage_account: ~azure.mgmt.streamanalytics.models.StorageAccount - :ivar container: The UserCustomCode container. - :vartype container: str - :ivar path: The UserCustomCode path. - :vartype path: str - :ivar refresh_configuration: The refresh parameters for any/all updatable user defined - functions present in the job config. - :vartype refresh_configuration: ~azure.mgmt.streamanalytics.models.RefreshConfiguration - """ - - _attribute_map = { - "storage_account": {"key": "storageAccount", "type": "StorageAccount"}, - "container": {"key": "container", "type": "str"}, - "path": {"key": "path", "type": "str"}, - "refresh_configuration": {"key": "refreshConfiguration", "type": "RefreshConfiguration"}, - } - - def __init__( - self, - *, - storage_account: Optional["_models.StorageAccount"] = None, - container: Optional[str] = None, - path: Optional[str] = None, - refresh_configuration: Optional["_models.RefreshConfiguration"] = None, - **kwargs: Any - ) -> None: - """ - :keyword storage_account: The properties that are associated with an Azure Storage account. - :paramtype storage_account: ~azure.mgmt.streamanalytics.models.StorageAccount - :keyword container: The UserCustomCode container. - :paramtype container: str - :keyword path: The UserCustomCode path. 
- :paramtype path: str - :keyword refresh_configuration: The refresh parameters for any/all updatable user defined - functions present in the job config. - :paramtype refresh_configuration: ~azure.mgmt.streamanalytics.models.RefreshConfiguration - """ - super().__init__(**kwargs) - self.storage_account = storage_account - self.container = container - self.path = path - self.refresh_configuration = refresh_configuration class FileReferenceInputDataSource(ReferenceInputDataSource): """Describes a file input data source that contains reference data. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of input data source containing reference data. Required on PUT (CreateOrReplace) requests. Required. @@ -4759,7 +3671,7 @@ def __init__(self, *, data_type: Optional[str] = None, **kwargs: Any) -> None: class GatewayMessageBusOutputDataSource(OutputDataSource): """Describes a Gateway Message Bus output data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. @@ -4807,29 +3719,18 @@ def __init__(self, *, topic: Optional[str] = None, **kwargs: Any) -> None: self.topic = topic -class GatewayMessageBusOutputDataSourceProperties(GatewayMessageBusSourceProperties): +class GatewayMessageBusOutputDataSourceProperties(GatewayMessageBusSourceProperties): # pylint: disable=name-too-long """The properties that are associated with a Gateway Message Bus. :ivar topic: The name of the Service Bus topic. :vartype topic: str """ - _attribute_map = { - "topic": {"key": "topic", "type": "str"}, - } - - def __init__(self, *, topic: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword topic: The name of the Service Bus topic. 
- :paramtype topic: str - """ - super().__init__(topic=topic, **kwargs) - class GatewayMessageBusStreamInputDataSource(StreamInputDataSource): """Describes a blob input data source that contains stream data. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of input data source containing stream data. Required on PUT (CreateOrReplace) requests. Required. @@ -4857,123 +3758,27 @@ def __init__(self, *, topic: Optional[str] = None, **kwargs: Any) -> None: self.topic = topic -class GatewayMessageBusStreamInputDataSourceProperties(GatewayMessageBusSourceProperties): +class GatewayMessageBusStreamInputDataSourceProperties( + GatewayMessageBusSourceProperties +): # pylint: disable=name-too-long """The properties that are associated with a gateway message bus input containing stream data. :ivar topic: The name of the Service Bus topic. :vartype topic: str """ - _attribute_map = { - "topic": {"key": "topic", "type": "str"}, - } - - def __init__(self, *, topic: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword topic: The name of the Service Bus topic. - :paramtype topic: str - """ - super().__init__(topic=topic, **kwargs) - -class GetStreamingJobSkuResult(_serialization.Model): - """Describes an available SKU information. +class Identity(_serialization.Model): + """Describes how identity is verified. Variables are only populated by the server, and will be ignored when sending a request. - :ivar resource_type: The type of resource the SKU applies to. - "Microsoft.StreamAnalytics/streamingjobs" - :vartype resource_type: str or ~azure.mgmt.streamanalytics.models.ResourceType - :ivar sku: The properties that are associated with a SKU. - :vartype sku: ~azure.mgmt.streamanalytics.models.GetStreamingJobSkuResultSku - :ivar capacity: Describes scaling information of a SKU. 
- :vartype capacity: ~azure.mgmt.streamanalytics.models.SkuCapacity - """ - - _validation = { - "resource_type": {"readonly": True}, - "sku": {"readonly": True}, - "capacity": {"readonly": True}, - } - - _attribute_map = { - "resource_type": {"key": "resourceType", "type": "str"}, - "sku": {"key": "sku", "type": "GetStreamingJobSkuResultSku"}, - "capacity": {"key": "capacity", "type": "SkuCapacity"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.resource_type = None - self.sku = None - self.capacity = None - - -class GetStreamingJobSkuResults(_serialization.Model): - """Result of the request to get streaming job SKUs. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of available SKUs that the streaming job can use. - :vartype value: list[~azure.mgmt.streamanalytics.models.GetStreamingJobSkuResult] - :ivar next_link: The link (url) to the next page of results. - :vartype next_link: str - """ - - _validation = { - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[GetStreamingJobSkuResult]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, *, value: Optional[List["_models.GetStreamingJobSkuResult"]] = None, **kwargs: Any) -> None: - """ - :keyword value: The list of available SKUs that the streaming job can use. - :paramtype value: list[~azure.mgmt.streamanalytics.models.GetStreamingJobSkuResult] - """ - super().__init__(**kwargs) - self.value = value - self.next_link = None - - -class GetStreamingJobSkuResultSku(_serialization.Model): - """The properties that are associated with a SKU. - - :ivar name: The name of the SKU. 
"Standard" - :vartype name: str or ~azure.mgmt.streamanalytics.models.SkuName - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - } - - def __init__(self, *, name: Optional[Union[str, "_models.SkuName"]] = None, **kwargs: Any) -> None: - """ - :keyword name: The name of the SKU. "Standard" - :paramtype name: str or ~azure.mgmt.streamanalytics.models.SkuName - """ - super().__init__(**kwargs) - self.name = name - - -class Identity(_serialization.Model): - """Describes how identity is verified. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar tenant_id: The tenantId of the identity. + :ivar tenant_id: The identity tenantId. :vartype tenant_id: str - :ivar principal_id: The principalId of the identity. + :ivar principal_id: The identity principal ID. :vartype principal_id: str - :ivar type: The type of identity, can be SystemAssigned or UserAssigned. + :ivar type: The identity type. :vartype type: str - :ivar user_assigned_identities: The user assigned identities associated with the streaming job - resource. - :vartype user_assigned_identities: dict[str, JSON] """ _validation = { @@ -4985,24 +3790,17 @@ class Identity(_serialization.Model): "tenant_id": {"key": "tenantId", "type": "str"}, "principal_id": {"key": "principalId", "type": "str"}, "type": {"key": "type", "type": "str"}, - "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{object}"}, } - def __init__( - self, *, type: Optional[str] = None, user_assigned_identities: Optional[Dict[str, JSON]] = None, **kwargs: Any - ) -> None: + def __init__(self, *, type: Optional[str] = None, **kwargs: Any) -> None: """ - :keyword type: The type of identity, can be SystemAssigned or UserAssigned. + :keyword type: The identity type. :paramtype type: str - :keyword user_assigned_identities: The user assigned identities associated with the streaming - job resource. 
- :paramtype user_assigned_identities: dict[str, JSON] """ super().__init__(**kwargs) self.tenant_id = None self.principal_id = None self.type = type - self.user_assigned_identities = user_assigned_identities class Input(SubResource): @@ -5084,7 +3882,7 @@ class InputProperties(_serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates whether the input is a source of reference data or stream data. Required on PUT (CreateOrReplace) requests. Required. @@ -5104,8 +3902,6 @@ class InputProperties(_serialization.Model): :ivar partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. :vartype partition_key: str - :ivar watermark_settings: Settings which determine whether to read watermark events. - :vartype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties """ _validation = { @@ -5121,7 +3917,6 @@ class InputProperties(_serialization.Model): "etag": {"key": "etag", "type": "str"}, "compression": {"key": "compression", "type": "Compression"}, "partition_key": {"key": "partitionKey", "type": "str"}, - "watermark_settings": {"key": "watermarkSettings", "type": "InputWatermarkProperties"}, } _subtype_map = {"type": {"Reference": "ReferenceInputProperties", "Stream": "StreamInputProperties"}} @@ -5132,7 +3927,6 @@ def __init__( serialization: Optional["_models.Serialization"] = None, compression: Optional["_models.Compression"] = None, partition_key: Optional[str] = None, - watermark_settings: Optional["_models.InputWatermarkProperties"] = None, **kwargs: Any ) -> None: """ @@ -5144,8 +3938,6 @@ def __init__( :keyword partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. 
:paramtype partition_key: str - :keyword watermark_settings: Settings which determine whether to read watermark events. - :paramtype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties """ super().__init__(**kwargs) self.type: Optional[str] = None @@ -5154,36 +3946,12 @@ def __init__( self.etag = None self.compression = compression self.partition_key = partition_key - self.watermark_settings = watermark_settings - - -class InputWatermarkProperties(_serialization.Model): - """Settings which determine whether to read watermark events. - - :ivar watermark_mode: The input watermark mode. Known values are: "None" and "ReadWatermark". - :vartype watermark_mode: str or ~azure.mgmt.streamanalytics.models.InputWatermarkMode - """ - - _attribute_map = { - "watermark_mode": {"key": "watermarkMode", "type": "str"}, - } - - def __init__( - self, *, watermark_mode: Optional[Union[str, "_models.InputWatermarkMode"]] = None, **kwargs: Any - ) -> None: - """ - :keyword watermark_mode: The input watermark mode. Known values are: "None" and - "ReadWatermark". - :paramtype watermark_mode: str or ~azure.mgmt.streamanalytics.models.InputWatermarkMode - """ - super().__init__(**kwargs) - self.watermark_mode = watermark_mode class IoTHubStreamInputDataSource(StreamInputDataSource): """Describes an IoT Hub input data source that contains stream data. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of input data source containing stream data. Required on PUT (CreateOrReplace) requests. Required. @@ -5259,7 +4027,7 @@ def __init__( class JavaScriptFunctionBinding(FunctionBinding): """The binding to a JavaScript function. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the function binding type. Required. 
:vartype type: str @@ -5288,10 +4056,12 @@ def __init__(self, *, script: Optional[str] = None, **kwargs: Any) -> None: self.script = script -class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): +class JavaScriptFunctionRetrieveDefaultDefinitionParameters( + FunctionRetrieveDefaultDefinitionParameters +): # pylint: disable=name-too-long """The parameters needed to retrieve the default function definition for a JavaScript function. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar binding_type: Indicates the function binding type. Required. :vartype binding_type: str @@ -5337,25 +4107,14 @@ class StorageAccount(_serialization.Model): :ivar account_key: The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests. :vartype account_key: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ _attribute_map = { "account_name": {"key": "accountName", "type": "str"}, "account_key": {"key": "accountKey", "type": "str"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, } - def __init__( - self, - *, - account_name: Optional[str] = None, - account_key: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs: Any - ) -> None: + def __init__(self, *, account_name: Optional[str] = None, account_key: Optional[str] = None, **kwargs: Any) -> None: """ :keyword account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. @@ -5363,14 +4122,10 @@ def __init__( :keyword account_key: The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests. 
:paramtype account_key: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) self.account_name = account_name self.account_key = account_key - self.authentication_mode = authentication_mode class JobStorageAccount(StorageAccount): @@ -5412,20 +4167,18 @@ def __init__( "ConnectionString". :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ - super().__init__( - account_name=account_name, account_key=account_key, authentication_mode=authentication_mode, **kwargs - ) + super().__init__(account_name=account_name, account_key=account_key, **kwargs) + self.authentication_mode = authentication_mode class JsonSerialization(Serialization): """Describes how data from an input is serialized or how data is serialized when written to an output in JSON format. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", - "Parquet", and "Delta". + (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", and "Parquet". :vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType :ivar encoding: Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. @@ -5476,34 +4229,6 @@ def __init__( self.format = format -class LastOutputEventTimestamp(_serialization.Model): - """An output event timestamp. - - :ivar last_output_event_time: The last output event time. 
- :vartype last_output_event_time: str - :ivar last_update_time: The time that the last update happened. - :vartype last_update_time: str - """ - - _attribute_map = { - "last_output_event_time": {"key": "lastOutputEventTime", "type": "str"}, - "last_update_time": {"key": "lastUpdateTime", "type": "str"}, - } - - def __init__( - self, *, last_output_event_time: Optional[str] = None, last_update_time: Optional[str] = None, **kwargs: Any - ) -> None: - """ - :keyword last_output_event_time: The last output event time. - :paramtype last_output_event_time: str - :keyword last_update_time: The time that the last update happened. - :paramtype last_update_time: str - """ - super().__init__(**kwargs) - self.last_output_event_time = last_output_event_time - self.last_update_time = last_update_time - - class Operation(_serialization.Model): """A Stream Analytics REST API operation. @@ -5609,7 +4334,7 @@ def __init__(self, **kwargs: Any) -> None: self.next_link = None -class Output(SubResource): # pylint: disable=too-many-instance-attributes +class Output(SubResource): """An output object, containing all information associated with the named output. All outputs are contained under a streaming job. @@ -5638,12 +4363,6 @@ class Output(SubResource): # pylint: disable=too-many-instance-attributes detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :ivar last_output_event_timestamps: A list of the last output event times for each output - partition. The index of the array corresponds to the partition number. - :vartype last_output_event_timestamps: - list[~azure.mgmt.streamanalytics.models.LastOutputEventTimestamp] - :ivar watermark_settings: Settings which determine whether to send watermarks to downstream. 
- :vartype watermark_settings: ~azure.mgmt.streamanalytics.models.OutputWatermarkProperties """ _validation = { @@ -5651,7 +4370,6 @@ class Output(SubResource): # pylint: disable=too-many-instance-attributes "type": {"readonly": True}, "diagnostics": {"readonly": True}, "etag": {"readonly": True}, - "last_output_event_timestamps": {"readonly": True}, } _attribute_map = { @@ -5664,11 +4382,6 @@ class Output(SubResource): # pylint: disable=too-many-instance-attributes "serialization": {"key": "properties.serialization", "type": "Serialization"}, "diagnostics": {"key": "properties.diagnostics", "type": "Diagnostics"}, "etag": {"key": "properties.etag", "type": "str"}, - "last_output_event_timestamps": { - "key": "properties.lastOutputEventTimestamps", - "type": "[LastOutputEventTimestamp]", - }, - "watermark_settings": {"key": "properties.watermarkSettings", "type": "OutputWatermarkProperties"}, } def __init__( @@ -5679,7 +4392,6 @@ def __init__( time_window: Optional[str] = None, size_window: Optional[int] = None, serialization: Optional["_models.Serialization"] = None, - watermark_settings: Optional["_models.OutputWatermarkProperties"] = None, **kwargs: Any ) -> None: """ @@ -5695,8 +4407,6 @@ def __init__( :keyword serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. :paramtype serialization: ~azure.mgmt.streamanalytics.models.Serialization - :keyword watermark_settings: Settings which determine whether to send watermarks to downstream. 
- :paramtype watermark_settings: ~azure.mgmt.streamanalytics.models.OutputWatermarkProperties """ super().__init__(name=name, **kwargs) self.datasource = datasource @@ -5705,8 +4415,6 @@ def __init__( self.serialization = serialization self.diagnostics = None self.etag = None - self.last_output_event_timestamps = None - self.watermark_settings = watermark_settings class OutputListResult(_serialization.Model): @@ -5737,53 +4445,14 @@ def __init__(self, **kwargs: Any) -> None: self.next_link = None -class OutputWatermarkProperties(_serialization.Model): - """Settings which determine whether to send watermarks to downstream. - - :ivar watermark_mode: The output watermark mode. Known values are: "None", - "SendCurrentPartitionWatermark", and "SendLowestWatermarkAcrossPartitions". - :vartype watermark_mode: str or ~azure.mgmt.streamanalytics.models.OutputWatermarkMode - :ivar max_watermark_difference_across_partitions: Describes the maximal delta between the - fastest and slowest partitions, so the out of order window that catches all necessary events in - downstream jobs is well defined. - :vartype max_watermark_difference_across_partitions: str - """ - - _attribute_map = { - "watermark_mode": {"key": "watermarkMode", "type": "str"}, - "max_watermark_difference_across_partitions": {"key": "maxWatermarkDifferenceAcrossPartitions", "type": "str"}, - } - - def __init__( - self, - *, - watermark_mode: Optional[Union[str, "_models.OutputWatermarkMode"]] = None, - max_watermark_difference_across_partitions: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword watermark_mode: The output watermark mode. Known values are: "None", - "SendCurrentPartitionWatermark", and "SendLowestWatermarkAcrossPartitions". 
- :paramtype watermark_mode: str or ~azure.mgmt.streamanalytics.models.OutputWatermarkMode - :keyword max_watermark_difference_across_partitions: Describes the maximal delta between the - fastest and slowest partitions, so the out of order window that catches all necessary events in - downstream jobs is well defined. - :paramtype max_watermark_difference_across_partitions: str - """ - super().__init__(**kwargs) - self.watermark_mode = watermark_mode - self.max_watermark_difference_across_partitions = max_watermark_difference_across_partitions - - class ParquetSerialization(Serialization): """Describes how data from an input is serialized or how data is serialized when written to an output in Parquet format. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", - "Parquet", and "Delta". + (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", and "Parquet". :vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType :ivar properties: The properties that are associated with the Parquet serialization type. Required on PUT (CreateOrReplace) requests. @@ -5810,370 +4479,117 @@ def __init__(self, *, properties: Optional[JSON] = None, **kwargs: Any) -> None: self.properties = properties -class PostgreSQLDataSourceProperties(_serialization.Model): - """The properties that are associated with an Azure SQL database data source. +class PowerBIOutputDataSource(OutputDataSource): + """Describes a Power BI output data source. - :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :vartype server: str - :ivar database: The name of the Azure SQL database. 
Required on PUT (CreateOrReplace) requests. - :vartype database: str - :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) + All required parameters must be populated in order to send to server. + + :ivar type: Indicates the type of data source output will be written to. Required on PUT + (CreateOrReplace) requests. Required. + :vartype type: str + :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. + :vartype refresh_token: str + :ivar token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :vartype token_user_principal_name: str + :ivar token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :vartype token_user_display_name: str + :ivar dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :vartype dataset: str + :ivar table: The name of the Power BI table under the specified dataset. Required on PUT + (CreateOrReplace) requests. :vartype table: str - :ivar user: The user name that will be used to connect to the Azure SQL database. Required on - PUT (CreateOrReplace) requests. - :vartype user: str - :ivar password: The password that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. 
- :vartype password: str - :ivar max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query - partition) are available. Optional on PUT requests. - :vartype max_writer_count: float + :ivar group_id: The ID of the Power BI group. + :vartype group_id: str + :ivar group_name: The name of the Power BI group. Use this property to help remember which + specific Power BI group id was used. + :vartype group_name: str :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and "ConnectionString". :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ + _validation = { + "type": {"required": True}, + } + _attribute_map = { - "server": {"key": "server", "type": "str"}, - "database": {"key": "database", "type": "str"}, - "table": {"key": "table", "type": "str"}, - "user": {"key": "user", "type": "str"}, - "password": {"key": "password", "type": "str"}, - "max_writer_count": {"key": "maxWriterCount", "type": "float"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "refresh_token": {"key": "properties.refreshToken", "type": "str"}, + "token_user_principal_name": {"key": "properties.tokenUserPrincipalName", "type": "str"}, + "token_user_display_name": {"key": "properties.tokenUserDisplayName", "type": "str"}, + "dataset": {"key": "properties.dataset", "type": "str"}, + "table": {"key": "properties.table", "type": "str"}, + "group_id": {"key": "properties.groupId", "type": "str"}, + "group_name": {"key": "properties.groupName", "type": "str"}, + "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, } def __init__( self, *, - server: Optional[str] = None, - database: Optional[str] = None, + refresh_token: Optional[str] = None, + token_user_principal_name: Optional[str] = None, + token_user_display_name: Optional[str] = None, + dataset: Optional[str] = None, table: Optional[str] = None, - 
user: Optional[str] = None, - password: Optional[str] = None, - max_writer_count: Optional[float] = None, + group_id: Optional[str] = None, + group_name: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", **kwargs: Any ) -> None: """ - :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype server: str - :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + :keyword refresh_token: A refresh token that can be used to obtain a valid access token that + can then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. - :paramtype database: str - :keyword table: The name of the table in the Azure SQL database. Required on PUT + :paramtype refresh_token: str + :keyword token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :paramtype token_user_principal_name: str + :keyword token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :paramtype token_user_display_name: str + :keyword dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :paramtype dataset: str + :keyword table: The name of the Power BI table under the specified dataset. Required on PUT (CreateOrReplace) requests. 
:paramtype table: str - :keyword user: The user name that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :paramtype user: str - :keyword password: The password that will be used to connect to the Azure SQL database. - Required on PUT (CreateOrReplace) requests. - :paramtype password: str - :keyword max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. - :paramtype max_writer_count: float + :keyword group_id: The ID of the Power BI group. + :paramtype group_id: str + :keyword group_name: The name of the Power BI group. Use this property to help remember which + specific Power BI group id was used. + :paramtype group_name: str :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and "ConnectionString". :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) - self.server = server - self.database = database + self.type: str = "PowerBI" + self.refresh_token = refresh_token + self.token_user_principal_name = token_user_principal_name + self.token_user_display_name = token_user_display_name + self.dataset = dataset self.table = table - self.user = user - self.password = password - self.max_writer_count = max_writer_count + self.group_id = group_id + self.group_name = group_name self.authentication_mode = authentication_mode -class PostgreSQLOutputDataSource(OutputDataSource): - """Describes a PostgreSQL output data source. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of data source output will be written to. Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str - :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. 
- :vartype server: str - :ivar database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :vartype database: str - :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :vartype table: str - :ivar user: The user name that will be used to connect to the Azure SQL database. Required on - PUT (CreateOrReplace) requests. - :vartype user: str - :ivar password: The password that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :vartype password: str - :ivar max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query - partition) are available. Optional on PUT requests. - :vartype max_writer_count: float - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "server": {"key": "properties.server", "type": "str"}, - "database": {"key": "properties.database", "type": "str"}, - "table": {"key": "properties.table", "type": "str"}, - "user": {"key": "properties.user", "type": "str"}, - "password": {"key": "properties.password", "type": "str"}, - "max_writer_count": {"key": "properties.maxWriterCount", "type": "float"}, - "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, - } - - def __init__( - self, - *, - server: Optional[str] = None, - database: Optional[str] = None, - table: Optional[str] = None, - user: Optional[str] = None, - password: Optional[str] = None, - max_writer_count: Optional[float] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs: Any - ) -> None: - """ - :keyword server: The name of the SQL server containing the Azure SQL database. 
Required on PUT - (CreateOrReplace) requests. - :paramtype server: str - :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :paramtype database: str - :keyword table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype table: str - :keyword user: The user name that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :paramtype user: str - :keyword password: The password that will be used to connect to the Azure SQL database. - Required on PUT (CreateOrReplace) requests. - :paramtype password: str - :keyword max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. - :paramtype max_writer_count: float - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - super().__init__(**kwargs) - self.type: str = "Microsoft.DBForPostgreSQL/servers/databases" - self.server = server - self.database = database - self.table = table - self.user = user - self.password = password - self.max_writer_count = max_writer_count - self.authentication_mode = authentication_mode - - -class PostgreSQLOutputDataSourceProperties(PostgreSQLDataSourceProperties): - """The properties that are associated with a PostgreSQL output. - - :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :vartype server: str - :ivar database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :vartype database: str - :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) - requests. 
- :vartype table: str - :ivar user: The user name that will be used to connect to the Azure SQL database. Required on - PUT (CreateOrReplace) requests. - :vartype user: str - :ivar password: The password that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :vartype password: str - :ivar max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query - partition) are available. Optional on PUT requests. - :vartype max_writer_count: float - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - - _attribute_map = { - "server": {"key": "server", "type": "str"}, - "database": {"key": "database", "type": "str"}, - "table": {"key": "table", "type": "str"}, - "user": {"key": "user", "type": "str"}, - "password": {"key": "password", "type": "str"}, - "max_writer_count": {"key": "maxWriterCount", "type": "float"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, - } - - def __init__( - self, - *, - server: Optional[str] = None, - database: Optional[str] = None, - table: Optional[str] = None, - user: Optional[str] = None, - password: Optional[str] = None, - max_writer_count: Optional[float] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs: Any - ) -> None: - """ - :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype server: str - :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :paramtype database: str - :keyword table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype table: str - :keyword user: The user name that will be used to connect to the Azure SQL database. 
Required - on PUT (CreateOrReplace) requests. - :paramtype user: str - :keyword password: The password that will be used to connect to the Azure SQL database. - Required on PUT (CreateOrReplace) requests. - :paramtype password: str - :keyword max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. - :paramtype max_writer_count: float - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - super().__init__( - server=server, - database=database, - table=table, - user=user, - password=password, - max_writer_count=max_writer_count, - authentication_mode=authentication_mode, - **kwargs - ) - - -class PowerBIOutputDataSource(OutputDataSource): - """Describes a Power BI output data source. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of data source output will be written to. Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str - :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can - then be used to authenticate with the data source. A valid refresh token is currently only - obtainable via the Azure Portal. It is recommended to put a dummy string value here when - creating the data source and then going to the Azure Portal to authenticate the data source - which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) - requests. - :vartype refresh_token: str - :ivar token_user_principal_name: The user principal name (UPN) of the user that was used to - obtain the refresh token. Use this property to help remember which user was used to obtain the - refresh token. 
- :vartype token_user_principal_name: str - :ivar token_user_display_name: The user display name of the user that was used to obtain the - refresh token. Use this property to help remember which user was used to obtain the refresh - token. - :vartype token_user_display_name: str - :ivar dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. - :vartype dataset: str - :ivar table: The name of the Power BI table under the specified dataset. Required on PUT - (CreateOrReplace) requests. - :vartype table: str - :ivar group_id: The ID of the Power BI group. - :vartype group_id: str - :ivar group_name: The name of the Power BI group. Use this property to help remember which - specific Power BI group id was used. - :vartype group_name: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "refresh_token": {"key": "properties.refreshToken", "type": "str"}, - "token_user_principal_name": {"key": "properties.tokenUserPrincipalName", "type": "str"}, - "token_user_display_name": {"key": "properties.tokenUserDisplayName", "type": "str"}, - "dataset": {"key": "properties.dataset", "type": "str"}, - "table": {"key": "properties.table", "type": "str"}, - "group_id": {"key": "properties.groupId", "type": "str"}, - "group_name": {"key": "properties.groupName", "type": "str"}, - "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, - } - - def __init__( - self, - *, - refresh_token: Optional[str] = None, - token_user_principal_name: Optional[str] = None, - token_user_display_name: Optional[str] = None, - dataset: Optional[str] = None, - table: Optional[str] = None, - group_id: Optional[str] = None, - group_name: Optional[str] = None, - 
authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs: Any - ) -> None: - """ - :keyword refresh_token: A refresh token that can be used to obtain a valid access token that - can then be used to authenticate with the data source. A valid refresh token is currently only - obtainable via the Azure Portal. It is recommended to put a dummy string value here when - creating the data source and then going to the Azure Portal to authenticate the data source - which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) - requests. - :paramtype refresh_token: str - :keyword token_user_principal_name: The user principal name (UPN) of the user that was used to - obtain the refresh token. Use this property to help remember which user was used to obtain the - refresh token. - :paramtype token_user_principal_name: str - :keyword token_user_display_name: The user display name of the user that was used to obtain the - refresh token. Use this property to help remember which user was used to obtain the refresh - token. - :paramtype token_user_display_name: str - :keyword dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. - :paramtype dataset: str - :keyword table: The name of the Power BI table under the specified dataset. Required on PUT - (CreateOrReplace) requests. - :paramtype table: str - :keyword group_id: The ID of the Power BI group. - :paramtype group_id: str - :keyword group_name: The name of the Power BI group. Use this property to help remember which - specific Power BI group id was used. - :paramtype group_name: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". 
- :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - super().__init__(**kwargs) - self.type: str = "PowerBI" - self.refresh_token = refresh_token - self.token_user_principal_name = token_user_principal_name - self.token_user_display_name = token_user_display_name - self.dataset = dataset - self.table = table - self.group_id = group_id - self.group_name = group_name - self.authentication_mode = authentication_mode - - -class PowerBIOutputDataSourceProperties(OAuthBasedDataSourceProperties): - """The properties that are associated with a Power BI output. +class PowerBIOutputDataSourceProperties(OAuthBasedDataSourceProperties): + """The properties that are associated with a Power BI output. :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only @@ -6279,7 +4695,7 @@ class ProxyResource(Resource): Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. 
:vartype name: str @@ -6288,102 +4704,24 @@ class ProxyResource(Resource): :vartype type: str """ - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - class PrivateEndpoint(ProxyResource): - """Complete information about the private endpoint. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - :ivar properties: The properties associated with a private endpoint. - :vartype properties: ~azure.mgmt.streamanalytics.models.PrivateEndpointProperties - :ivar etag: Unique opaque string (generally a GUID) that represents the metadata state of the - resource (private endpoint) and changes whenever the resource is updated. Required on PUT - (CreateOrUpdate) requests. 
- :vartype etag: str - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "etag": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "properties": {"key": "properties", "type": "PrivateEndpointProperties"}, - "etag": {"key": "etag", "type": "str"}, - } - - def __init__(self, *, properties: Optional["_models.PrivateEndpointProperties"] = None, **kwargs: Any) -> None: - """ - :keyword properties: The properties associated with a private endpoint. - :paramtype properties: ~azure.mgmt.streamanalytics.models.PrivateEndpointProperties - """ - super().__init__(**kwargs) - self.properties = properties - self.etag = None - - -class PrivateEndpointListResult(_serialization.Model): - """A list of private endpoints. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: A list of private endpoints. - :vartype value: list[~azure.mgmt.streamanalytics.models.PrivateEndpoint] - :ivar next_link: The URL to fetch the next set of private endpoints. - :vartype next_link: str - """ - - _validation = { - "value": {"readonly": True}, - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[PrivateEndpoint]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.value = None - self.next_link = None - - -class PrivateEndpointProperties(_serialization.Model): - """The properties associated with a private endpoint. + """Complete information about the private endpoint. Variables are only populated by the server, and will be ignored when sending a request. + :ivar id: Fully qualified resource Id for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Unique opaque string (generally a GUID) that represents the metadata state of the + resource (private endpoint) and changes whenever the resource is updated. Required on PUT + (CreateOrUpdate) requests. + :vartype etag: str :ivar created_date: The date when this private endpoint was created. :vartype created_date: str :ivar manual_private_link_service_connections: A list of connections to the remote resource. @@ -6393,13 +4731,21 @@ class PrivateEndpointProperties(_serialization.Model): """ _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, "created_date": {"readonly": True}, } _attribute_map = { - "created_date": {"key": "createdDate", "type": "str"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "created_date": {"key": "properties.createdDate", "type": "str"}, "manual_private_link_service_connections": { - "key": "manualPrivateLinkServiceConnections", + "key": "properties.manualPrivateLinkServiceConnections", "type": "[PrivateLinkServiceConnection]", }, } @@ -6417,10 +4763,39 @@ def __init__( list[~azure.mgmt.streamanalytics.models.PrivateLinkServiceConnection] """ super().__init__(**kwargs) + self.etag = None self.created_date = None self.manual_private_link_service_connections = manual_private_link_service_connections +class PrivateEndpointListResult(_serialization.Model): + """A list of private endpoints. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: A list of private endpoints. + :vartype value: list[~azure.mgmt.streamanalytics.models.PrivateEndpoint] + :ivar next_link: The URL to fetch the next set of private endpoints. + :vartype next_link: str + """ + + _validation = { + "value": {"readonly": True}, + "next_link": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[PrivateEndpoint]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.value = None + self.next_link = None + + class PrivateLinkConnectionState(_serialization.Model): """A collection of read-only information about the state of the connection to the private remote resource. @@ -6486,385 +4861,36 @@ class PrivateLinkServiceConnection(_serialization.Model): "group_ids": {"key": "properties.groupIds", "type": "[str]"}, "request_message": {"key": "properties.requestMessage", "type": "str"}, "private_link_service_connection_state": { - "key": "properties.privateLinkServiceConnectionState", - "type": "PrivateLinkConnectionState", - }, - } - - def __init__( - self, - *, - private_link_service_id: Optional[str] = None, - group_ids: Optional[List[str]] = None, - private_link_service_connection_state: Optional["_models.PrivateLinkConnectionState"] = None, - **kwargs: Any - ) -> None: - """ - :keyword private_link_service_id: The resource id of the private link service. Required on PUT - (CreateOrUpdate) requests. - :paramtype private_link_service_id: str - :keyword group_ids: The ID(s) of the group(s) obtained from the remote resource that this - private endpoint should connect to. Required on PUT (CreateOrUpdate) requests. - :paramtype group_ids: list[str] - :keyword private_link_service_connection_state: A collection of read-only information about the - state of the connection to the private remote resource. 
- :paramtype private_link_service_connection_state: - ~azure.mgmt.streamanalytics.models.PrivateLinkConnectionState - """ - super().__init__(**kwargs) - self.private_link_service_id = private_link_service_id - self.group_ids = group_ids - self.request_message = None - self.private_link_service_connection_state = private_link_service_connection_state - - -class QueryCompilationError(_serialization.Model): - """An error produced by the compiler. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar message: The content of the error message. - :vartype message: str - :ivar start_line: Describes the error location in the original query. Not set if isGlobal is - true. - :vartype start_line: int - :ivar start_column: Describes the error location in the original query. Not set if isGlobal is - true. - :vartype start_column: int - :ivar end_line: Describes the error location in the original query. Not set if isGlobal is - true. - :vartype end_line: int - :ivar end_column: Describes the error location in the original query. Not set if isGlobal is - true. - :vartype end_column: int - :ivar is_global: Whether the error is not for a specific part but for the entire query. 
- :vartype is_global: bool - """ - - _validation = { - "message": {"readonly": True}, - "start_line": {"readonly": True}, - "start_column": {"readonly": True}, - "end_line": {"readonly": True}, - "end_column": {"readonly": True}, - "is_global": {"readonly": True}, - } - - _attribute_map = { - "message": {"key": "message", "type": "str"}, - "start_line": {"key": "startLine", "type": "int"}, - "start_column": {"key": "startColumn", "type": "int"}, - "end_line": {"key": "endLine", "type": "int"}, - "end_column": {"key": "endColumn", "type": "int"}, - "is_global": {"key": "isGlobal", "type": "bool"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.message = None - self.start_line = None - self.start_column = None - self.end_line = None - self.end_column = None - self.is_global = None - - -class QueryCompilationResult(_serialization.Model): - """The result of the query compilation request. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar errors: Error messages produced by the compiler. - :vartype errors: list[~azure.mgmt.streamanalytics.models.QueryCompilationError] - :ivar warnings: Warning messages produced by the compiler. - :vartype warnings: list[str] - :ivar inputs: All input names used by the query. - :vartype inputs: list[str] - :ivar outputs: All output names used by the query. - :vartype outputs: list[str] - :ivar functions: All function names used by the query. 
- :vartype functions: list[str] - """ - - _validation = { - "errors": {"readonly": True}, - "warnings": {"readonly": True}, - "inputs": {"readonly": True}, - "outputs": {"readonly": True}, - "functions": {"readonly": True}, - } - - _attribute_map = { - "errors": {"key": "errors", "type": "[QueryCompilationError]"}, - "warnings": {"key": "warnings", "type": "[str]"}, - "inputs": {"key": "inputs", "type": "[str]"}, - "outputs": {"key": "outputs", "type": "[str]"}, - "functions": {"key": "functions", "type": "[str]"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.errors = None - self.warnings = None - self.inputs = None - self.outputs = None - self.functions = None - - -class QueryFunction(_serialization.Model): - """A function for the query compilation. - - All required parameters must be populated in order to send to Azure. - - :ivar name: The name of the function. Required. - :vartype name: str - :ivar type: The type of the function. Required. - :vartype type: str - :ivar binding_type: The type of the function binding. Required. - :vartype binding_type: str - :ivar inputs: The inputs for the function. Required. - :vartype inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] - :ivar output: An output for the function. Required. 
- :vartype output: ~azure.mgmt.streamanalytics.models.FunctionOutput - """ - - _validation = { - "name": {"required": True}, - "type": {"required": True}, - "binding_type": {"required": True}, - "inputs": {"required": True}, - "output": {"required": True}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "binding_type": {"key": "bindingType", "type": "str"}, - "inputs": {"key": "inputs", "type": "[FunctionInput]"}, - "output": {"key": "output", "type": "FunctionOutput"}, - } - - def __init__( - self, - *, - name: str, - type: str, - binding_type: str, - inputs: List["_models.FunctionInput"], - output: "_models.FunctionOutput", - **kwargs: Any - ) -> None: - """ - :keyword name: The name of the function. Required. - :paramtype name: str - :keyword type: The type of the function. Required. - :paramtype type: str - :keyword binding_type: The type of the function binding. Required. - :paramtype binding_type: str - :keyword inputs: The inputs for the function. Required. - :paramtype inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] - :keyword output: An output for the function. Required. - :paramtype output: ~azure.mgmt.streamanalytics.models.FunctionOutput - """ - super().__init__(**kwargs) - self.name = name - self.type = type - self.binding_type = binding_type - self.inputs = inputs - self.output = output - - -class QueryInput(_serialization.Model): - """An input for the query compilation. - - All required parameters must be populated in order to send to Azure. - - :ivar name: The name of the input. Required. - :vartype name: str - :ivar type: The type of the input, can be Stream or Reference. Required. 
- :vartype type: str - """ - - _validation = { - "name": {"required": True}, - "type": {"required": True}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - } - - def __init__(self, *, name: str, type: str, **kwargs: Any) -> None: - """ - :keyword name: The name of the input. Required. - :paramtype name: str - :keyword type: The type of the input, can be Stream or Reference. Required. - :paramtype type: str - """ - super().__init__(**kwargs) - self.name = name - self.type = type - - -class QueryTestingResult(Error): - """The result of the query testing request. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar error: Error definition properties. - :vartype error: ~azure.mgmt.streamanalytics.models.ErrorError - :ivar status: The status of the query testing request. Known values are: "Started", "Success", - "CompilerError", "RuntimeError", "Timeout", and "UnknownError". - :vartype status: str or ~azure.mgmt.streamanalytics.models.QueryTestingResultStatus - :ivar output_uri: The SAS URL to the outputs payload. - :vartype output_uri: str - """ - - _validation = { - "status": {"readonly": True}, - "output_uri": {"readonly": True}, - } - - _attribute_map = { - "error": {"key": "error", "type": "ErrorError"}, - "status": {"key": "status", "type": "str"}, - "output_uri": {"key": "outputUri", "type": "str"}, - } - - def __init__(self, *, error: Optional["_models.ErrorError"] = None, **kwargs: Any) -> None: - """ - :keyword error: Error definition properties. - :paramtype error: ~azure.mgmt.streamanalytics.models.ErrorError - """ - super().__init__(error=error, **kwargs) - self.status = None - self.output_uri = None - - -class RawOutputDatasource(OutputDataSource): - """Describes a raw output data source. This data source type is only applicable/usable when using - the query testing API. 
You cannot create a job with this data source type or add an output of - this data source type to an existing job. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of data source output will be written to. Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str - :ivar payload_uri: The SAS URL to a blob where the output should be written. If this property - is not set, output data will be written into a temporary storage, and a SAS URL to that - temporary storage will be included in the result. - :vartype payload_uri: str - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "payload_uri": {"key": "properties.payloadUri", "type": "str"}, - } - - def __init__(self, *, payload_uri: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword payload_uri: The SAS URL to a blob where the output should be written. If this - property is not set, output data will be written into a temporary storage, and a SAS URL to - that temporary storage will be included in the result. - :paramtype payload_uri: str - """ - super().__init__(**kwargs) - self.type: str = "Raw" - self.payload_uri = payload_uri - - -class RawReferenceInputDataSource(ReferenceInputDataSource): - """Describes a raw input data source that contains reference data. This data source type is only - applicable/usable when using the query testing API. You cannot create a job with this data - source type or add an input of this data source type to an existing job. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of input data source containing reference data. Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str - :ivar payload: The JSON serialized content of the input data. Either payload or payloadUri must - be set, but not both. 
- :vartype payload: str - :ivar payload_uri: The SAS URL to a blob containing the JSON serialized content of the input - data. Either payload or payloadUri must be set, but not both. - :vartype payload_uri: str - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "payload": {"key": "properties.payload", "type": "str"}, - "payload_uri": {"key": "properties.payloadUri", "type": "str"}, - } - - def __init__(self, *, payload: Optional[str] = None, payload_uri: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword payload: The JSON serialized content of the input data. Either payload or payloadUri - must be set, but not both. - :paramtype payload: str - :keyword payload_uri: The SAS URL to a blob containing the JSON serialized content of the input - data. Either payload or payloadUri must be set, but not both. - :paramtype payload_uri: str - """ - super().__init__(**kwargs) - self.type: str = "Raw" - self.payload = payload - self.payload_uri = payload_uri - - -class RawStreamInputDataSource(StreamInputDataSource): - """Describes a raw input data source that contains stream data. This data source type is only - applicable/usable when using the query testing API. You cannot create a job with this data - source type or add an input of this data source type to an existing job. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of input data source containing stream data. Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str - :ivar payload: The JSON serialized content of the input data. Either payload or payloadUri must - be set, but not both. - :vartype payload: str - :ivar payload_uri: The SAS URL to a blob containing the JSON serialized content of the input - data. Either payload or payloadUri must be set, but not both. 
- :vartype payload_uri: str - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "payload": {"key": "properties.payload", "type": "str"}, - "payload_uri": {"key": "properties.payloadUri", "type": "str"}, + "key": "properties.privateLinkServiceConnectionState", + "type": "PrivateLinkConnectionState", + }, } - def __init__(self, *, payload: Optional[str] = None, payload_uri: Optional[str] = None, **kwargs: Any) -> None: + def __init__( + self, + *, + private_link_service_id: Optional[str] = None, + group_ids: Optional[List[str]] = None, + private_link_service_connection_state: Optional["_models.PrivateLinkConnectionState"] = None, + **kwargs: Any + ) -> None: """ - :keyword payload: The JSON serialized content of the input data. Either payload or payloadUri - must be set, but not both. - :paramtype payload: str - :keyword payload_uri: The SAS URL to a blob containing the JSON serialized content of the input - data. Either payload or payloadUri must be set, but not both. - :paramtype payload_uri: str + :keyword private_link_service_id: The resource id of the private link service. Required on PUT + (CreateOrUpdate) requests. + :paramtype private_link_service_id: str + :keyword group_ids: The ID(s) of the group(s) obtained from the remote resource that this + private endpoint should connect to. Required on PUT (CreateOrUpdate) requests. + :paramtype group_ids: list[str] + :keyword private_link_service_connection_state: A collection of read-only information about the + state of the connection to the private remote resource. 
+ :paramtype private_link_service_connection_state: + ~azure.mgmt.streamanalytics.models.PrivateLinkConnectionState """ super().__init__(**kwargs) - self.type: str = "Raw" - self.payload = payload - self.payload_uri = payload_uri + self.private_link_service_id = private_link_service_id + self.group_ids = group_ids + self.request_message = None + self.private_link_service_connection_state = private_link_service_connection_state class ReferenceInputProperties(InputProperties): @@ -6872,7 +4898,7 @@ class ReferenceInputProperties(InputProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates whether the input is a source of reference data or stream data. Required on PUT (CreateOrReplace) requests. Required. @@ -6892,8 +4918,6 @@ class ReferenceInputProperties(InputProperties): :ivar partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. :vartype partition_key: str - :ivar watermark_settings: Settings which determine whether to read watermark events. - :vartype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties :ivar datasource: Describes an input data source that contains reference data. Required on PUT (CreateOrReplace) requests. 
:vartype datasource: ~azure.mgmt.streamanalytics.models.ReferenceInputDataSource @@ -6912,7 +4936,6 @@ class ReferenceInputProperties(InputProperties): "etag": {"key": "etag", "type": "str"}, "compression": {"key": "compression", "type": "Compression"}, "partition_key": {"key": "partitionKey", "type": "str"}, - "watermark_settings": {"key": "watermarkSettings", "type": "InputWatermarkProperties"}, "datasource": {"key": "datasource", "type": "ReferenceInputDataSource"}, } @@ -6922,7 +4945,6 @@ def __init__( serialization: Optional["_models.Serialization"] = None, compression: Optional["_models.Compression"] = None, partition_key: Optional[str] = None, - watermark_settings: Optional["_models.InputWatermarkProperties"] = None, datasource: Optional["_models.ReferenceInputDataSource"] = None, **kwargs: Any ) -> None: @@ -6935,92 +4957,15 @@ def __init__( :keyword partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. :paramtype partition_key: str - :keyword watermark_settings: Settings which determine whether to read watermark events. - :paramtype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties :keyword datasource: Describes an input data source that contains reference data. Required on PUT (CreateOrReplace) requests. :paramtype datasource: ~azure.mgmt.streamanalytics.models.ReferenceInputDataSource """ - super().__init__( - serialization=serialization, - compression=compression, - partition_key=partition_key, - watermark_settings=watermark_settings, - **kwargs - ) + super().__init__(serialization=serialization, compression=compression, partition_key=partition_key, **kwargs) self.type: str = "Reference" self.datasource = datasource -class RefreshConfiguration(_serialization.Model): - """The refresh parameters for any/all updatable user defined functions present in the job config. - - :ivar path_pattern: The blob path pattern. Not a regular expression. 
It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See - https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or - https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more - detailed explanation and example. - :vartype path_pattern: str - :ivar date_format: The date format. Wherever {date} appears in pathPattern, the value of this - property is used as the date format instead. - :vartype date_format: str - :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this - property is used as the time format instead. - :vartype time_format: str - :ivar refresh_interval: The refresh interval. - :vartype refresh_interval: str - :ivar refresh_type: This property indicates which data refresh option to use, Blocking or - Nonblocking. Known values are: "Blocking" and "Nonblocking". - :vartype refresh_type: str or ~azure.mgmt.streamanalytics.models.UpdatableUdfRefreshType - """ - - _attribute_map = { - "path_pattern": {"key": "pathPattern", "type": "str"}, - "date_format": {"key": "dateFormat", "type": "str"}, - "time_format": {"key": "timeFormat", "type": "str"}, - "refresh_interval": {"key": "refreshInterval", "type": "str"}, - "refresh_type": {"key": "refreshType", "type": "str"}, - } - - def __init__( - self, - *, - path_pattern: Optional[str] = None, - date_format: Optional[str] = None, - time_format: Optional[str] = None, - refresh_interval: Optional[str] = None, - refresh_type: Optional[Union[str, "_models.UpdatableUdfRefreshType"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword path_pattern: The blob path pattern. Not a regular expression. It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. 
See - https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or - https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more - detailed explanation and example. - :paramtype path_pattern: str - :keyword date_format: The date format. Wherever {date} appears in pathPattern, the value of - this property is used as the date format instead. - :paramtype date_format: str - :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of - this property is used as the time format instead. - :paramtype time_format: str - :keyword refresh_interval: The refresh interval. - :paramtype refresh_interval: str - :keyword refresh_type: This property indicates which data refresh option to use, Blocking or - Nonblocking. Known values are: "Blocking" and "Nonblocking". - :paramtype refresh_type: str or ~azure.mgmt.streamanalytics.models.UpdatableUdfRefreshType - """ - super().__init__(**kwargs) - self.path_pattern = path_pattern - self.date_format = date_format - self.time_format = time_format - self.refresh_interval = refresh_interval - self.refresh_type = refresh_type - - class ResourceTestStatus(_serialization.Model): """Describes the status of the test operation along with error information, if applicable. @@ -7049,111 +4994,12 @@ def __init__(self, **kwargs: Any) -> None: self.error = None -class SampleInput(_serialization.Model): - """The stream analytics input to sample. - - :ivar input: The stream analytics input to sample. - :vartype input: ~azure.mgmt.streamanalytics.models.Input - :ivar compatibility_level: Defaults to the default ASA job compatibility level. Today it is - 1.2. - :vartype compatibility_level: str - :ivar events_uri: The SAS URI of the storage blob for service to write the sampled events to. - If this parameter is not provided, service will write events to he system account and share a - temporary SAS URI to it. 
- :vartype events_uri: str - :ivar data_locale: Defaults to en-US. - :vartype data_locale: str - """ - - _attribute_map = { - "input": {"key": "input", "type": "Input"}, - "compatibility_level": {"key": "compatibilityLevel", "type": "str"}, - "events_uri": {"key": "eventsUri", "type": "str"}, - "data_locale": {"key": "dataLocale", "type": "str"}, - } - - def __init__( - self, - *, - input: Optional["_models.Input"] = None, - compatibility_level: Optional[str] = None, - events_uri: Optional[str] = None, - data_locale: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword input: The stream analytics input to sample. - :paramtype input: ~azure.mgmt.streamanalytics.models.Input - :keyword compatibility_level: Defaults to the default ASA job compatibility level. Today it is - 1.2. - :paramtype compatibility_level: str - :keyword events_uri: The SAS URI of the storage blob for service to write the sampled events - to. If this parameter is not provided, service will write events to he system account and share - a temporary SAS URI to it. - :paramtype events_uri: str - :keyword data_locale: Defaults to en-US. - :paramtype data_locale: str - """ - super().__init__(**kwargs) - self.input = input - self.compatibility_level = compatibility_level - self.events_uri = events_uri - self.data_locale = data_locale - - -class SampleInputResult(Error): - """The result of the sample input request. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar error: Error definition properties. - :vartype error: ~azure.mgmt.streamanalytics.models.ErrorError - :ivar status: The status of the sample input request. Known values are: "ReadAllEventsInRange", - "NoEventsFoundInRange", and "ErrorConnectingToInput". - :vartype status: str or ~azure.mgmt.streamanalytics.models.SampleInputResultStatus - :ivar diagnostics: Diagnostics messages. E.g. message indicating some partitions from the input - have no data. 
- :vartype diagnostics: list[str] - :ivar events_download_url: A SAS URL to download the sampled input data. - :vartype events_download_url: str - :ivar last_arrival_time: The timestamp for the last event in the data. It is in DateTime - format. - :vartype last_arrival_time: str - """ - - _validation = { - "status": {"readonly": True}, - "diagnostics": {"readonly": True}, - "events_download_url": {"readonly": True}, - "last_arrival_time": {"readonly": True}, - } - - _attribute_map = { - "error": {"key": "error", "type": "ErrorError"}, - "status": {"key": "status", "type": "str"}, - "diagnostics": {"key": "diagnostics", "type": "[str]"}, - "events_download_url": {"key": "eventsDownloadUrl", "type": "str"}, - "last_arrival_time": {"key": "lastArrivalTime", "type": "str"}, - } - - def __init__(self, *, error: Optional["_models.ErrorError"] = None, **kwargs: Any) -> None: - """ - :keyword error: Error definition properties. - :paramtype error: ~azure.mgmt.streamanalytics.models.ErrorError - """ - super().__init__(error=error, **kwargs) - self.status = None - self.diagnostics = None - self.events_download_url = None - self.last_arrival_time = None - - class ScalarFunctionProperties(FunctionProperties): """The properties that are associated with a scalar function. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of function. Required. :vartype type: str @@ -7229,7 +5075,7 @@ def __init__(self, *, streaming_units: Optional[int] = None, **kwargs: Any) -> N class ServiceBusQueueOutputDataSource(OutputDataSource): """Describes a Service Bus Queue output data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. 
:ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. @@ -7322,7 +5168,7 @@ def __init__( self.system_property_columns = system_property_columns -class ServiceBusQueueOutputDataSourceProperties(ServiceBusDataSourceProperties): +class ServiceBusQueueOutputDataSourceProperties(ServiceBusDataSourceProperties): # pylint: disable=name-too-long """The properties that are associated with a Service Bus Queue output. :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, @@ -7412,7 +5258,7 @@ def __init__( class ServiceBusTopicOutputDataSource(OutputDataSource): """Describes a Service Bus Topic output data source. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates the type of data source output will be written to. Required on PUT (CreateOrReplace) requests. Required. @@ -7505,7 +5351,7 @@ def __init__( self.system_property_columns = system_property_columns -class ServiceBusTopicOutputDataSourceProperties(ServiceBusDataSourceProperties): +class ServiceBusTopicOutputDataSourceProperties(ServiceBusDataSourceProperties): # pylint: disable=name-too-long """The properties that are associated with a Service Bus Topic output. :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, @@ -7597,71 +5443,19 @@ class Sku(_serialization.Model): :ivar name: The name of the SKU. Required on PUT (CreateOrReplace) requests. "Standard" :vartype name: str or ~azure.mgmt.streamanalytics.models.SkuName - :ivar capacity: The capacity of the SKU. 
- :vartype capacity: int """ _attribute_map = { "name": {"key": "name", "type": "str"}, - "capacity": {"key": "capacity", "type": "int"}, } - def __init__( - self, *, name: Optional[Union[str, "_models.SkuName"]] = None, capacity: Optional[int] = None, **kwargs: Any - ) -> None: + def __init__(self, *, name: Optional[Union[str, "_models.SkuName"]] = None, **kwargs: Any) -> None: """ :keyword name: The name of the SKU. Required on PUT (CreateOrReplace) requests. "Standard" :paramtype name: str or ~azure.mgmt.streamanalytics.models.SkuName - :keyword capacity: The capacity of the SKU. - :paramtype capacity: int """ super().__init__(**kwargs) self.name = name - self.capacity = capacity - - -class SkuCapacity(_serialization.Model): - """Describes scaling information of a SKU. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar minimum: Specifies the minimum streaming units that the streaming job can use. - :vartype minimum: int - :ivar maximum: Specifies the maximum streaming units that the streaming job can use. - :vartype maximum: int - :ivar default: Specifies the default streaming units that the streaming job can use. - :vartype default: int - :ivar scale_type: The scale type applicable to the SKU. Known values are: "automatic", - "manual", and "none". - :vartype scale_type: str or ~azure.mgmt.streamanalytics.models.SkuCapacityScaleType - :ivar allowed_values: Specifies the valid streaming units a streaming job can scale to. 
- :vartype allowed_values: list[int] - """ - - _validation = { - "minimum": {"readonly": True}, - "maximum": {"readonly": True}, - "default": {"readonly": True}, - "scale_type": {"readonly": True}, - "allowed_values": {"readonly": True}, - } - - _attribute_map = { - "minimum": {"key": "minimum", "type": "int"}, - "maximum": {"key": "maximum", "type": "int"}, - "default": {"key": "default", "type": "int"}, - "scale_type": {"key": "scaleType", "type": "str"}, - "allowed_values": {"key": "allowedValues", "type": "[int]"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.minimum = None - self.maximum = None - self.default = None - self.scale_type = None - self.allowed_values = None class StartStreamingJobParameters(_serialization.Model): @@ -7716,7 +5510,7 @@ class StreamingJob(TrackedResource): # pylint: disable=too-many-instance-attrib Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -7727,14 +5521,11 @@ class StreamingJob(TrackedResource): # pylint: disable=too-many-instance-attrib :vartype tags: dict[str, str] :ivar location: The geo-location where the resource lives. :vartype location: str + :ivar identity: Describes the system-assigned managed identity assigned to this job that can be + used to authenticate with inputs and outputs. + :vartype identity: ~azure.mgmt.streamanalytics.models.Identity :ivar sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. 
:vartype sku: ~azure.mgmt.streamanalytics.models.Sku - :ivar identity: Describes the managed identity assigned to this job that can be used to - authenticate with inputs and outputs. - :vartype identity: ~azure.mgmt.streamanalytics.models.Identity - :ivar sku_properties_sku: Describes the SKU of the streaming job. Required on PUT - (CreateOrReplace) requests. - :vartype sku_properties_sku: ~azure.mgmt.streamanalytics.models.Sku :ivar job_id: A GUID uniquely identifying the streaming job. This GUID is generated upon creation of the streaming job. :vartype job_id: str @@ -7818,8 +5609,6 @@ class StreamingJob(TrackedResource): # pylint: disable=too-many-instance-attrib JobStorageAccount, this requires the user to also specify jobStorageAccount property. . Known values are: "SystemAccount" and "JobStorageAccount". :vartype content_storage_policy: str or ~azure.mgmt.streamanalytics.models.ContentStoragePolicy - :ivar externals: The storage account where the custom code artifacts are located. - :vartype externals: ~azure.mgmt.streamanalytics.models.External :ivar cluster: The cluster which streaming jobs will run on. 
:vartype cluster: ~azure.mgmt.streamanalytics.models.ClusterInfo """ @@ -7842,9 +5631,8 @@ class StreamingJob(TrackedResource): # pylint: disable=too-many-instance-attrib "type": {"key": "type", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "location": {"key": "location", "type": "str"}, - "sku": {"key": "sku", "type": "Sku"}, "identity": {"key": "identity", "type": "Identity"}, - "sku_properties_sku": {"key": "properties.sku", "type": "Sku"}, + "sku": {"key": "properties.sku", "type": "Sku"}, "job_id": {"key": "properties.jobId", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "job_state": {"key": "properties.jobState", "type": "str"}, @@ -7872,7 +5660,6 @@ class StreamingJob(TrackedResource): # pylint: disable=too-many-instance-attrib "etag": {"key": "properties.etag", "type": "str"}, "job_storage_account": {"key": "properties.jobStorageAccount", "type": "JobStorageAccount"}, "content_storage_policy": {"key": "properties.contentStoragePolicy", "type": "str"}, - "externals": {"key": "properties.externals", "type": "External"}, "cluster": {"key": "properties.cluster", "type": "ClusterInfo"}, } @@ -7881,9 +5668,8 @@ def __init__( # pylint: disable=too-many-locals *, tags: Optional[Dict[str, str]] = None, location: Optional[str] = None, - sku: Optional["_models.Sku"] = None, identity: Optional["_models.Identity"] = None, - sku_properties_sku: Optional["_models.Sku"] = None, + sku: Optional["_models.Sku"] = None, job_type: Optional[Union[str, "_models.JobType"]] = None, output_start_mode: Optional[Union[str, "_models.OutputStartMode"]] = None, output_start_time: Optional[datetime.datetime] = None, @@ -7899,7 +5685,6 @@ def __init__( # pylint: disable=too-many-locals functions: Optional[List["_models.Function"]] = None, job_storage_account: Optional["_models.JobStorageAccount"] = None, content_storage_policy: Optional[Union[str, "_models.ContentStoragePolicy"]] = None, - externals: Optional["_models.External"] = 
None, cluster: Optional["_models.ClusterInfo"] = None, **kwargs: Any ) -> None: @@ -7908,15 +5693,12 @@ def __init__( # pylint: disable=too-many-locals :paramtype tags: dict[str, str] :keyword location: The geo-location where the resource lives. :paramtype location: str + :keyword identity: Describes the system-assigned managed identity assigned to this job that can + be used to authenticate with inputs and outputs. + :paramtype identity: ~azure.mgmt.streamanalytics.models.Identity :keyword sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. :paramtype sku: ~azure.mgmt.streamanalytics.models.Sku - :keyword identity: Describes the managed identity assigned to this job that can be used to - authenticate with inputs and outputs. - :paramtype identity: ~azure.mgmt.streamanalytics.models.Identity - :keyword sku_properties_sku: Describes the SKU of the streaming job. Required on PUT - (CreateOrReplace) requests. - :paramtype sku_properties_sku: ~azure.mgmt.streamanalytics.models.Sku :keyword job_type: Describes the type of the job. Valid modes are ``Cloud`` and 'Edge'. Known values are: "Cloud" and "Edge". :paramtype job_type: str or ~azure.mgmt.streamanalytics.models.JobType @@ -7982,15 +5764,12 @@ def __init__( # pylint: disable=too-many-locals Known values are: "SystemAccount" and "JobStorageAccount". :paramtype content_storage_policy: str or ~azure.mgmt.streamanalytics.models.ContentStoragePolicy - :keyword externals: The storage account where the custom code artifacts are located. - :paramtype externals: ~azure.mgmt.streamanalytics.models.External :keyword cluster: The cluster which streaming jobs will run on. 
:paramtype cluster: ~azure.mgmt.streamanalytics.models.ClusterInfo """ super().__init__(tags=tags, location=location, **kwargs) - self.sku = sku self.identity = identity - self.sku_properties_sku = sku_properties_sku + self.sku = sku self.job_id = None self.provisioning_state = None self.job_state = None @@ -8012,7 +5791,6 @@ def __init__( # pylint: disable=too-many-locals self.etag = None self.job_storage_account = job_storage_account self.content_storage_policy = content_storage_policy - self.externals = externals self.cluster = cluster @@ -8049,7 +5827,7 @@ class StreamInputProperties(InputProperties): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to server. :ivar type: Indicates whether the input is a source of reference data or stream data. Required on PUT (CreateOrReplace) requests. Required. @@ -8069,8 +5847,6 @@ class StreamInputProperties(InputProperties): :ivar partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. :vartype partition_key: str - :ivar watermark_settings: Settings which determine whether to read watermark events. - :vartype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties :ivar datasource: Describes an input data source that contains stream data. Required on PUT (CreateOrReplace) requests. 
:vartype datasource: ~azure.mgmt.streamanalytics.models.StreamInputDataSource @@ -8089,7 +5865,6 @@ class StreamInputProperties(InputProperties): "etag": {"key": "etag", "type": "str"}, "compression": {"key": "compression", "type": "Compression"}, "partition_key": {"key": "partitionKey", "type": "str"}, - "watermark_settings": {"key": "watermarkSettings", "type": "InputWatermarkProperties"}, "datasource": {"key": "datasource", "type": "StreamInputDataSource"}, } @@ -8099,7 +5874,6 @@ def __init__( serialization: Optional["_models.Serialization"] = None, compression: Optional["_models.Compression"] = None, partition_key: Optional[str] = None, - watermark_settings: Optional["_models.InputWatermarkProperties"] = None, datasource: Optional["_models.StreamInputDataSource"] = None, **kwargs: Any ) -> None: @@ -8112,19 +5886,11 @@ def __init__( :keyword partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. :paramtype partition_key: str - :keyword watermark_settings: Settings which determine whether to read watermark events. - :paramtype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties :keyword datasource: Describes an input data source that contains stream data. Required on PUT (CreateOrReplace) requests. :paramtype datasource: ~azure.mgmt.streamanalytics.models.StreamInputDataSource """ - super().__init__( - serialization=serialization, - compression=compression, - partition_key=partition_key, - watermark_settings=watermark_settings, - **kwargs - ) + super().__init__(serialization=serialization, compression=compression, partition_key=partition_key, **kwargs) self.type: str = "Stream" self.datasource = datasource @@ -8195,160 +5961,6 @@ def __init__(self, **kwargs: Any) -> None: self.value = None -class TestDatasourceResult(Error): - """The result of the test input or output request. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - :ivar error: Error definition properties. - :vartype error: ~azure.mgmt.streamanalytics.models.ErrorError - :ivar status: The status of the sample output request. Known values are: "TestSucceeded" and - "TestFailed". - :vartype status: str or ~azure.mgmt.streamanalytics.models.TestDatasourceResultStatus - """ - - _validation = { - "status": {"readonly": True}, - } - - _attribute_map = { - "error": {"key": "error", "type": "ErrorError"}, - "status": {"key": "status", "type": "str"}, - } - - def __init__(self, *, error: Optional["_models.ErrorError"] = None, **kwargs: Any) -> None: - """ - :keyword error: Error definition properties. - :paramtype error: ~azure.mgmt.streamanalytics.models.ErrorError - """ - super().__init__(error=error, **kwargs) - self.status = None - - -class TestInput(_serialization.Model): - """A stream analytics input. - - All required parameters must be populated in order to send to Azure. - - :ivar input: The stream analytics input to test. Required. - :vartype input: ~azure.mgmt.streamanalytics.models.Input - """ - - _validation = { - "input": {"required": True}, - } - - _attribute_map = { - "input": {"key": "input", "type": "Input"}, - } - - def __init__(self, *, input: "_models.Input", **kwargs: Any) -> None: - """ - :keyword input: The stream analytics input to test. Required. - :paramtype input: ~azure.mgmt.streamanalytics.models.Input - """ - super().__init__(**kwargs) - self.input = input - - -class TestOutput(_serialization.Model): - """A stream analytics output. - - All required parameters must be populated in order to send to Azure. - - :ivar output: The stream analytics output to test. Required. - :vartype output: ~azure.mgmt.streamanalytics.models.Output - """ - - _validation = { - "output": {"required": True}, - } - - _attribute_map = { - "output": {"key": "output", "type": "Output"}, - } - - def __init__(self, *, output: "_models.Output", **kwargs: Any) -> None: - """ - :keyword output: The stream analytics output to test. 
Required. - :paramtype output: ~azure.mgmt.streamanalytics.models.Output - """ - super().__init__(**kwargs) - self.output = output - - -class TestQuery(_serialization.Model): - """The request object for query testing. - - All required parameters must be populated in order to send to Azure. - - :ivar diagnostics: Diagnostics information related to query testing. - :vartype diagnostics: ~azure.mgmt.streamanalytics.models.TestQueryDiagnostics - :ivar streaming_job: Stream analytics job object which defines the input, output, and - transformation for the query testing. Required. - :vartype streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob - """ - - _validation = { - "streaming_job": {"required": True}, - } - - _attribute_map = { - "diagnostics": {"key": "diagnostics", "type": "TestQueryDiagnostics"}, - "streaming_job": {"key": "streamingJob", "type": "StreamingJob"}, - } - - def __init__( - self, - *, - streaming_job: "_models.StreamingJob", - diagnostics: Optional["_models.TestQueryDiagnostics"] = None, - **kwargs: Any - ) -> None: - """ - :keyword diagnostics: Diagnostics information related to query testing. - :paramtype diagnostics: ~azure.mgmt.streamanalytics.models.TestQueryDiagnostics - :keyword streaming_job: Stream analytics job object which defines the input, output, and - transformation for the query testing. Required. - :paramtype streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob - """ - super().__init__(**kwargs) - self.diagnostics = diagnostics - self.streaming_job = streaming_job - - -class TestQueryDiagnostics(_serialization.Model): - """Diagnostics information related to query testing. - - All required parameters must be populated in order to send to Azure. - - :ivar write_uri: The SAS URI to the container or directory. Required. - :vartype write_uri: str - :ivar path: The path to the subdirectory. 
- :vartype path: str - """ - - _validation = { - "write_uri": {"required": True}, - } - - _attribute_map = { - "write_uri": {"key": "writeUri", "type": "str"}, - "path": {"key": "path", "type": "str"}, - } - - def __init__(self, *, write_uri: str, path: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword write_uri: The SAS URI to the container or directory. Required. - :paramtype write_uri: str - :keyword path: The path to the subdirectory. - :paramtype path: str - """ - super().__init__(**kwargs) - self.write_uri = write_uri - self.path = path - - class Transformation(SubResource): """A transformation object, containing all information associated with the named transformation. All transformations are contained under a streaming job. diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py index 98670f0ef7e7..3aba7399d6dd 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py @@ -11,20 +11,13 @@ class AuthenticationMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Authentication Mode. Valid modes are ``ConnectionString``\ , ``Msi`` and 'UserToken'.""" + """Authentication Mode. 
Valid modes are ``ConnectionString``\\ , ``Msi`` and 'UserToken'.""" MSI = "Msi" USER_TOKEN = "UserToken" CONNECTION_STRING = "ConnectionString" -class BlobWriteMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Determines whether blob blocks are either committed automatically or appended.""" - - APPEND = "Append" - ONCE = "Once" - - class ClusterProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The status of the cluster provisioning. The three terminal states are: Succeeded, Failed and Canceled. @@ -81,13 +74,6 @@ class Encoding(str, Enum, metaclass=CaseInsensitiveEnumMeta): UTF8 = "UTF8" -class EventGridEventSchemaType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Supported Event Grid schema types.""" - - EVENT_GRID_EVENT_SCHEMA = "EventGridEventSchema" - CLOUD_EVENT_SCHEMA = "CloudEventSchema" - - class EventSerializationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests. 
@@ -96,9 +82,7 @@ class EventSerializationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): CSV = "Csv" AVRO = "Avro" JSON = "Json" - CUSTOM_CLR = "CustomClr" PARQUET = "Parquet" - DELTA = "Delta" class EventsOutOfOrderPolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -108,13 +92,6 @@ class EventsOutOfOrderPolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta): DROP = "Drop" -class InputWatermarkMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The input watermark mode.""" - - NONE = "None" - READ_WATERMARK = "ReadWatermark" - - class JobState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The current execution state of the streaming job.""" @@ -180,31 +157,6 @@ class OutputStartMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): LAST_OUTPUT_EVENT_TIME = "LastOutputEventTime" -class OutputWatermarkMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The output watermark mode.""" - - NONE = "None" - SEND_CURRENT_PARTITION_WATERMARK = "SendCurrentPartitionWatermark" - SEND_LOWEST_WATERMARK_ACROSS_PARTITIONS = "SendLowestWatermarkAcrossPartitions" - - -class QueryTestingResultStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The status of the query testing request.""" - - STARTED = "Started" - """The query testing operation was initiated.""" - SUCCESS = "Success" - """The query testing operation succeeded.""" - COMPILER_ERROR = "CompilerError" - """The query testing operation failed due to a compiler error.""" - RUNTIME_ERROR = "RuntimeError" - """The query testing operation failed due to a runtime error.""" - TIMEOUT = "Timeout" - """The query testing operation failed due to a timeout.""" - UNKNOWN_ERROR = "UnknownError" - """The query testing operation failed due to an unknown error .""" - - class RefreshType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Indicates the type of data refresh option.""" @@ -213,58 +165,7 @@ class RefreshType(str, Enum, metaclass=CaseInsensitiveEnumMeta): REFRESH_PERIODICALLY_WITH_DELTA = 
"RefreshPeriodicallyWithDelta" -class ResourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of resource the SKU applies to.""" - - MICROSOFT_STREAM_ANALYTICS_STREAMINGJOBS = "Microsoft.StreamAnalytics/streamingjobs" - - -class SampleInputResultStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The status of the sample input request.""" - - READ_ALL_EVENTS_IN_RANGE = "ReadAllEventsInRange" - """The sample input operation successfully read all the events in the range.""" - NO_EVENTS_FOUND_IN_RANGE = "NoEventsFoundInRange" - """The sample input operation found no events in the range.""" - ERROR_CONNECTING_TO_INPUT = "ErrorConnectingToInput" - """The sample input operation failed to connect to the input.""" - - -class SkuCapacityScaleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The scale type applicable to the SKU.""" - - AUTOMATIC = "automatic" - """Supported scale type automatic.""" - MANUAL = "manual" - """Supported scale type manual.""" - NONE = "none" - """Scaling not supported.""" - - class SkuName(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The name of the SKU. 
Required on PUT (CreateOrReplace) requests.""" STANDARD = "Standard" - - -class TestDatasourceResultStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The status of the test input or output request.""" - - TEST_SUCCEEDED = "TestSucceeded" - """The test datasource operation succeeded.""" - TEST_FAILED = "TestFailed" - """The test datasource operation failed.""" - - -class UpdatableUdfRefreshType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """This property indicates which data refresh option to use, Blocking or Nonblocking.""" - - BLOCKING = "Blocking" - NONBLOCKING = "Nonblocking" - - -class UpdateMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Refresh modes for Stream Analytics functions.""" - - STATIC = "Static" - REFRESHABLE = "Refreshable" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py index 729eeb5cd6df..59ddeae83d21 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py @@ -6,14 +6,13 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._functions_operations import FunctionsOperations -from ._inputs_operations import InputsOperations -from ._outputs_operations import OutputsOperations from ._operations import Operations from ._streaming_jobs_operations import StreamingJobsOperations -from ._sku_operations import SkuOperations -from ._subscriptions_operations import SubscriptionsOperations +from ._inputs_operations import InputsOperations +from ._outputs_operations import OutputsOperations from ._transformations_operations import TransformationsOperations +from ._functions_operations import FunctionsOperations +from ._subscriptions_operations import SubscriptionsOperations from ._clusters_operations import ClustersOperations from ._private_endpoints_operations import PrivateEndpointsOperations @@ -22,14 +21,13 @@ from ._patch import patch_sdk as _patch_sdk __all__ = [ - "FunctionsOperations", - "InputsOperations", - "OutputsOperations", "Operations", "StreamingJobsOperations", - "SkuOperations", - "SubscriptionsOperations", + "InputsOperations", + "OutputsOperations", "TransformationsOperations", + "FunctionsOperations", + "SubscriptionsOperations", "ClustersOperations", "PrivateEndpointsOperations", ] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py index de9154d2818a..994685a8e38b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft 
Corporation. All rights reserved. @@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,8 +32,11 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -50,7 +56,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -90,7 +96,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -126,7 +132,7 @@ def build_get_request(resource_group_name: str, cluster_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -159,7 +165,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", 
{}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -190,7 +196,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -214,7 +220,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -246,7 +252,7 @@ def build_list_streaming_jobs_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -296,12 +302,12 @@ def _create_or_update_initial( self, resource_group_name: str, cluster_name: str, - cluster: Union[_models.Cluster, IO], + cluster: Union[_models.Cluster, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, 
**kwargs: Any - ) -> _models.Cluster: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -312,9 +318,9 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -324,7 +330,7 @@ def _create_or_update_initial( else: _json = self._serialize.body(cluster, "Cluster") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, @@ -334,40 +340,35 @@ def _create_or_update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("Cluster", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - _create_or_update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } - @overload def begin_create_or_update( self, @@ -401,14 +402,6 @@ def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -419,7 +412,7 @@ def begin_create_or_update( self, resource_group_name: str, cluster_name: str, - cluster: IO, + cluster: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -435,7 +428,7 @@ def begin_create_or_update( :type cluster_name: str :param cluster: The definition of the cluster that will be used to create a new cluster or replace the existing one. Required. - :type cluster: IO + :type cluster: IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -447,14 +440,6 @@ def begin_create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -465,7 +450,7 @@ def begin_create_or_update( self, resource_group_name: str, cluster_name: str, - cluster: Union[_models.Cluster, IO], + cluster: Union[_models.Cluster, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -478,8 +463,8 @@ def begin_create_or_update( :param cluster_name: The name of the cluster. Required. :type cluster_name: str :param cluster: The definition of the cluster that will be used to create a new cluster or - replace the existing one. Is either a Cluster type or a IO type. Required. - :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO + replace the existing one. Is either a Cluster type or a IO[bytes] type. Required. + :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -488,17 +473,6 @@ def begin_create_or_update( an existing record set. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -506,7 +480,7 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -526,12 +500,13 @@ def begin_create_or_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -541,27 +516,25 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.Cluster].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - 
begin_create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return LROPoller[_models.Cluster]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) def _update_initial( self, resource_group_name: str, cluster_name: str, - cluster: Union[_models.Cluster, IO], + cluster: Union[_models.Cluster, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any - ) -> Optional[_models.Cluster]: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -572,9 +545,9 @@ def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.Cluster]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -584,7 +557,7 @@ def _update_initial( else: _json = self._serialize.body(cluster, "Cluster") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, @@ -593,37 +566,34 @@ def _update_initial( content_type=content_type, json=_json, content=_content, - template_url=self._update_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - 
_stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _update_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return deserialized # type: ignore @overload def begin_update( @@ -654,14 +624,6 @@ def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -672,7 +634,7 @@ def begin_update( self, resource_group_name: str, cluster_name: str, - cluster: IO, + cluster: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -688,7 +650,7 @@ def begin_update( :type cluster_name: str :param cluster: The properties specified here will overwrite the corresponding properties in the existing cluster (ie. Those properties will be updated). Required. - :type cluster: IO + :type cluster: IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -696,14 +658,6 @@ def begin_update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -714,7 +668,7 @@ def begin_update( self, resource_group_name: str, cluster_name: str, - cluster: Union[_models.Cluster, IO], + cluster: Union[_models.Cluster, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> LROPoller[_models.Cluster]: @@ -727,24 +681,13 @@ def begin_update( :param cluster_name: The name of the cluster. Required. :type cluster_name: str :param cluster: The properties specified here will overwrite the corresponding properties in - the existing cluster (ie. Those properties will be updated). Is either a Cluster type or a IO - type. Required. - :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO + the existing cluster (ie. Those properties will be updated). Is either a Cluster type or a + IO[bytes] type. Required. + :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either Cluster or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -752,7 +695,7 @@ def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -771,12 +714,13 @@ def begin_update( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -786,17 +730,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.Cluster].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_update.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return LROPoller[_models.Cluster]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @distributed_trace def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _models.Cluster: @@ -807,12 +749,11 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo :type resource_group_name: str :param cluster_name: The name of the cluster. Required. :type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Cluster or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Cluster :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -823,24 +764,22 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - 
request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -850,21 +789,15 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Cluster", pipeline_response) + deserialized = self._deserialize("Cluster", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return deserialized # type: ignore - def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, cluster_name: str, **kwargs: Any - ) -> None: - error_map = { + def _delete_initial(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -875,39 +808,42 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, 
api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return deserialized # type: ignore @distributed_trace def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> LROPoller[None]: @@ -918,14 +854,6 @@ def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: An :type resource_group_name: str :param cluster_name: The name of the cluster. Required. :type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -933,13 +861,13 @@ def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: An _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, api_version=api_version, @@ -948,11 +876,12 @@ def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: An params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -961,23 +890,18 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: 
polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Cluster"]: """Lists all of the clusters in the given subscription. - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Cluster or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -985,10 +909,10 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Cluster"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -999,22 +923,30 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Cluster"]: def prepare_request(next_link=None): if not next_link: - request = build_list_by_subscription_request( + _request = build_list_by_subscription_request( 
subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_subscription.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) @@ -1024,11 +956,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1041,10 +973,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_subscription.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters" - } - @distributed_trace def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Cluster"]: 
"""Lists all of the clusters in the given resource group. @@ -1052,7 +980,6 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Cluster or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.Cluster] :raises ~azure.core.exceptions.HttpResponseError: @@ -1060,10 +987,10 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1074,23 +1001,31 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite def prepare_request(next_link=None): if not next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - 
request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) @@ -1100,11 +1035,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1117,10 +1052,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters" - } - @distributed_trace def list_streaming_jobs( self, resource_group_name: str, cluster_name: str, **kwargs: Any @@ -1132,7 +1063,6 @@ def list_streaming_jobs( :type resource_group_name: str :param cluster_name: The name of the cluster. Required. 
:type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ClusterJob or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.ClusterJob] :raises ~azure.core.exceptions.HttpResponseError: @@ -1140,10 +1070,10 @@ def list_streaming_jobs( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ClusterJobListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1154,24 +1084,32 @@ def list_streaming_jobs( def prepare_request(next_link=None): if not next_link: - request = build_list_streaming_jobs_request( + _request = build_list_streaming_jobs_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_streaming_jobs.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("ClusterJobListResult", pipeline_response) @@ -1181,11 +1119,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1197,7 +1135,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list_streaming_jobs.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs" - } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py index 2aebc9d7f884..f6e5db49c3b6 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,8 +32,11 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,7 +57,7 @@ def build_create_or_replace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -98,7 +104,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -137,7 +143,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -171,7 +177,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -205,7 +211,7 @@ def build_list_by_streaming_job_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -240,7 +246,7 @@ def build_test_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -271,13 +277,13 @@ def build_test_request( return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_retrieve_default_definition_request( +def build_retrieve_default_definition_request( # pylint: disable=name-too-long resource_group_name: str, job_name: str, function_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -363,7 +369,6 @@ def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -375,7 +380,7 @@ def create_or_replace( resource_group_name: str, job_name: str, function_name: str, - function: IO, + function: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -393,7 +398,7 @@ def create_or_replace( :type function_name: str :param function: The definition of the function that will be used to create a new function or replace the existing one under the streaming job. Required. - :type function: IO + :type function: IO[bytes] :param if_match: The ETag of the function. Omit this value to always overwrite the current function. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -405,7 +410,6 @@ def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -417,7 +421,7 @@ def create_or_replace( resource_group_name: str, job_name: str, function_name: str, - function: Union[_models.Function, IO], + function: Union[_models.Function, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -432,9 +436,9 @@ def create_or_replace( :param function_name: The name of the function. Required. :type function_name: str :param function: The definition of the function that will be used to create a new function or - replace the existing one under the streaming job. Is either a Function type or a IO type. - Required. - :type function: ~azure.mgmt.streamanalytics.models.Function or IO + replace the existing one under the streaming job. Is either a Function type or a IO[bytes] + type. Required. + :type function: ~azure.mgmt.streamanalytics.models.Function or IO[bytes] :param if_match: The ETag of the function. Omit this value to always overwrite the current function. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -443,15 +447,11 @@ def create_or_replace( an existing function. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -462,7 +462,7 @@ def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Function] = kwargs.pop("cls", None) @@ -474,7 +474,7 @@ def create_or_replace( else: _json = self._serialize.body(function, "Function") - request = build_create_or_replace_request( + _request = build_create_or_replace_request( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, @@ -485,16 +485,14 @@ def create_or_replace( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_replace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -505,25 +503,15 @@ def create_or_replace( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} - if response.status_code == 
200: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("Function", pipeline_response) - - if response.status_code == 201: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Function", pipeline_response) + deserialized = self._deserialize("Function", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - create_or_replace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" - } - @overload def update( self, @@ -559,7 +547,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -571,7 +558,7 @@ def update( resource_group_name: str, job_name: str, function_name: str, - function: IO, + function: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -592,7 +579,7 @@ def update( corresponding properties in the existing function (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing function will remain the same and not change as a result of this PATCH operation. Required. - :type function: IO + :type function: IO[bytes] :param if_match: The ETag of the function. Omit this value to always overwrite the current function. 
Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -600,7 +587,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -612,7 +598,7 @@ def update( resource_group_name: str, job_name: str, function_name: str, - function: Union[_models.Function, IO], + function: Union[_models.Function, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> _models.Function: @@ -631,21 +617,17 @@ def update( corresponding properties in the existing function (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing function will remain the same and not change as a result of this PATCH operation. Is either a - Function type or a IO type. Required. - :type function: ~azure.mgmt.streamanalytics.models.Function or IO + Function type or a IO[bytes] type. Required. + :type function: ~azure.mgmt.streamanalytics.models.Function or IO[bytes] :param if_match: The ETag of the function. Omit this value to always overwrite the current function. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -656,7 +638,7 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Function] = kwargs.pop("cls", None) @@ -668,7 +650,7 @@ def update( else: _json = self._serialize.body(function, "Function") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, @@ -678,16 +660,14 @@ def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -700,16 +680,12 @@ def update( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Function", pipeline_response) + deserialized = 
self._deserialize("Function", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" - } + return deserialized # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -724,12 +700,11 @@ def delete( # pylint: disable=inconsistent-return-statements :type job_name: str :param function_name: The name of the function. Required. :type function_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -740,25 +715,23 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = 
False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -769,11 +742,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace def get(self, resource_group_name: str, job_name: str, function_name: str, **kwargs: Any) -> _models.Function: @@ -786,12 +755,11 @@ def get(self, resource_group_name: str, job_name: str, function_name: str, **kwa :type job_name: str :param function_name: The name of the function. Required. :type function_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -802,25 +770,23 @@ def get(self, resource_group_name: str, job_name: str, function_name: str, **kwa _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Function] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( 
resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -833,16 +799,12 @@ def get(self, resource_group_name: str, job_name: str, function_name: str, **kwa response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Function", pipeline_response) + deserialized = self._deserialize("Function", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" - } + return deserialized # type: ignore @distributed_trace def list_by_streaming_job( @@ -856,11 +818,10 @@ def list_by_streaming_job( :param job_name: The name of the streaming job. Required. :type job_name: str :param select: The $select OData query parameter. This is a comma-separated list of structural - properties to include in the response, or "\ *" to include all properties. By default, all - properties are returned except diagnostics. Currently only accepts '*\ ' as a valid value. + properties to include in the response, or "\\ *" to include all properties. By default, all + properties are returned except diagnostics. 
Currently only accepts '*\\ ' as a valid value. Default value is None. :type select: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Function or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.Function] :raises ~azure.core.exceptions.HttpResponseError: @@ -868,10 +829,10 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FunctionListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -882,25 +843,33 @@ def list_by_streaming_job( def prepare_request(next_link=None): if not next_link: - request = build_list_by_streaming_job_request( + _request = build_list_by_streaming_job_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, select=select, api_version=api_version, - template_url=self.list_by_streaming_job.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("FunctionListResult", pipeline_response) @@ -910,11 +879,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -927,19 +896,15 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions" - } - def _test_initial( self, resource_group_name: str, job_name: str, function_name: str, - function: Optional[Union[_models.Function, IO]] = None, + function: Optional[Union[_models.Function, IO[bytes]]] = None, **kwargs: Any - ) -> Optional[_models.ResourceTestStatus]: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -950,9 +915,9 @@ def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -965,7 +930,7 @@ def _test_initial( else: _json = None - request = build_test_request( + _request = build_test_request( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, @@ -974,37 +939,34 @@ def _test_initial( content_type=content_type, json=_json, content=_content, - template_url=self._test_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - 
_test_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test" - } + return deserialized # type: ignore @overload def begin_test( @@ -1038,14 +1000,6 @@ def begin_test( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] @@ -1058,7 +1012,7 @@ def begin_test( resource_group_name: str, job_name: str, function_name: str, - function: Optional[IO] = None, + function: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -1080,18 +1034,10 @@ def begin_test( properties specified will overwrite the corresponding properties in the existing function (exactly like a PATCH operation) and the resulting function will be tested. Default value is None. - :type function: IO + :type function: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] @@ -1104,7 +1050,7 @@ def begin_test( resource_group_name: str, job_name: str, function_name: str, - function: Optional[Union[_models.Function, IO]] = None, + function: Optional[Union[_models.Function, IO[bytes]]] = None, **kwargs: Any ) -> LROPoller[_models.ResourceTestStatus]: """Tests if the information provided for a function is valid. This can range from testing the @@ -1123,19 +1069,8 @@ def begin_test( this parameter can be left null to test the existing function as is or if specified, the properties specified will overwrite the corresponding properties in the existing function (exactly like a PATCH operation) and the resulting function will be tested. Is either a - Function type or a IO type. Default value is None. - :type function: ~azure.mgmt.streamanalytics.models.Function or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + Function type or a IO[bytes] type. Default value is None. + :type function: ~azure.mgmt.streamanalytics.models.Function or IO[bytes] :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] @@ -1144,7 +1079,7 @@ def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -1163,12 +1098,13 @@ def begin_test( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = self._deserialize("ResourceTestStatus", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -1178,17 +1114,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return 
LROPoller.from_continuation_token( + return LROPoller[_models.ResourceTestStatus].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_test.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test" - } + return LROPoller[_models.ResourceTestStatus]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @overload def retrieve_default_definition( @@ -1219,7 +1153,6 @@ def retrieve_default_definition( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -1231,7 +1164,7 @@ def retrieve_default_definition( resource_group_name: str, job_name: str, function_name: str, - function_retrieve_default_definition_parameters: Optional[IO] = None, + function_retrieve_default_definition_parameters: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -1247,11 +1180,10 @@ def retrieve_default_definition( :type function_name: str :param function_retrieve_default_definition_parameters: Parameters used to specify the type of function to retrieve the default definition for. Default value is None. - :type function_retrieve_default_definition_parameters: IO + :type function_retrieve_default_definition_parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: @@ -1264,7 +1196,7 @@ def retrieve_default_definition( job_name: str, function_name: str, function_retrieve_default_definition_parameters: Optional[ - Union[_models.FunctionRetrieveDefaultDefinitionParameters, IO] + Union[_models.FunctionRetrieveDefaultDefinitionParameters, IO[bytes]] ] = None, **kwargs: Any ) -> _models.Function: @@ -1279,18 +1211,14 @@ def retrieve_default_definition( :type function_name: str :param function_retrieve_default_definition_parameters: Parameters used to specify the type of function to retrieve the default definition for. Is either a - FunctionRetrieveDefaultDefinitionParameters type or a IO type. Default value is None. + FunctionRetrieveDefaultDefinitionParameters type or a IO[bytes] type. Default value is None. :type function_retrieve_default_definition_parameters: - ~azure.mgmt.streamanalytics.models.FunctionRetrieveDefaultDefinitionParameters or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response + ~azure.mgmt.streamanalytics.models.FunctionRetrieveDefaultDefinitionParameters or IO[bytes] :return: Function or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Function :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1301,7 +1229,7 @@ def retrieve_default_definition( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Function] = kwargs.pop("cls", None) @@ -1318,7 +1246,7 @@ def retrieve_default_definition( else: _json = None - request = build_retrieve_default_definition_request( + _request = build_retrieve_default_definition_request( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, @@ -1327,16 +1255,14 @@ def retrieve_default_definition( content_type=content_type, json=_json, content=_content, - template_url=self.retrieve_default_definition.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1346,13 +1272,9 @@ def retrieve_default_definition( error 
= self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("Function", pipeline_response) + deserialized = self._deserialize("Function", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - retrieve_default_definition.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition" - } + return deserialized # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py index 0d4226aed229..6c24f8b4a469 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from io import IOBase -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,8 +32,11 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,7 +57,7 @@ def build_create_or_replace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -98,7 +104,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -137,7 +143,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -171,7 +177,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -205,7 +211,7 @@ def build_list_by_streaming_job_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -240,7 +246,7 @@ def build_test_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -326,7 +332,6 @@ def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: @@ -338,7 +343,7 @@ def create_or_replace( resource_group_name: str, job_name: str, input_name: str, - input: IO, + input: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -356,7 +361,7 @@ def create_or_replace( :type input_name: str :param input: The definition of the input that will be used to create a new input or replace the existing one under the streaming job. Required. - :type input: IO + :type input: IO[bytes] :param if_match: The ETag of the input. Omit this value to always overwrite the current input. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -368,7 +373,6 @@ def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: @@ -380,7 +384,7 @@ def create_or_replace( resource_group_name: str, job_name: str, input_name: str, - input: Union[_models.Input, IO], + input: Union[_models.Input, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -395,8 +399,8 @@ def create_or_replace( :param input_name: The name of the input. Required. :type input_name: str :param input: The definition of the input that will be used to create a new input or replace - the existing one under the streaming job. Is either a Input type or a IO type. Required. 
- :type input: ~azure.mgmt.streamanalytics.models.Input or IO + the existing one under the streaming job. Is either a Input type or a IO[bytes] type. Required. + :type input: ~azure.mgmt.streamanalytics.models.Input or IO[bytes] :param if_match: The ETag of the input. Omit this value to always overwrite the current input. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -405,15 +409,11 @@ def create_or_replace( existing input. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -424,7 +424,7 @@ def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Input] = kwargs.pop("cls", None) @@ -436,7 +436,7 @@ def create_or_replace( else: _json = self._serialize.body(input, "Input") - request = build_create_or_replace_request( + _request = build_create_or_replace_request( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, @@ -447,16 +447,14 @@ def create_or_replace( 
content_type=content_type, json=_json, content=_content, - template_url=self.create_or_replace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -467,25 +465,15 @@ def create_or_replace( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} - if response.status_code == 200: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("Input", pipeline_response) - - if response.status_code == 201: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Input", pipeline_response) + deserialized = self._deserialize("Input", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - create_or_replace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" - } - @overload def update( self, @@ -521,7 +509,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: @@ -533,7 +520,7 @@ def update( resource_group_name: str, job_name: str, input_name: str, - input: IO, + input: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -554,7 +541,7 @@ def update( properties in the existing input (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing input will remain the same and not change as a result of this PATCH operation. Required. - :type input: IO + :type input: IO[bytes] :param if_match: The ETag of the input. Omit this value to always overwrite the current input. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -562,7 +549,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: @@ -574,7 +560,7 @@ def update( resource_group_name: str, job_name: str, input_name: str, - input: Union[_models.Input, IO], + input: Union[_models.Input, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> _models.Input: @@ -593,21 +579,17 @@ def update( properties in the existing input (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing input will remain the same and not change as a result of this PATCH operation. Is either a Input type or a - IO type. Required. 
- :type input: ~azure.mgmt.streamanalytics.models.Input or IO + IO[bytes] type. Required. + :type input: ~azure.mgmt.streamanalytics.models.Input or IO[bytes] :param if_match: The ETag of the input. Omit this value to always overwrite the current input. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -618,7 +600,7 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Input] = kwargs.pop("cls", None) @@ -630,7 +612,7 @@ def update( else: _json = self._serialize.body(input, "Input") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, @@ -640,16 +622,14 @@ def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) 
_stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -662,16 +642,12 @@ def update( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Input", pipeline_response) + deserialized = self._deserialize("Input", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" - } + return deserialized # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -686,12 +662,11 @@ def delete( # pylint: disable=inconsistent-return-statements :type job_name: str :param input_name: The name of the input. Required. 
:type input_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -702,25 +677,23 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -731,11 +704,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace def get(self, 
resource_group_name: str, job_name: str, input_name: str, **kwargs: Any) -> _models.Input: @@ -748,12 +717,11 @@ def get(self, resource_group_name: str, job_name: str, input_name: str, **kwargs :type job_name: str :param input_name: The name of the input. Required. :type input_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Input or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Input :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -764,25 +732,23 @@ def get(self, resource_group_name: str, job_name: str, input_name: str, **kwargs _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Input] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -795,16 +761,12 @@ def get(self, resource_group_name: str, job_name: str, input_name: str, **kwargs response_headers = {} response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Input", pipeline_response) + deserialized = self._deserialize("Input", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" - } + return deserialized # type: ignore @distributed_trace def list_by_streaming_job( @@ -818,11 +780,10 @@ def list_by_streaming_job( :param job_name: The name of the streaming job. Required. :type job_name: str :param select: The $select OData query parameter. This is a comma-separated list of structural - properties to include in the response, or "\ *" to include all properties. By default, all - properties are returned except diagnostics. Currently only accepts '*\ ' as a valid value. + properties to include in the response, or "\\ *" to include all properties. By default, all + properties are returned except diagnostics. Currently only accepts '*\\ ' as a valid value. Default value is None. 
:type select: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Input or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.Input] :raises ~azure.core.exceptions.HttpResponseError: @@ -830,10 +791,10 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.InputListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -844,25 +805,33 @@ def list_by_streaming_job( def prepare_request(next_link=None): if not next_link: - request = build_list_by_streaming_job_request( + _request = build_list_by_streaming_job_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, select=select, api_version=api_version, - template_url=self.list_by_streaming_job.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("InputListResult", pipeline_response) @@ -872,11 +841,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -889,19 +858,15 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs" - } - def _test_initial( self, resource_group_name: str, job_name: str, input_name: str, - input: Optional[Union[_models.Input, IO]] = None, + input: Optional[Union[_models.Input, IO[bytes]]] = None, **kwargs: Any - ) -> Optional[_models.ResourceTestStatus]: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -912,9 +877,9 @@ def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -927,7 +892,7 @@ def _test_initial( else: _json = None - request = build_test_request( + _request = build_test_request( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, @@ -936,37 +901,34 @@ def _test_initial( content_type=content_type, json=_json, content=_content, - template_url=self._test_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _test_initial.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test" - } + return deserialized # type: ignore @overload def begin_test( @@ -998,14 +960,6 @@ def begin_test( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] @@ -1018,7 +972,7 @@ def begin_test( resource_group_name: str, job_name: str, input_name: str, - input: Optional[IO] = None, + input: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -1038,18 +992,10 @@ def begin_test( parameter can be left null to test the existing input as is or if specified, the properties specified will overwrite the corresponding properties in the existing input (exactly like a PATCH operation) and the resulting input will be tested. Default value is None. - :type input: IO + :type input: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] @@ -1062,7 +1008,7 @@ def begin_test( resource_group_name: str, job_name: str, input_name: str, - input: Optional[Union[_models.Input, IO]] = None, + input: Optional[Union[_models.Input, IO[bytes]]] = None, **kwargs: Any ) -> LROPoller[_models.ResourceTestStatus]: """Tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics @@ -1079,20 +1025,9 @@ def begin_test( full input definition intended to be tested. If the input specified already exists, this parameter can be left null to test the existing input as is or if specified, the properties specified will overwrite the corresponding properties in the existing input (exactly like a - PATCH operation) and the resulting input will be tested. Is either a Input type or a IO type. - Default value is None. - :type input: ~azure.mgmt.streamanalytics.models.Input or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + PATCH operation) and the resulting input will be tested. Is either a Input type or a IO[bytes] + type. Default value is None. + :type input: ~azure.mgmt.streamanalytics.models.Input or IO[bytes] :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] @@ -1101,7 +1036,7 @@ def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -1120,12 +1055,13 @@ def begin_test( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = self._deserialize("ResourceTestStatus", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -1135,14 +1071,12 @@ def get_long_running_output(pipeline_response): else: polling_method = 
polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.ResourceTestStatus].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_test.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test" - } + return LROPoller[_models.ResourceTestStatus]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py index 39828e9ffdd1..bc2877643660 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,7 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +import sys +from typing import Any, Callable, Dict, Iterable, Optional, Type, TypeVar +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,16 +20,18 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -39,7 +43,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -77,7 +81,6 @@ def __init__(self, *args, **kwargs): def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: """Lists all of the available Stream Analytics related operations. 
- :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Operation or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.Operation] :raises ~azure.core.exceptions.HttpResponseError: @@ -85,10 +88,10 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -99,21 +102,29 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, 
_parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("OperationListResult", pipeline_response) @@ -123,11 +134,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -139,5 +150,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list.metadata = {"url": "/providers/Microsoft.StreamAnalytics/operations"} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py index 7ba57f7d9fdf..16781bacaa32 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from io import IOBase -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,8 +32,11 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,7 +57,7 @@ def build_create_or_replace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -98,7 +104,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -137,7 +143,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -171,7 +177,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -205,7 +211,7 @@ def build_list_by_streaming_job_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -240,7 +246,7 @@ def build_test_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -326,7 +332,6 @@ def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: @@ -338,7 +343,7 @@ def create_or_replace( resource_group_name: str, job_name: str, output_name: str, - output: IO, + output: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -356,7 +361,7 @@ def create_or_replace( :type output_name: str :param output: The definition of the output that will be used to create a new output or replace the existing one under the streaming job. Required. - :type output: IO + :type output: IO[bytes] :param if_match: The ETag of the output. Omit this value to always overwrite the current output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -368,7 +373,6 @@ def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: @@ -380,7 +384,7 @@ def create_or_replace( resource_group_name: str, job_name: str, output_name: str, - output: Union[_models.Output, IO], + output: Union[_models.Output, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -395,8 +399,9 @@ def create_or_replace( :param output_name: The name of the output. Required. :type output_name: str :param output: The definition of the output that will be used to create a new output or replace - the existing one under the streaming job. Is either a Output type or a IO type. Required. 
- :type output: ~azure.mgmt.streamanalytics.models.Output or IO + the existing one under the streaming job. Is either a Output type or a IO[bytes] type. + Required. + :type output: ~azure.mgmt.streamanalytics.models.Output or IO[bytes] :param if_match: The ETag of the output. Omit this value to always overwrite the current output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -405,15 +410,11 @@ def create_or_replace( an existing output. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -424,7 +425,7 @@ def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Output] = kwargs.pop("cls", None) @@ -436,7 +437,7 @@ def create_or_replace( else: _json = self._serialize.body(output, "Output") - request = build_create_or_replace_request( + _request = build_create_or_replace_request( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, @@ -447,16 +448,14 @@ def 
create_or_replace( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_replace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -467,25 +466,15 @@ def create_or_replace( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} - if response.status_code == 200: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("Output", pipeline_response) - - if response.status_code == 201: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Output", pipeline_response) + deserialized = self._deserialize("Output", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - create_or_replace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" - } - @overload def update( self, @@ -521,7 +510,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: @@ -533,7 +521,7 @@ def update( resource_group_name: str, job_name: str, output_name: str, - output: IO, + output: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -554,7 +542,7 @@ def update( properties in the existing output (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing output will remain the same and not change as a result of this PATCH operation. Required. - :type output: IO + :type output: IO[bytes] :param if_match: The ETag of the output. Omit this value to always overwrite the current output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -562,7 +550,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: @@ -574,7 +561,7 @@ def update( resource_group_name: str, job_name: str, output_name: str, - output: Union[_models.Output, IO], + output: Union[_models.Output, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> _models.Output: @@ -593,21 +580,17 @@ def update( properties in the existing output (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing output will remain the same and not change as a result of this PATCH operation. 
Is either a Output type or - a IO type. Required. - :type output: ~azure.mgmt.streamanalytics.models.Output or IO + a IO[bytes] type. Required. + :type output: ~azure.mgmt.streamanalytics.models.Output or IO[bytes] :param if_match: The ETag of the output. Omit this value to always overwrite the current output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -618,7 +601,7 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Output] = kwargs.pop("cls", None) @@ -630,7 +613,7 @@ def update( else: _json = self._serialize.body(output, "Output") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, @@ -640,16 +623,14 @@ def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = 
self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -662,16 +643,12 @@ def update( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Output", pipeline_response) + deserialized = self._deserialize("Output", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" - } + return deserialized # type: ignore @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -686,12 +663,11 @@ def delete( # pylint: disable=inconsistent-return-statements :type job_name: str :param output_name: The name of the output. Required. 
:type output_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -702,25 +678,23 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -731,11 +705,7 @@ def delete( # pylint: disable=inconsistent-return-statements raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" - } + return cls(pipeline_response, None, {}) # type: ignore @distributed_trace def 
get(self, resource_group_name: str, job_name: str, output_name: str, **kwargs: Any) -> _models.Output: @@ -748,12 +718,11 @@ def get(self, resource_group_name: str, job_name: str, output_name: str, **kwarg :type job_name: str :param output_name: The name of the output. Required. :type output_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Output or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Output :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -764,25 +733,23 @@ def get(self, resource_group_name: str, job_name: str, output_name: str, **kwarg _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Output] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -795,16 +762,12 @@ def get(self, resource_group_name: str, job_name: str, output_name: str, **kwarg response_headers = {} response_headers["ETag"] 
= self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Output", pipeline_response) + deserialized = self._deserialize("Output", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" - } + return deserialized # type: ignore @distributed_trace def list_by_streaming_job( @@ -818,11 +781,10 @@ def list_by_streaming_job( :param job_name: The name of the streaming job. Required. :type job_name: str :param select: The $select OData query parameter. This is a comma-separated list of structural - properties to include in the response, or "\ *" to include all properties. By default, all - properties are returned except diagnostics. Currently only accepts '*\ ' as a valid value. + properties to include in the response, or "\\ *" to include all properties. By default, all + properties are returned except diagnostics. Currently only accepts '*\\ ' as a valid value. Default value is None. 
:type select: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either Output or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.Output] :raises ~azure.core.exceptions.HttpResponseError: @@ -830,10 +792,10 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OutputListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -844,25 +806,33 @@ def list_by_streaming_job( def prepare_request(next_link=None): if not next_link: - request = build_list_by_streaming_job_request( + _request = build_list_by_streaming_job_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, select=select, api_version=api_version, - template_url=self.list_by_streaming_job.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("OutputListResult", pipeline_response) @@ -872,11 +842,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -889,19 +859,15 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs" - } - def _test_initial( self, resource_group_name: str, job_name: str, output_name: str, - output: Optional[Union[_models.Output, IO]] = None, + output: Optional[Union[_models.Output, IO[bytes]]] = None, **kwargs: Any - ) -> Optional[_models.ResourceTestStatus]: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -912,9 +878,9 @@ def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -927,7 +893,7 @@ def _test_initial( else: _json = None - request = build_test_request( + _request = build_test_request( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, @@ -936,37 +902,34 @@ def _test_initial( content_type=content_type, json=_json, content=_content, - template_url=self._test_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _test_initial.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test" - } + return deserialized # type: ignore @overload def begin_test( @@ -998,14 +961,6 @@ def begin_test( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] @@ -1018,7 +973,7 @@ def begin_test( resource_group_name: str, job_name: str, output_name: str, - output: Optional[IO] = None, + output: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -1038,18 +993,10 @@ def begin_test( parameter can be left null to test the existing output as is or if specified, the properties specified will overwrite the corresponding properties in the existing output (exactly like a PATCH operation) and the resulting output will be tested. Default value is None. - :type output: IO + :type output: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] @@ -1062,7 +1009,7 @@ def begin_test( resource_group_name: str, job_name: str, output_name: str, - output: Optional[Union[_models.Output, IO]] = None, + output: Optional[Union[_models.Output, IO[bytes]]] = None, **kwargs: Any ) -> LROPoller[_models.ResourceTestStatus]: """Tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics @@ -1079,20 +1026,9 @@ def begin_test( full output definition intended to be tested. If the output specified already exists, this parameter can be left null to test the existing output as is or if specified, the properties specified will overwrite the corresponding properties in the existing output (exactly like a - PATCH operation) and the resulting output will be tested. Is either a Output type or a IO type. - Default value is None. - :type output: ~azure.mgmt.streamanalytics.models.Output or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. + PATCH operation) and the resulting output will be tested. Is either a Output type or a + IO[bytes] type. Default value is None. + :type output: ~azure.mgmt.streamanalytics.models.Output or IO[bytes] :return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] @@ -1101,7 +1037,7 @@ def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -1120,12 +1056,13 @@ def begin_test( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("ResourceTestStatus", pipeline_response) + deserialized = self._deserialize("ResourceTestStatus", pipeline_response.http_response) if cls: - return 
cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized if polling is True: @@ -1135,14 +1072,12 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.ResourceTestStatus].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_test.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test" - } + return LROPoller[_models.ResourceTestStatus]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py index a5e98129ad4c..48bf6dfa69d2 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from io import IOBase -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,8 +32,11 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,7 +57,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -92,7 +98,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -126,7 +132,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -160,7 +166,7 @@ def build_list_by_cluster_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -242,7 +248,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpoint or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.PrivateEndpoint :raises ~azure.core.exceptions.HttpResponseError: @@ -254,7 +259,7 @@ def create_or_update( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - private_endpoint: IO, + private_endpoint: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -272,7 +277,7 @@ def create_or_update( :type private_endpoint_name: str :param private_endpoint: The definition of the private endpoint that will be used to create a new cluster or replace the existing one. Required. - :type private_endpoint: IO + :type private_endpoint: IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -284,7 +289,6 @@ def create_or_update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpoint or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.PrivateEndpoint :raises ~azure.core.exceptions.HttpResponseError: @@ -296,7 +300,7 @@ def create_or_update( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - private_endpoint: Union[_models.PrivateEndpoint, IO], + private_endpoint: Union[_models.PrivateEndpoint, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -311,9 +315,9 @@ def create_or_update( :param private_endpoint_name: The name of the private endpoint. Required. :type private_endpoint_name: str :param private_endpoint: The definition of the private endpoint that will be used to create a - new cluster or replace the existing one. Is either a PrivateEndpoint type or a IO type. + new cluster or replace the existing one. Is either a PrivateEndpoint type or a IO[bytes] type. Required. - :type private_endpoint: ~azure.mgmt.streamanalytics.models.PrivateEndpoint or IO + :type private_endpoint: ~azure.mgmt.streamanalytics.models.PrivateEndpoint or IO[bytes] :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -322,15 +326,11 @@ def create_or_update( an existing record set. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpoint or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.PrivateEndpoint :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -341,7 +341,7 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.PrivateEndpoint] = kwargs.pop("cls", None) @@ -353,7 +353,7 @@ def create_or_update( else: _json = self._serialize.body(private_endpoint, "PrivateEndpoint") - request = build_create_or_update_request( + _request = build_create_or_update_request( resource_group_name=resource_group_name, cluster_name=cluster_name, private_endpoint_name=private_endpoint_name, @@ -364,16 +364,14 @@ def create_or_update( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -383,21 +381,13 @@ def create_or_update( error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) 
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if response.status_code == 200: - deserialized = self._deserialize("PrivateEndpoint", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("PrivateEndpoint", pipeline_response) + deserialized = self._deserialize("PrivateEndpoint", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - create_or_update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" - } - @distributed_trace def get( self, resource_group_name: str, cluster_name: str, private_endpoint_name: str, **kwargs: Any @@ -411,12 +401,11 @@ def get( :type cluster_name: str :param private_endpoint_name: The name of the private endpoint. Required. :type private_endpoint_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: PrivateEndpoint or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.PrivateEndpoint :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -427,25 +416,23 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PrivateEndpoint] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, cluster_name=cluster_name, private_endpoint_name=private_endpoint_name, 
subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -455,21 +442,17 @@ def get( error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpoint", pipeline_response) + deserialized = self._deserialize("PrivateEndpoint", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" - } + return deserialized # type: ignore - def _delete_initial( # pylint: disable=inconsistent-return-statements + def _delete_initial( self, resource_group_name: str, cluster_name: str, private_endpoint_name: str, **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -480,40 +463,43 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) - cls: ClsType[None] = kwargs.pop("cls", 
None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, cluster_name=cluster_name, private_endpoint_name=private_endpoint_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" - } + return deserialized # type: ignore @distributed_trace def begin_delete( @@ -528,14 +514,6 @@ def begin_delete( :type cluster_name: str :param private_endpoint_name: The name of the private 
endpoint. Required. :type private_endpoint_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -543,13 +521,13 @@ def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, private_endpoint_name=private_endpoint_name, @@ -559,11 +537,12 @@ def begin_delete( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if 
polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -572,17 +551,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace def list_by_cluster( @@ -595,7 +570,6 @@ def list_by_cluster( :type resource_group_name: str :param cluster_name: The name of the cluster. Required. 
:type cluster_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PrivateEndpoint or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.PrivateEndpoint] :raises ~azure.core.exceptions.HttpResponseError: @@ -603,10 +577,10 @@ def list_by_cluster( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PrivateEndpointListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -617,24 +591,32 @@ def list_by_cluster( def prepare_request(next_link=None): if not next_link: - request = build_list_by_cluster_request( + _request = build_list_by_cluster_request( resource_group_name=resource_group_name, cluster_name=cluster_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_cluster.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("PrivateEndpointListResult", pipeline_response) @@ -644,11 +626,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -660,7 +642,3 @@ def get_next(next_link=None): return pipeline_response return ItemPaged(get_next, extract_data) - - list_by_cluster.metadata = { - "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints" - } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_sku_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_sku_operations.py deleted file mode 100644 index 7cab0d4b6269..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_sku_operations.py +++ /dev/null @@ -1,169 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models -from .._serialization import Serializer -from .._vendor import _convert_request - -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request(resource_group_name: str, job_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/skus", - ) # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - 
"jobName": _SERIALIZER.url("job_name", job_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class SkuOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.streamanalytics.StreamAnalyticsManagementClient`'s - :attr:`sku` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, job_name: str, **kwargs: Any - ) -> Iterable["_models.GetStreamingJobSkuResult"]: - """Gets a list of available SKUs about the specified streaming job. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param job_name: The name of the streaming job. Required. 
- :type job_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either GetStreamingJobSkuResult or the result of - cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.GetStreamingJobSkuResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - cls: ClsType[_models.GetStreamingJobSkuResults] = kwargs.pop("cls", None) - - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_request( - resource_group_name=resource_group_name, - job_name=job_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize("GetStreamingJobSkuResults", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - response = 
pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - list.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/skus" - } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py index 9beada0c7c24..bd6d8a5ae661 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from io import IOBase -from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -15,13 +17,14 @@ ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,8 +32,11 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -50,7 +56,7 @@ def build_create_or_replace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -90,7 +96,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -126,7 +132,7 @@ def build_delete_request(resource_group_name: str, job_name: str, subscription_i _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -159,7 +165,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", 
{}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -194,7 +200,7 @@ def build_list_by_resource_group_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -226,7 +232,7 @@ def build_list_request(subscription_id: str, *, expand: Optional[str] = None, ** _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -254,7 +260,7 @@ def build_start_request(resource_group_name: str, job_name: str, subscription_id _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -288,7 +294,7 @@ def build_stop_request(resource_group_name: str, job_name: str, subscription_id: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -319,7 +325,7 @@ def build_scale_request(resource_group_name: str, job_name: str, subscription_id _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -372,12 +378,12 @@ def _create_or_replace_initial( self, resource_group_name: str, job_name: str, - streaming_job: Union[_models.StreamingJob, IO], + streaming_job: Union[_models.StreamingJob, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any - ) -> _models.StreamingJob: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -388,9 +394,9 @@ def _create_or_replace_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type 
or "application/json" _json = None @@ -400,7 +406,7 @@ def _create_or_replace_initial( else: _json = self._serialize.body(streaming_job, "StreamingJob") - request = build_create_or_replace_request( + _request = build_create_or_replace_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, @@ -410,45 +416,38 @@ def _create_or_replace_initial( content_type=content_type, json=_json, content=_content, - template_url=self._create_or_replace_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} - if response.status_code == 200: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("StreamingJob", pipeline_response) - - if response.status_code == 201: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("StreamingJob", pipeline_response) + deserialized = response.stream_download(self._client._pipeline, 
decompress=_decompress) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - _create_or_replace_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } - @overload def begin_create_or_replace( self, @@ -482,14 +481,6 @@ def begin_create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either StreamingJob or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.StreamingJob] @@ -501,7 +492,7 @@ def begin_create_or_replace( self, resource_group_name: str, job_name: str, - streaming_job: IO, + streaming_job: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -517,7 +508,7 @@ def begin_create_or_replace( :type job_name: str :param streaming_job: The definition of the streaming job that will be used to create a new streaming job or replace the existing one. Required. - :type streaming_job: IO + :type streaming_job: IO[bytes] :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current record set. 
Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -529,14 +520,6 @@ def begin_create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either StreamingJob or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.StreamingJob] @@ -548,7 +531,7 @@ def begin_create_or_replace( self, resource_group_name: str, job_name: str, - streaming_job: Union[_models.StreamingJob, IO], + streaming_job: Union[_models.StreamingJob, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -561,9 +544,9 @@ def begin_create_or_replace( :param job_name: The name of the streaming job. Required. :type job_name: str :param streaming_job: The definition of the streaming job that will be used to create a new - streaming job or replace the existing one. Is either a StreamingJob type or a IO type. + streaming job or replace the existing one. Is either a StreamingJob type or a IO[bytes] type. Required. - :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO + :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO[bytes] :param if_match: The ETag of the streaming job. 
Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -572,17 +555,6 @@ def begin_create_or_replace( updating an existing record set. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either StreamingJob or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.StreamingJob] @@ -591,7 +563,7 @@ def begin_create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -611,6 +583,7 @@ def begin_create_or_replace( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): @@ -618,9 +591,9 @@ def get_long_running_output(pipeline_response): response = pipeline_response.http_response response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("StreamingJob", pipeline_response) + deserialized = self._deserialize("StreamingJob", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized if polling is True: @@ -630,17 +603,15 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[_models.StreamingJob].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - 
begin_create_or_replace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } + return LROPoller[_models.StreamingJob]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) @overload def update( @@ -674,7 +645,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StreamingJob or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.StreamingJob :raises ~azure.core.exceptions.HttpResponseError: @@ -685,7 +655,7 @@ def update( self, resource_group_name: str, job_name: str, - streaming_job: IO, + streaming_job: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -704,7 +674,7 @@ def update( Any properties that are set to null here will mean that the corresponding property in the existing input will remain the same and not change as a result of this PATCH operation. Required. - :type streaming_job: IO + :type streaming_job: IO[bytes] :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -712,7 +682,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StreamingJob or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.StreamingJob :raises ~azure.core.exceptions.HttpResponseError: @@ -723,7 +692,7 @@ def update( self, resource_group_name: str, job_name: str, - streaming_job: Union[_models.StreamingJob, IO], + streaming_job: Union[_models.StreamingJob, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> _models.StreamingJob: @@ -739,21 +708,17 @@ def update( corresponding properties in the existing streaming job (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing input will remain the same and not change as a result of this PATCH operation. Is - either a StreamingJob type or a IO type. Required. - :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO + either a StreamingJob type or a IO[bytes] type. Required. + :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO[bytes] :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StreamingJob or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.StreamingJob :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -764,7 +729,7 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) @@ -776,7 +741,7 @@ def update( else: _json = self._serialize.body(streaming_job, "StreamingJob") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, @@ -785,16 +750,14 @@ def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -807,21 +770,15 @@ def update( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("StreamingJob", pipeline_response) + 
deserialized = self._deserialize("StreamingJob", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } + return deserialized # type: ignore - def _delete_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, job_name: str, **kwargs: Any - ) -> None: - error_map = { + def _delete_initial(self, resource_group_name: str, job_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -832,39 +789,42 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_delete_request( + _request = build_delete_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - 
request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _delete_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } + return deserialized # type: ignore @distributed_trace def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: Any) -> LROPoller[None]: @@ -875,14 +835,6 @@ def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: Any) - :type resource_group_name: str :param job_name: The name of the streaming job. Required. :type job_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -890,13 +842,13 @@ def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: Any) - _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( # type: ignore + raw_result = self._delete_initial( resource_group_name=resource_group_name, job_name=job_name, api_version=api_version, @@ -905,11 +857,12 @@ def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: Any) - params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -918,17 +871,13 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, 
get_long_running_output, polling_method) # type: ignore - - begin_delete.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace def get( @@ -946,12 +895,11 @@ def get( parameter is absent. The default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and 'functions'. Default value is None. :type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: StreamingJob or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.StreamingJob :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -962,25 +910,23 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, expand=expand, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, 
**kwargs ) response = pipeline_response.http_response @@ -993,16 +939,12 @@ def get( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("StreamingJob", pipeline_response) + deserialized = self._deserialize("StreamingJob", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" - } + return deserialized # type: ignore @distributed_trace def list_by_resource_group( @@ -1018,7 +960,6 @@ def list_by_resource_group( parameter is absent. The default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and 'functions'. Default value is None. :type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either StreamingJob or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.StreamingJob] :raises ~azure.core.exceptions.HttpResponseError: @@ -1026,10 +967,10 @@ def list_by_resource_group( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.StreamingJobListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1040,24 +981,32 @@ def list_by_resource_group( def prepare_request(next_link=None): if not 
next_link: - request = build_list_by_resource_group_request( + _request = build_list_by_resource_group_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, expand=expand, api_version=api_version, - template_url=self.list_by_resource_group.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("StreamingJobListResult", pipeline_response) @@ -1067,11 +1016,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1084,10 +1033,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_resource_group.metadata = { - "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs" - } - @distributed_trace def list(self, expand: Optional[str] = None, **kwargs: Any) -> Iterable["_models.StreamingJob"]: """Lists all of the streaming jobs in the given subscription. @@ -1097,7 +1042,6 @@ def list(self, expand: Optional[str] = None, **kwargs: Any) -> Iterable["_models parameter is absent. The default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and 'functions'. Default value is None. :type expand: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either StreamingJob or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.StreamingJob] :raises ~azure.core.exceptions.HttpResponseError: @@ -1105,10 +1049,10 @@ def list(self, expand: Optional[str] = None, **kwargs: Any) -> Iterable["_models _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.StreamingJobListResult] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1119,23 +1063,31 @@ def list(self, expand: Optional[str] = None, **kwargs: Any) -> Iterable["_models def prepare_request(next_link=None): if not next_link: - request = build_list_request( + _request = build_list_request( subscription_id=self._config.subscription_id, expand=expand, api_version=api_version, - template_url=self.list.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = 
self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - request.method = "GET" - return request + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + _request.url = self._client.format_url(_request.url) + _request.method = "GET" + return _request def extract_data(pipeline_response): deserialized = self._deserialize("StreamingJobListResult", pipeline_response) @@ -1145,11 +1097,11 @@ def extract_data(pipeline_response): return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): - request = prepare_request(next_link) + _request = prepare_request(next_link) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1162,16 +1114,14 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs"} - - def _start_initial( # pylint: disable=inconsistent-return-statements + def _start_initial( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional[Union[_models.StartStreamingJobParameters, IO]] = None, + start_job_parameters: Optional[Union[_models.StartStreamingJobParameters, IO[bytes]]] = None, **kwargs: Any - ) -> None: - 
error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1182,9 +1132,9 @@ def _start_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1197,7 +1147,7 @@ def _start_initial( # pylint: disable=inconsistent-return-statements else: _json = None - request = build_start_request( + _request = build_start_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, @@ -1205,31 +1155,34 @@ def _start_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - template_url=self._start_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): 
+ pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _start_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start" - } + return deserialized # type: ignore @overload def begin_start( @@ -1255,14 +1208,6 @@ def begin_start( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1273,7 +1218,7 @@ def begin_start( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional[IO] = None, + start_job_parameters: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -1288,18 +1233,10 @@ def begin_start( :type job_name: str :param start_job_parameters: Parameters applicable to a start streaming job operation. Default value is None. - :type start_job_parameters: IO + :type start_job_parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1310,7 +1247,7 @@ def begin_start( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional[Union[_models.StartStreamingJobParameters, IO]] = None, + start_job_parameters: Optional[Union[_models.StartStreamingJobParameters, IO[bytes]]] = None, **kwargs: Any ) -> LROPoller[None]: """Starts a streaming job. 
Once a job is started it will start processing input events and produce @@ -1322,20 +1259,9 @@ def begin_start( :param job_name: The name of the streaming job. Required. :type job_name: str :param start_job_parameters: Parameters applicable to a start streaming job operation. Is - either a StartStreamingJobParameters type or a IO type. Default value is None. + either a StartStreamingJobParameters type or a IO[bytes] type. Default value is None. :type start_job_parameters: ~azure.mgmt.streamanalytics.models.StartStreamingJobParameters or - IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
+ IO[bytes] :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1343,14 +1269,14 @@ def begin_start( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._start_initial( # type: ignore + raw_result = self._start_initial( resource_group_name=resource_group_name, job_name=job_name, start_job_parameters=start_job_parameters, @@ -1361,11 +1287,12 @@ def begin_start( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -1374,22 +1301,16 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, 
raw_result, get_long_running_output, polling_method) # type: ignore - - begin_start.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _stop_initial( # pylint: disable=inconsistent-return-statements - self, resource_group_name: str, job_name: str, **kwargs: Any - ) -> None: - error_map = { + def _stop_initial(self, resource_group_name: str, job_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1400,39 +1321,42 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - cls: ClsType[None] = kwargs.pop("cls", None) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - request = build_stop_request( + _request = build_stop_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._stop_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if 
response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _stop_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop" - } + return deserialized # type: ignore @distributed_trace def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: Any) -> LROPoller[None]: @@ -1444,14 +1368,6 @@ def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: Any) -> :type resource_group_name: str :param job_name: The name of the streaming job. Required. :type job_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1459,13 +1375,13 @@ def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: Any) -> _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._stop_initial( # type: ignore + raw_result = self._stop_initial( resource_group_name=resource_group_name, job_name=job_name, api_version=api_version, @@ -1474,11 +1390,12 @@ def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: Any) -> params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -1487,26 +1404,22 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, 
get_long_running_output, polling_method) # type: ignore - - begin_stop.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _scale_initial( # pylint: disable=inconsistent-return-statements + def _scale_initial( self, resource_group_name: str, job_name: str, - scale_job_parameters: Optional[Union[_models.ScaleStreamingJobParameters, IO]] = None, + scale_job_parameters: Optional[Union[_models.ScaleStreamingJobParameters, IO[bytes]]] = None, **kwargs: Any - ) -> None: - error_map = { + ) -> Iterator[bytes]: + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1517,9 +1430,9 @@ def _scale_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1532,7 +1445,7 @@ def _scale_initial( # pylint: disable=inconsistent-return-statements else: _json = None - request = build_scale_request( + _request = build_scale_request( resource_group_name=resource_group_name, job_name=job_name, subscription_id=self._config.subscription_id, @@ -1540,31 +1453,34 @@ def _scale_initial( # pylint: disable=inconsistent-return-statements content_type=content_type, json=_json, content=_content, - 
template_url=self._scale_initial.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) - _stream = False + _decompress = kwargs.pop("decompress", True) + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline, decompress=_decompress) + if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - _scale_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale" - } + return deserialized # type: ignore @overload def begin_scale( @@ -1589,14 +1505,6 @@ def begin_scale( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. 
- :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1607,7 +1515,7 @@ def begin_scale( self, resource_group_name: str, job_name: str, - scale_job_parameters: Optional[IO] = None, + scale_job_parameters: Optional[IO[bytes]] = None, *, content_type: str = "application/json", **kwargs: Any @@ -1621,18 +1529,10 @@ def begin_scale( :type job_name: str :param scale_job_parameters: Parameters applicable to a scale streaming job operation. Default value is None. - :type scale_job_parameters: IO + :type scale_job_parameters: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1643,7 +1543,7 @@ def begin_scale( self, resource_group_name: str, job_name: str, - scale_job_parameters: Optional[Union[_models.ScaleStreamingJobParameters, IO]] = None, + scale_job_parameters: Optional[Union[_models.ScaleStreamingJobParameters, IO[bytes]]] = None, **kwargs: Any ) -> LROPoller[None]: """Scales a streaming job when the job is running. @@ -1654,20 +1554,9 @@ def begin_scale( :param job_name: The name of the streaming job. Required. :type job_name: str :param scale_job_parameters: Parameters applicable to a scale streaming job operation. Is - either a ScaleStreamingJobParameters type or a IO type. Default value is None. + either a ScaleStreamingJobParameters type or a IO[bytes] type. Default value is None. :type scale_job_parameters: ~azure.mgmt.streamanalytics.models.ScaleStreamingJobParameters or - IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
+ IO[bytes] :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1675,14 +1564,14 @@ def begin_scale( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._scale_initial( # type: ignore + raw_result = self._scale_initial( resource_group_name=resource_group_name, job_name=job_name, scale_job_parameters=scale_job_parameters, @@ -1693,11 +1582,12 @@ def begin_scale( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: - return cls(pipeline_response, None, {}) + return cls(pipeline_response, None, {}) # type: ignore if polling is True: polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) @@ -1706,14 +1596,10 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- else: polling_method = polling if cont_token: - return LROPoller.from_continuation_token( + return LROPoller[None].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, 
raw_result, get_long_running_output, polling_method) # type: ignore - - begin_scale.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale" - } + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py index fcf12c5cb40d..415252f2afda 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,8 +6,8 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from io import IOBase -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import sys +from typing import Any, Callable, Dict, Optional, Type, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,18 +18,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -41,7 +41,7 @@ def build_list_quotas_request(location: str, subscription_id: str, **kwargs: Any _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -65,161 +65,6 @@ def build_list_quotas_request(location: str, subscription_id: str, **kwargs: Any return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_test_query_request(location: str, subscription_id: str, 
**kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery", - ) # pylint: disable=line-too-long - path_format_arguments = { - "location": _SERIALIZER.url("location", location, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_compile_query_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/compileQuery", - ) # pylint: disable=line-too-long - path_format_arguments = { - 
"location": _SERIALIZER.url("location", location, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_sample_input_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput", - ) # pylint: disable=line-too-long - path_format_arguments = { - "location": _SERIALIZER.url("location", location, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def 
build_test_input_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput", - ) # pylint: disable=line-too-long - path_format_arguments = { - "location": _SERIALIZER.url("location", location, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_test_output_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput", - ) # pylint: 
disable=line-too-long - path_format_arguments = { - "location": _SERIALIZER.url("location", location, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - class SubscriptionsOperations: """ .. warning:: @@ -243,15 +88,15 @@ def __init__(self, *args, **kwargs): def list_quotas(self, location: str, **kwargs: Any) -> _models.SubscriptionQuotasListResult: """Retrieves the subscription's current quota information in a particular region. - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. + :param location: The region in which to retrieve the subscription's quota information. You can + find out which regions Azure Stream Analytics is supported in here: + https://azure.microsoft.com/en-us/regions/. Required. 
:type location: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: SubscriptionQuotasListResult or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.SubscriptionQuotasListResult :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -262,350 +107,21 @@ def list_quotas(self, location: str, **kwargs: Any) -> _models.SubscriptionQuota _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SubscriptionQuotasListResult] = kwargs.pop("cls", None) - request = build_list_quotas_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list_quotas.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("SubscriptionQuotasListResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - list_quotas.metadata = { - "url": 
"/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas" - } - - def _test_query_initial( - self, location: str, test_query: Union[_models.TestQuery, IO], **kwargs: Any - ) -> Optional[_models.QueryTestingResult]: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.QueryTestingResult]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_query, (IOBase, bytes)): - _content = test_query - else: - _json = self._serialize.body(test_query, "TestQuery") - - request = build_test_query_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_query_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - 
deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("QueryTestingResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_query_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery" - } - - @overload - def begin_test_query( - self, location: str, test_query: _models.TestQuery, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Required. - :type test_query: ~azure.mgmt.streamanalytics.models.TestQuery - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_test_query( - self, location: str, test_query: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Required. - :type test_query: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_test_query( - self, location: str, test_query: Union[_models.TestQuery, IO], **kwargs: Any - ) -> LROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Is either a TestQuery type or a IO type. Required. - :type test_query: ~azure.mgmt.streamanalytics.models.TestQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.QueryTestingResult] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._test_query_initial( - location=location, - test_query=test_query, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("QueryTestingResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_test_query.metadata = { - "url": 
"/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery" - } - - @overload - def compile_query( - self, - location: str, - compile_query: _models.CompileQuery, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Required. - :type compile_query: ~azure.mgmt.streamanalytics.models.CompileQuery - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def compile_query( - self, location: str, compile_query: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Required. - :type compile_query: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def compile_query( - self, location: str, compile_query: Union[_models.CompileQuery, IO], **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Is either a CompileQuery type or a IO type. Required. - :type compile_query: ~azure.mgmt.streamanalytics.models.CompileQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.QueryCompilationResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(compile_query, (IOBase, bytes)): - _content = compile_query - else: - _json = self._serialize.body(compile_query, "CompileQuery") - - request = build_compile_query_request( + _request = build_list_quotas_request( location=location, subscription_id=self._config.subscription_id, api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self.compile_query.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -615,607 +131,9 @@ def compile_query( error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize("QueryCompilationResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - compile_query.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/compileQuery" - } - - def _sample_input_initial( - self, location: str, sample_input: Union[_models.SampleInput, IO], **kwargs: Any - ) -> _models.SampleInputResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.SampleInputResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(sample_input, (IOBase, bytes)): - _content = sample_input - else: - _json = self._serialize.body(sample_input, "SampleInput") - - request = build_sample_input_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._sample_input_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code 
not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("SampleInputResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _sample_input_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput" - } - - @overload - def begin_sample_input( - self, location: str, sample_input: _models.SampleInput, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Required. - :type sample_input: ~azure.mgmt.streamanalytics.models.SampleInput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_sample_input( - self, location: str, sample_input: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Required. - :type sample_input: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_sample_input( - self, location: str, sample_input: Union[_models.SampleInput, IO], **kwargs: Any - ) -> LROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Is either a SampleInput type or a IO type. Required. - :type sample_input: ~azure.mgmt.streamanalytics.models.SampleInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.SampleInputResult] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._sample_input_initial( - location=location, - sample_input=sample_input, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SampleInputResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_sample_input.metadata = { - "url": 
"/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput" - } - - def _test_input_initial( - self, location: str, test_input: Union[_models.TestInput, IO], **kwargs: Any - ) -> _models.TestDatasourceResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.TestDatasourceResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_input, (IOBase, bytes)): - _content = test_input - else: - _json = self._serialize.body(test_input, "TestInput") - - request = build_test_input_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_input_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = 
self._deserialize("TestDatasourceResult", pipeline_response) + deserialized = self._deserialize("SubscriptionQuotasListResult", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_input_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput" - } - - @overload - def begin_test_input( - self, location: str, test_input: _models.TestInput, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. - Required. - :type test_input: ~azure.mgmt.streamanalytics.models.TestInput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_test_input( - self, location: str, test_input: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. - Required. - :type test_input: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ + return cls(pipeline_response, deserialized, {}) # type: ignore - @distributed_trace - def begin_test_input( - self, location: str, test_input: Union[_models.TestInput, IO], **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. Is - either a TestInput type or a IO type. Required. - :type test_input: ~azure.mgmt.streamanalytics.models.TestInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.TestDatasourceResult] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._test_input_initial( - location=location, - test_input=test_input, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_test_input.metadata = { - "url": 
"/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput" - } - - def _test_output_initial( - self, location: str, test_output: Union[_models.TestOutput, IO], **kwargs: Any - ) -> _models.TestDatasourceResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.TestDatasourceResult] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_output, (IOBase, bytes)): - _content = test_output - else: - _json = self._serialize.body(test_output, "TestOutput") - - request = build_test_output_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_output_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = 
self._deserialize("TestDatasourceResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_output_initial.metadata = { - "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput" - } - - @overload - def begin_test_output( - self, location: str, test_output: _models.TestOutput, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Required. - :type test_output: ~azure.mgmt.streamanalytics.models.TestOutput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_test_output( - self, location: str, test_output: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Required. - :type test_output: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_test_output( - self, location: str, test_output: Union[_models.TestOutput, IO], **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Is either a TestOutput type or a IO type. Required. - :type test_output: ~azure.mgmt.streamanalytics.models.TestOutput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.TestDatasourceResult] = kwargs.pop("cls", None) - polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token: Optional[str] = kwargs.pop("continuation_token", None) - if cont_token is None: - raw_result = self._test_output_initial( - location=location, - test_output=test_output, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method: PollingMethod = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - - begin_test_output.metadata = { - "url": 
"/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput" - } + return deserialized # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py index cdfb84a0f943..9dd6ea6e2f51 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,7 +7,8 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload +import sys +from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -18,16 +19,18 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -48,7 +51,7 @@ def build_create_or_replace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -95,7 +98,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -134,7 +137,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -218,7 +221,6 @@ def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: @@ -230,7 +232,7 @@ def create_or_replace( resource_group_name: str, job_name: str, transformation_name: str, - transformation: IO, + transformation: IO[bytes], if_match: Optional[str] = None, if_none_match: Optional[str] = None, *, @@ -249,7 +251,7 @@ def create_or_replace( :type transformation_name: str :param transformation: The definition of the transformation that will be used to create a new transformation or replace the existing one under the streaming job. Required. - :type transformation: IO + :type transformation: IO[bytes] :param if_match: The ETag of the transformation. Omit this value to always overwrite the current transformation. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -261,7 +263,6 @@ def create_or_replace( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: @@ -273,7 +274,7 @@ def create_or_replace( resource_group_name: str, job_name: str, transformation_name: str, - transformation: Union[_models.Transformation, IO], + transformation: Union[_models.Transformation, IO[bytes]], if_match: Optional[str] = None, if_none_match: Optional[str] = None, **kwargs: Any @@ -290,8 +291,8 @@ def create_or_replace( :type transformation_name: str :param transformation: The definition of the transformation that will be used to create a new transformation or replace the existing one under the streaming job. Is either a Transformation - type or a IO type. Required. - :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO + type or a IO[bytes] type. Required. + :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO[bytes] :param if_match: The ETag of the transformation. Omit this value to always overwrite the current transformation. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -300,15 +301,11 @@ def create_or_replace( updating an existing transformation. Other values will result in a 412 Pre-condition Failed response. Default value is None. :type if_none_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -319,7 +316,7 @@ def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Transformation] = kwargs.pop("cls", None) @@ -331,7 +328,7 @@ def create_or_replace( else: _json = self._serialize.body(transformation, "Transformation") - request = build_create_or_replace_request( + _request = build_create_or_replace_request( resource_group_name=resource_group_name, job_name=job_name, transformation_name=transformation_name, @@ -342,16 +339,14 @@ def create_or_replace( content_type=content_type, json=_json, content=_content, - template_url=self.create_or_replace.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -362,25 +357,15 @@ def create_or_replace( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) 
response_headers = {} - if response.status_code == 200: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - - deserialized = self._deserialize("Transformation", pipeline_response) - - if response.status_code == 201: - response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Transformation", pipeline_response) + deserialized = self._deserialize("Transformation", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore - create_or_replace.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}" - } - @overload def update( self, @@ -417,7 +402,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: @@ -429,7 +413,7 @@ def update( resource_group_name: str, job_name: str, transformation_name: str, - transformation: IO, + transformation: IO[bytes], if_match: Optional[str] = None, *, content_type: str = "application/json", @@ -451,7 +435,7 @@ def update( updated). Any properties that are set to null here will mean that the corresponding property in the existing transformation will remain the same and not change as a result of this PATCH operation. Required. - :type transformation: IO + :type transformation: IO[bytes] :param if_match: The ETag of the transformation. 
Omit this value to always overwrite the current transformation. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. @@ -459,7 +443,6 @@ def update( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: @@ -471,7 +454,7 @@ def update( resource_group_name: str, job_name: str, transformation_name: str, - transformation: Union[_models.Transformation, IO], + transformation: Union[_models.Transformation, IO[bytes]], if_match: Optional[str] = None, **kwargs: Any ) -> _models.Transformation: @@ -490,21 +473,17 @@ def update( the corresponding properties in the existing transformation (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing transformation will remain the same and not change as a result of this PATCH - operation. Is either a Transformation type or a IO type. Required. - :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO + operation. Is either a Transformation type or a IO[bytes] type. Required. + :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO[bytes] :param if_match: The ETag of the transformation. Omit this value to always overwrite the current transformation. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. Default value is None. :type if_match: str - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -515,7 +494,7 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Transformation] = kwargs.pop("cls", None) @@ -527,7 +506,7 @@ def update( else: _json = self._serialize.body(transformation, "Transformation") - request = build_update_request( + _request = build_update_request( resource_group_name=resource_group_name, job_name=job_name, transformation_name=transformation_name, @@ -537,16 +516,14 @@ def update( content_type=content_type, json=_json, content=_content, - template_url=self.update.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -559,16 +536,12 @@ def update( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Transformation", 
pipeline_response) + deserialized = self._deserialize("Transformation", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - update.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}" - } + return deserialized # type: ignore @distributed_trace def get( @@ -583,12 +556,11 @@ def get( :type job_name: str :param transformation_name: The name of the transformation. Required. :type transformation_name: str - :keyword callable cls: A custom type or function that will be passed the direct response :return: Transformation or the result of cls(response) :rtype: ~azure.mgmt.streamanalytics.models.Transformation :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -599,25 +571,23 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.Transformation] = kwargs.pop("cls", None) - request = build_get_request( + _request = build_get_request( resource_group_name=resource_group_name, job_name=job_name, transformation_name=transformation_name, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata["url"], headers=_headers, params=_params, ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) + _request.url = self._client.format_url(_request.url) _stream = False 
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=_stream, **kwargs + _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -630,13 +600,9 @@ def get( response_headers = {} response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) - deserialized = self._deserialize("Transformation", pipeline_response) + deserialized = self._deserialize("Transformation", pipeline_response.http_response) if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized + return cls(pipeline_response, deserialized, response_headers) # type: ignore - get.metadata = { - "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}" - } + return deserialized # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_create.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_create.py index b134e4b6d401..dc5a44b3cfd7 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_create.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_create.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_Create.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_Create.json if __name__ == "__main__": main() diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_delete.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_delete.py index d0a55332091f..4c73257d1fe2 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_delete.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -35,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_Delete.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_get.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_get.py index 1feadc8c2eef..d5d7cd3c1c03 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_get.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -36,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_Get.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_Get.json if __name__ == "__main__": main() diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_resource_group.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_resource_group.py index 987e949242d0..aaaa7c844b36 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_resource_group.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_resource_group.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -36,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_ListByResourceGroup.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_ListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_subscription.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_subscription.py index a1d1b6b02b9f..2d1868924edd 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_subscription.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_subscription.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -34,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_ListBySubscription.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_ListBySubscription.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_streaming_jobs.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_streaming_jobs.py index 6ff6343627ea..097838a00f5e 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_streaming_jobs.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_streaming_jobs.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_ListStreamingJobs.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_ListStreamingJobs.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_update.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_update.py index 5da858f51179..7bb40be9768f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_update.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_Update.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_Update.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_azure_ml.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_azure_ml.py index 4b2ebed76277..c46dc732315f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_azure_ml.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_azure_ml.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -59,6 +60,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Create_AzureML.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Create_AzureML.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_azure_ml_service.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_azure_ml_service.py deleted file mode 100644 index 16eff1642eaf..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_azure_ml_service.py +++ /dev/null @@ -1,64 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python function_create_azure_ml_service.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.functions.create_or_replace( - resource_group_name="sjrg", - job_name="sjName", - function_name="function588", - function={ - "properties": { - "properties": { - "binding": { - "properties": { - "apiKey": "someApiKey==", - "batchSize": 1000, - "endpoint": "someAzureMLEndpointURL", - "inputRequestName": "Inputs", - "inputs": [{"dataType": "array", "mapTo": 0, "name": "data"}], - "numberOfParallelRequests": 1, - "outputResponseName": "Results", - "outputs": [{"dataType": "string", "name": "Sentiment"}], - }, - "type": "Microsoft.MachineLearningServices", - }, - "inputs": [{"dataType": "nvarchar(max)"}], - "output": {"dataType": "nvarchar(max)"}, - }, - "type": "Scalar", - } - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Create_AzureMLService.json -if __name__ == 
"__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_csharp.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_csharp.py deleted file mode 100644 index 3aeefbc04a94..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_csharp.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python function_create_csharp.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.functions.create_or_replace( - resource_group_name="sjrg", - job_name="sjName", - function_name="function588", - function={ - "properties": { - "properties": { - "binding": { - "properties": { - "class": "ASAEdgeUDFDemo.Class1", - "dllPath": "ASAEdgeApplication2_CodeBehind", - "method": "SquareFunction", - "updateMode": "Static", - }, - "type": "Microsoft.StreamAnalytics/CLRUdf", - }, - "inputs": [{"dataType": "nvarchar(max)"}], - "output": {"dataType": "nvarchar(max)"}, - }, - "type": "Scalar", - } - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Create_CSharp.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_java_script.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_java_script.py index 8b794f0ce66c..97dadc85d8fe 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_java_script.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_java_script.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -50,6 +51,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Create_JavaScript.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Create_JavaScript.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_delete.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_delete.py index 77d85bc07893..1b5ba1506ae4 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_delete.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -36,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Delete.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_azure_ml.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_azure_ml.py index fa1079e2dd1d..f05c29383f10 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_azure_ml.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_azure_ml.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Get_AzureML.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Get_AzureML.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_java_script.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_java_script.py index 57367f8343f8..60f3a1a771fd 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_java_script.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_java_script.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Get_JavaScript.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Get_JavaScript.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_list_by_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_list_by_streaming_job.py index ff9acc104a78..09ec571b4935 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_list_by_streaming_job.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_list_by_streaming_job.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from 
azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_ListByStreamingJob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_ListByStreamingJob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_retrieve_default_definition_azure_ml.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_retrieve_default_definition_azure_ml.py index 04db158b5063..65de6d12d7bc 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_retrieve_default_definition_azure_ml.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_retrieve_default_definition_azure_ml.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_RetrieveDefaultDefinition_AzureML.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_RetrieveDefaultDefinition_AzureML.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_azure_ml.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_azure_ml.py index 1a5f3857030e..954f1d4bc89b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_azure_ml.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_azure_ml.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -30,13 +31,13 @@ def main(): ) response = client.functions.begin_test( - resource_group_name="sjrg", - job_name="sjName", + resource_group_name="sjrg7", + job_name="sj9093", function_name="function588", ).result() print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Test_AzureML.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Test_AzureML.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_java_script.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_java_script.py index cf72e9f03064..7d025a869c1b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_java_script.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_java_script.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Test_JavaScript.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Test_JavaScript.json if __name__ == "__main__": main() diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_azure_ml.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_azure_ml.py index c0849004fe49..b626e3f05252 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_azure_ml.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_azure_ml.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -45,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Update_AzureML.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Update_AzureML.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_java_script.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_java_script.py index 1d41d6d320ac..fd0d329e9f23 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_java_script.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_java_script.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -48,6 +49,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Update_JavaScript.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Update_JavaScript.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_event_grid.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_event_grid.py deleted file mode 100644 index c0b2f6c1ff70..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_event_grid.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python input_create_event_grid.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.inputs.create_or_replace( - resource_group_name="sjrg3467", - job_name="sj9742", - input_name="input7970", - input={ - "properties": { - "datasource": { - "properties": { - "eventTypes": ["Microsoft.Storage.BlobCreated"], - "schema": "CloudEventSchema", - "storageAccounts": [ - {"accountKey": "myaccountkey", "accountName": "myaccount", "authenticationMode": "Msi"} - ], - "subscriber": { - "properties": { - "authenticationMode": "Msi", - "consumerGroupName": "sdkconsumergroup", - "eventHubName": "sdkeventhub", - "partitionCount": 16, - "serviceBusNamespace": "sdktest", - "sharedAccessPolicyKey": "someSharedAccessPolicyKey==", - "sharedAccessPolicyName": "RootManageSharedAccessKey", - }, - "type": "Microsoft.EventHub/EventHub", - }, - }, - "type": "Microsoft.EventGrid/EventSubscriptions", - }, - "type": "Stream", - } - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_EventGrid.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_gateway_message_bus.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_gateway_message_bus.py index aa300c3fc351..c92c078b6a68 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_gateway_message_bus.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_gateway_message_bus.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import 
DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -43,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_GatewayMessageBus.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_GatewayMessageBus.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_blob_csv.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_blob_csv.py index 95125f94196f..c0d378d1af62 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_blob_csv.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_blob_csv.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,14 +38,9 @@ def main(): "properties": { "datasource": { "properties": { - "blobName": "myblobinput", "container": "state", "dateFormat": "yyyy/MM/dd", - "deltaPathPattern": "/testBlob/{date}/delta/{time}/", - "deltaSnapshotRefreshRate": "16:14:30", - "fullSnapshotRefreshRate": "16:14:30", "pathPattern": "{date}/{time}", - "sourcePartitionCount": 16, "storageAccounts": [{"accountKey": "someAccountKey==", "accountName": "someAccountName"}], "timeFormat": "HH", }, @@ -58,6 +54,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_Reference_Blob_CSV.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_Reference_Blob_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_file.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_file.py index c74534936c5f..692152ddf036 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_file.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_file.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_Reference_File.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_Reference_File.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_blob_csv.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_blob_csv.py index 6782016f9932..5cd0e07ee729 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_blob_csv.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_blob_csv.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -54,6 +55,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_Stream_Blob_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_Stream_Blob_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_event_hub_json.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_event_hub_json.py index 35ef9659167e..7e8294480f9e 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_event_hub_json.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_event_hub_json.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -47,13 +48,12 @@ def main(): }, "serialization": {"properties": {"encoding": "UTF8"}, "type": "Json"}, "type": "Stream", - "watermarkSettings": {"watermarkMode": "ReadWatermark"}, } }, ) print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_Stream_EventHub_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_Stream_EventHub_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_io_thub_avro.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_io_thub_avro.py index ad69704313c4..ad01249824f5 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_io_thub_avro.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_io_thub_avro.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -53,6 +54,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_Stream_IoTHub_Avro.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_Stream_IoTHub_Avro.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_delete.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_delete.py index 29fa3a48f35f..80ecaa1aea01 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_delete.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -36,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Delete.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_reference_blob_csv.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_reference_blob_csv.py index c4e3352ce8c5..799992624013 100644 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_reference_blob_csv.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_reference_blob_csv.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Get_Reference_Blob_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Get_Reference_Blob_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_blob_csv.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_blob_csv.py index 2fcd5c7e83a4..583884d61288 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_blob_csv.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_blob_csv.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Get_Stream_Blob_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Get_Stream_Blob_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_event_hub_json.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_event_hub_json.py index f926049ecfa1..a59e53fb3868 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_event_hub_json.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_event_hub_json.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Get_Stream_EventHub_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Get_Stream_EventHub_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_io_thub_avro.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_io_thub_avro.py index 0f5e543bab94..f786083c48fd 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_io_thub_avro.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_io_thub_avro.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Get_Stream_IoTHub_Avro.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Get_Stream_IoTHub_Avro.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job.py index 761c48b30e1f..a8b185f27480 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_ListByStreamingJob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_ListByStreamingJob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job_diagnostics.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job_diagnostics.py index abd1001d97a2..097243a992db 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job_diagnostics.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job_diagnostics.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(item) -# 
x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_ListByStreamingJob_Diagnostics.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_ListByStreamingJob_Diagnostics.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_test.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_test.py index 2e952dfa4cc4..1d26eb3d8148 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_test.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_test.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Test.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Test.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_reference_blob.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_reference_blob.py index 45b9d226b12c..381928485453 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_reference_blob.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_reference_blob.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import 
StreamAnalyticsManagementClient """ @@ -44,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Update_Reference_Blob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Update_Reference_Blob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_blob.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_blob.py index 1ffff9d36fd9..f1a97d765d7b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_blob.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_blob.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -44,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Update_Stream_Blob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Update_Stream_Blob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_event_hub.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_event_hub.py index b26583d511c9..423aa5bd180f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_event_hub.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_event_hub.py @@ -7,6 +7,7 @@ # 
-------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -47,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Update_Stream_EventHub.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Update_Stream_EventHub.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_io_thub.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_io_thub.py index 3e4ae16e9c3e..374af5495a28 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_io_thub.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_io_thub.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -47,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Update_Stream_IoTHub.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Update_Stream_IoTHub.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/operation_list.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/operation_list.py index 85f28e45dd9e..e3aca0cee690 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/operation_list.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/operation_list.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -34,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Operation_List.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Operation_List.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_data_explorer.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_data_explorer.py deleted file mode 100644 index 37ad0a712066..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_data_explorer.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python output_create_azure_data_explorer.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.outputs.create_or_replace( - resource_group_name="sjrg", - job_name="sjName", - output_name="adxOutput", - output={ - "properties": { - "datasource": { - "properties": { - "authenticationMode": "Msi", - "cluster": "https://asakustotest.eastus.kusto.windows.net", - "database": "dbname", - "table": "mytable", - }, - "type": "Microsoft.Kusto/clusters/databases", - } - } - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_AzureDataExplorer.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_data_lake_store_json.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_data_lake_store_json.py index d5bd6f77475d..2c56eb0fba86 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_data_lake_store_json.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_data_lake_store_json.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -55,6 +56,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_AzureDataLakeStore_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_AzureDataLakeStore_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_function.py index ba501334d485..f8e020f85558 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_function.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -51,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_AzureFunction.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_AzureFunction.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_sql.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_sql.py index c05ce621b4de..2e9fedd6bd9f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_sql.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_sql.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -51,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_AzureSQL.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_AzureSQL.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_table.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_table.py index 3e40c7446b78..85f61e790463 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_table.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_table.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -53,6 +54,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_AzureTable.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_AzureTable.json if __name__ == "__main__": main() 
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_blob_csv.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_blob_csv.py index e8a1c7ac2ffa..4f000f05dd10 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_blob_csv.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_blob_csv.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,8 +38,6 @@ def main(): "properties": { "datasource": { "properties": { - "blobPathPrefix": "my/path", - "blobWriteMode": "Once", "container": "state", "dateFormat": "yyyy/MM/dd", "pathPattern": "{date}/{time}", @@ -54,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_Blob_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_Blob_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_data_warehouse.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_data_warehouse.py index edbd84d58d4b..804f864a1717 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_data_warehouse.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_data_warehouse.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,7 +38,6 @@ def main(): "properties": { "datasource": { 
"properties": { - "authenticationMode": "Msi", "database": "zhayaSQLpool", "password": "password123", "server": "asatestserver", @@ -52,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_DataWarehouse.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_DataWarehouse.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_delta_lake.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_delta_lake.py deleted file mode 100644 index 85bbf4df2162..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_delta_lake.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python output_create_delta_lake.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.outputs.create_or_replace( - resource_group_name="sjrg", - job_name="sjName", - output_name="output1221", - output={ - "properties": { - "datasource": { - "properties": { - "container": "deltaoutput", - "dateFormat": None, - "pathPattern": None, - "storageAccounts": [{"accountKey": "accountKey==", "accountName": "someAccountName"}], - "timeFormat": None, - }, - "type": "Microsoft.Storage/Blob", - }, - "serialization": { - "properties": {"deltaTablePath": "/folder1/table1", "partitionColumns": ["column1"]}, - "type": "Delta", - }, - } - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_DeltaLake.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_document_db.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_document_db.py index 21438058cffd..936e97b68268 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_document_db.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_document_db.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -39,7 +40,6 @@ def main(): "properties": { "accountId": "someAccountId", "accountKey": "accountKey==", - "authenticationMode": "Msi", "collectionNamePattern": "collection", "database": "db01", "documentId": "documentId", @@ 
-53,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_DocumentDB.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_DocumentDB.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_event_hub_json.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_event_hub_json.py index d2f36463133b..be9749574e75 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_event_hub_json.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_event_hub_json.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -46,16 +47,12 @@ def main(): "type": "Microsoft.ServiceBus/EventHub", }, "serialization": {"properties": {"encoding": "UTF8", "format": "Array"}, "type": "Json"}, - "watermarkSettings": { - "maxWatermarkDifferenceAcrossPartitions": "16:14:30", - "watermarkMode": "SendCurrentPartitionWatermark", - }, } }, ) print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_EventHub_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_EventHub_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_gateway_message_bus.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_gateway_message_bus.py index 
3ef91630c4ad..dba2262c84f5 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_gateway_message_bus.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_gateway_message_bus.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_GatewayMessageBus.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_GatewayMessageBus.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_postgre_sql.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_postgre_sql.py deleted file mode 100644 index d724036a1c45..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_postgre_sql.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python output_create_postgre_sql.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.outputs.create_or_replace( - resource_group_name="sjrg7983", - job_name="sj2331", - output_name="output3022", - output={ - "properties": { - "datasource": { - "properties": { - "authenticationMode": "Msi", - "database": "someDatabase", - "maxWriterCount": 1, - "password": "somePassword", - "server": "someServer", - "table": "someTable", - "user": "user", - }, - "type": "Microsoft.DBForPostgreSQL/servers/databases", - } - } - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_PostgreSQL.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_power_bi.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_power_bi.py index 4f8c23d2b1f5..d553039bb6cb 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_power_bi.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_power_bi.py @@ -7,6 +7,7 @@ # 
-------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -53,6 +54,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_PowerBI.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_PowerBI.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_queue_avro.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_queue_avro.py index 18a90a3dd713..d83ef56f79e3 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_queue_avro.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_queue_avro.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -53,6 +54,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_ServiceBusQueue_Avro.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_ServiceBusQueue_Avro.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_topic_csv.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_topic_csv.py index a95b356a2946..fb17c2e37593 100644 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_topic_csv.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_topic_csv.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -52,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_ServiceBusTopic_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_ServiceBusTopic_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_delete.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_delete.py index b7b4bd3f18bd..4c54cbd2fe0a 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_delete.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -36,6 +37,6 @@ def main(): ) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Delete.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_data_lake_store_json.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_data_lake_store_json.py index d80e4224ae3a..8a9afb891953 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_data_lake_store_json.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_data_lake_store_json.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_AzureDataLakeStore_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_AzureDataLakeStore_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_function.py index 75e980fd1f13..7eebeabeb1ef 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_function.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_AzureFunction.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_AzureFunction.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_sql.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_sql.py index 74b23920e0bb..193f7027af97 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_sql.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_sql.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_AzureSQL.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_AzureSQL.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_table.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_table.py index d23768962c47..84a0b0298dd2 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_table.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_table.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_AzureTable.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_AzureTable.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_blob_csv.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_blob_csv.py index 83fc2b925bbf..6e679a8ba691 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_blob_csv.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_blob_csv.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_Blob_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_Blob_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_data_warehouse.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_data_warehouse.py index d208aafc83c4..9ee177c321fe 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_data_warehouse.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_data_warehouse.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import 
StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_DataWarehouse.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_DataWarehouse.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_delta_lake.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_delta_lake.py deleted file mode 100644 index 1deaa980996e..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_delta_lake.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python output_get_delta_lake.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.outputs.get( - resource_group_name="sjrg", - job_name="sjName", - output_name="output1221", - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_DeltaLake.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_document_db.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_document_db.py index 2435247bdad5..314f9c4ee028 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_document_db.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_document_db.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_DocumentDB.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_DocumentDB.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_event_hub_json.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_event_hub_json.py index 11cce6ff6f36..fa623dab3823 100644 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_event_hub_json.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_event_hub_json.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_EventHub_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_EventHub_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_power_bi.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_power_bi.py index 777722b9daed..e4901f9cbbb7 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_power_bi.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_power_bi.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_PowerBI.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_PowerBI.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_queue_avro.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_queue_avro.py index e4ffa6d5a903..fc353220bfbb 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_queue_avro.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_queue_avro.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_ServiceBusQueue_Avro.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_ServiceBusQueue_Avro.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_topic_csv.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_topic_csv.py index e90b158e7ad6..ab72eac2e496 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_topic_csv.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_topic_csv.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_ServiceBusTopic_CSV.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_ServiceBusTopic_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_list_by_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_list_by_streaming_job.py index 949447f7d892..a4c57973139d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_list_by_streaming_job.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_list_by_streaming_job.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_ListByStreamingJob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_ListByStreamingJob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_test.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_test.py index 30828eb01643..b96b8641049d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_test.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_test.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Test.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Test.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_data_lake_store.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_data_lake_store.py index 09ebb9a1e913..d2671f87641d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_data_lake_store.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_data_lake_store.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -46,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_AzureDataLakeStore.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_AzureDataLakeStore.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_function.py index 9f7d09b8c8a3..79de4582b22b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_function.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import 
DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -45,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_AzureFunction.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_AzureFunction.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_sql.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_sql.py index a301c62af448..b3c48522b3b9 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_sql.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_sql.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -42,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_AzureSQL.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_AzureSQL.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_table.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_table.py index 425ea606777e..2db644b25dd0 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_table.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_table.py @@ -7,6 
+7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -45,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_AzureTable.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_AzureTable.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_blob.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_blob.py index 952c628736d9..dc49d354796b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_blob.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_blob.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -43,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_Blob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_Blob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_data_warehouse.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_data_warehouse.py index 7443b2bb61c7..37a19b5d4178 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_data_warehouse.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_data_warehouse.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -42,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_DataWarehouse.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_DataWarehouse.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_delta_lake.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_delta_lake.py deleted file mode 100644 index 091fc0520816..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_delta_lake.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python output_update_delta_lake.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.outputs.update( - resource_group_name="sjrg", - job_name="sjName", - output_name="output1221", - output={ - "properties": { - "datasource": {"properties": {"container": "deltaoutput2"}, "type": "Microsoft.Storage/Blob"}, - "serialization": { - "properties": {"deltaTablePath": "/folder1/table2", "partitionColumns": ["column2"]}, - "type": "Delta", - }, - } - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_DeltaLake.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_document_db.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_document_db.py index 109bb6610c0e..101e91c34f34 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_document_db.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_document_db.py @@ -7,6 +7,7 @@ # 
-------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -45,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_DocumentDB.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_DocumentDB.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_event_hub.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_event_hub.py index f9702f019320..bea6cda34f9a 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_event_hub.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_event_hub.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -46,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_EventHub.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_EventHub.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_power_bi.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_power_bi.py index 43ce1c7ad0bd..55809ddc058c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_power_bi.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_power_bi.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_PowerBI.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_PowerBI.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_queue.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_queue.py index c5d7496f42e5..6c7f94c17506 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_queue.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_queue.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -43,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_ServiceBusQueue.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_ServiceBusQueue.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_topic.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_topic.py index d84e679902a9..7e5959e15fa9 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_topic.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_topic.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -43,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_ServiceBusTopic.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_ServiceBusTopic.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_create.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_create.py index 12a0cf30373e..3ea674a69542 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_create.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_create.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -49,6 +50,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/PrivateEndpoint_Create.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/PrivateEndpoint_Create.json if 
__name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_delete.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_delete.py index f24ce8ea548f..64af13549992 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_delete.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -36,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/PrivateEndpoint_Delete.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/PrivateEndpoint_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_get.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_get.py index 21f437407032..c4e5c562a0d7 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_get.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/PrivateEndpoint_Get.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/PrivateEndpoint_Get.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_list_by_cluster.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_list_by_cluster.py index c2f5e219429e..f28a78b20939 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_list_by_cluster.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_list_by_cluster.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/PrivateEndpoint_ListByCluster.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/PrivateEndpoint_ListByCluster.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_complete_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_complete_job.py index f867666251e5..459af86459e3 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_complete_job.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_complete_job.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -40,18 +41,6 @@ def main(): 
"eventsLateArrivalMaxDelayInSeconds": 5, "eventsOutOfOrderMaxDelayInSeconds": 0, "eventsOutOfOrderPolicy": "Drop", - "externals": { - "container": "mycontainer", - "path": "UserCustomCode.zip", - "refreshConfiguration": { - "dateFormat": "yyyy-dd-MM", - "pathPattern": "{date}\\\\{time}", - "refreshInterval": "00:01:00", - "refreshType": "Nonblocking", - "timeFormat": "HH", - }, - "storageAccount": {"accountKey": "mykey", "accountName": "mystorageaccount"}, - }, "functions": [], "inputs": [ { @@ -102,6 +91,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Create_CompleteJob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Create_CompleteJob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_job_shell.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_job_shell.py index 22cc176276df..036ad2476ecd 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_job_shell.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_job_shell.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -52,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Create_JobShell.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Create_JobShell.json if __name__ == "__main__": main() diff 
--git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_user_assigned_identity.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_user_assigned_identity.py deleted file mode 100644 index 0851a71ba1a7..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_user_assigned_identity.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python streaming_job_create_user_assigned_identity.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.streaming_jobs.begin_create_or_replace( - resource_group_name="sjrg", - job_name="sjName", - streaming_job={ - "identity": { - "type": "UserAssigned", - "userAssignedIdentities": { - "/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.ManagedIdentity/userAssignedIdentities/sdkIdentity": {} - }, - }, - "location": "West US", - "properties": { - "compatibilityLevel": "1.0", - "dataLocale": "en-US", - "eventsLateArrivalMaxDelayInSeconds": 16, - "eventsOutOfOrderMaxDelayInSeconds": 5, - "eventsOutOfOrderPolicy": "Drop", - "functions": [], - "inputs": [], - "outputErrorPolicy": "Drop", - "outputs": [], - "sku": {"name": "Standard"}, - }, - "tags": {"key1": "value1", "key3": "value3", "randomKey": "randomValue"}, - }, - ).result() - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Create_UserAssignedIdentity.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_delete.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_delete.py index d89e76fb78a7..faef882dd51d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_delete.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_delete.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient 
""" @@ -35,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Delete.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_expand.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_expand.py index 2fcfe2d677b7..fa2d34bef7dc 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_expand.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_expand.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -36,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Get_Expand.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Get_Expand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_no_expand.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_no_expand.py index 8cef51704926..48faada282d2 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_no_expand.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_no_expand.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity 
import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -36,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Get_NoExpand.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Get_NoExpand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_skus.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_skus.py deleted file mode 100644 index 4e615b490db3..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_skus.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python streaming_job_get_skus.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.sku.list( - resource_group_name="sjrg3276", - job_name="sj7804", - ) - for item in response: - print(item) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_GetSkus.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_expand.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_expand.py index 70004c44d881..d7183cdd6f73 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_expand.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_expand.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -36,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_List_ByResourceGroup_Expand.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_List_ByResourceGroup_Expand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_no_expand.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_no_expand.py index 9e46ded1b5fa..ea8ee5d65dc2 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_no_expand.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_no_expand.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -36,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_List_ByResourceGroup_NoExpand.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_List_ByResourceGroup_NoExpand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_expand.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_expand.py index 7ea00d128a51..d223923bbddf 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_expand.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_expand.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -34,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_List_BySubscription_Expand.json +# 
x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_List_BySubscription_Expand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_no_expand.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_no_expand.py index 2dca28ce46b6..e481e385b00d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_no_expand.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_no_expand.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -34,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_List_BySubscription_NoExpand.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_List_BySubscription_NoExpand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_scale.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_scale.py index f12f296ad97c..fb03687af593 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_scale.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_scale.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient 
""" @@ -30,11 +31,11 @@ def main(): ) client.streaming_jobs.begin_scale( - resource_group_name="sjrg", - job_name="sjName", + resource_group_name="sjrg6936", + job_name="sj59", ).result() -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Scale.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Scale.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_custom_time.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_custom_time.py index 50959e62f3e6..5db84d74f85d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_custom_time.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_custom_time.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -35,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Start_CustomTime.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Start_CustomTime.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_job_start_time.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_job_start_time.py index 47c6aebf82f3..6c447a5b6456 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_job_start_time.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_job_start_time.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -35,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Start_JobStartTime.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Start_JobStartTime.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_last_output_event_time.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_last_output_event_time.py index 7f5d324101de..a28ae571ddf9 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_last_output_event_time.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_last_output_event_time.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -35,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Start_LastOutputEventTime.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Start_LastOutputEventTime.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_stop.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_stop.py index 75ecf29387c1..9d795b8f3088 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_stop.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_stop.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -35,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Stop.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Stop.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_update.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_update.py index ff709abe2cd7..0f5cb70eab83 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_update.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -39,6 +40,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Update.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Update.json if __name__ == "__main__": main() diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_compile_query.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_compile_query.py deleted file mode 100644 index 0076e9b5261a..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_compile_query.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python subscription_compile_query.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.subscriptions.compile_query( - location="West US", - compile_query={ - "compatibilityLevel": "1.2", - "functions": [ - { - "bindingType": "Microsoft.StreamAnalytics/JavascriptUdf", - "inputs": [{"dataType": "any", "isConfigurationParameter": None}], - "name": "function1", - "output": {"dataType": "bigint"}, - "type": "Scalar", - } - ], - "inputs": [{"name": "input1", "type": "Stream"}], - "jobType": "Cloud", - "query": "SELECT\r\n *\r\nINTO\r\n [output1]\r\nFROM\r\n [input1]", - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_CompileQuery.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_list_quotas.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_list_quotas.py index c21ede49a182..8b8fe328305e 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_list_quotas.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_list_quotas.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -35,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_ListQuotas.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Subscription_ListQuotas.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_sample_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_sample_input.py deleted file mode 100644 index d14f97090754..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_sample_input.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python subscription_sample_input.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.subscriptions.begin_sample_input( - location="West US", - sample_input={ - "compatibilityLevel": "1.2", - "dataLocale": "en-US", - "eventsUri": "http://myoutput.com", - "input": { - "properties": { - "datasource": { - "properties": { - "container": "state", - "dateFormat": "yyyy/MM/dd", - "pathPattern": "{date}/{time}", - "sourcePartitionCount": 16, - "storageAccounts": [{"accountKey": "someAccountKey==", "accountName": "someAccountName"}], - "timeFormat": "HH", - }, - "type": "Microsoft.Storage/Blob", - }, - "serialization": {"properties": {"encoding": "UTF8", "fieldDelimiter": ","}, "type": "Csv"}, - "type": "Stream", - } - }, - }, - ).result() - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_SampleInput.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_test_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_test_input.py deleted file mode 100644 index 7f630b5e0120..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_test_input.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python subscription_test_input.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.subscriptions.begin_test_input( - location="West US", - test_input={ - "input": { - "properties": { - "datasource": { - "properties": { - "container": "state", - "dateFormat": "yyyy/MM/dd", - "pathPattern": "{date}/{time}", - "sourcePartitionCount": 16, - "storageAccounts": [{"accountKey": "someAccountKey==", "accountName": "someAccountName"}], - "timeFormat": "HH", - }, - "type": "Microsoft.Storage/Blob", - }, - "serialization": {"properties": {"encoding": "UTF8", "fieldDelimiter": ","}, "type": "Csv"}, - "type": "Stream", - } - } - }, - ).result() - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_TestInput.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_test_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_test_output.py deleted file mode 100644 index ae83133933b9..000000000000 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_test_output.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python subscription_test_output.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.subscriptions.begin_test_output( - location="West US", - test_output={ - "output": { - "properties": { - "datasource": { - "properties": { - "container": "state", - "dateFormat": "yyyy/MM/dd", - "pathPattern": "{date}/{time}", - "storageAccounts": [{"accountKey": "accountKey==", "accountName": "someAccountName"}], - "timeFormat": "HH", - }, - "type": "Microsoft.Storage/Blob", - }, - "serialization": {"properties": {"encoding": "UTF8", "fieldDelimiter": ","}, "type": "Csv"}, - } - } - }, - ).result() - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_TestOutput.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_test_query.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_test_query.py deleted file mode 100644 index 2e13c5978dbb..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_test_query.py +++ /dev/null @@ -1,80 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python subscription_test_query.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.subscriptions.begin_test_query( - location="West US", - test_query={ - "diagnostics": {"path": "/pathto/subdirectory", "writeUri": "http://myoutput.com"}, - "streamingJob": { - "location": "West US", - "properties": { - "compatibilityLevel": "1.0", - "dataLocale": "en-US", - "eventsLateArrivalMaxDelayInSeconds": 5, - "eventsOutOfOrderMaxDelayInSeconds": 0, - "eventsOutOfOrderPolicy": "Drop", - "functions": [], - "inputs": [ - { - "name": "inputtest", - "properties": { - "datasource": {"properties": {"payloadUri": "http://myinput.com"}, "type": "Raw"}, - "serialization": {"properties": {"encoding": "UTF8"}, "type": "Json"}, - "type": "Stream", - }, - } - ], - "outputErrorPolicy": "Drop", - "outputs": [ - { - "name": "outputtest", - "properties": { - "datasource": {"properties": {"payloadUri": "http://myoutput.com"}, "type": "Raw"}, - "serialization": {"type": "Json"}, - }, - } - ], - "sku": {"name": "Standard"}, - "transformation": { - "name": "transformationtest", - "properties": {"query": "Select Id, Name from inputtest", "streamingUnits": 1}, - }, - }, - "tags": {"key1": "value1", "key3": 
"value3", "randomKey": "randomValue"}, - }, - }, - ).result() - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_TestQuery.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_create.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_create.py index dd04cde7e95f..a7593e078172 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_create.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_create.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Transformation_Create.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Transformation_Create.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_get.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_get.py index d5adfd3a9a1c..3b5ab11facfd 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_get.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_get.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -37,6 +38,6 @@ def main(): print(response) 
-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Transformation_Get.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Transformation_Get.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_update.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_update.py index 3f42af0c5b4d..3b067050ff62 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_update.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_update.py @@ -7,6 +7,7 @@ # -------------------------------------------------------------------------- from azure.identity import DefaultAzureCredential + from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient """ @@ -38,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Transformation_Update.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Transformation_Update.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/conftest.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/conftest.py new file mode 100644 index 000000000000..ec3ae8a1681d --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/conftest.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+    test_proxy,
+    add_general_regex_sanitizer,
+    add_body_key_sanitizer,
+    add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# avoid recording sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+    streamanalyticsmanagement_subscription_id = os.environ.get(
+        "AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+    )
+    streamanalyticsmanagement_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+    streamanalyticsmanagement_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+    streamanalyticsmanagement_client_secret = os.environ.get(
+        "AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000"
+    )
+    add_general_regex_sanitizer(
+        regex=streamanalyticsmanagement_subscription_id, value="00000000-0000-0000-0000-000000000000"
+    )
+    add_general_regex_sanitizer(regex=streamanalyticsmanagement_tenant_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=streamanalyticsmanagement_client_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(
+        regex=streamanalyticsmanagement_client_secret, value="00000000-0000-0000-0000-000000000000"
+    )
+
+    add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+    add_header_regex_sanitizer(key="Cookie", value="cookie;")
+    add_body_key_sanitizer(json_path="$..access_token", value="access_token")
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_clusters_operations.py
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_clusters_operations.py new file mode 100644 index 000000000000..244dce110f1b --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_clusters_operations.py @@ -0,0 +1,128 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementClustersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_update(self, resource_group): + response = self.client.clusters.begin_create_or_update( + resource_group_name=resource_group.name, + cluster_name="str", + cluster={ + "capacityAllocated": 0, + "capacityAssigned": 0, + "clusterId": "str", + "createdDate": "2020-02-20 00:00:00", + "etag": "str", + "id": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "sku": {"capacity": 0, "name": "str"}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + 
# please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_update(self, resource_group): + response = self.client.clusters.begin_update( + resource_group_name=resource_group.name, + cluster_name="str", + cluster={ + "capacityAllocated": 0, + "capacityAssigned": 0, + "clusterId": "str", + "createdDate": "2020-02-20 00:00:00", + "etag": "str", + "id": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "sku": {"capacity": 0, "name": "str"}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.clusters.get( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.clusters.begin_delete( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_subscription(self, resource_group): + response = self.client.clusters.list_by_subscription( + api_version="2020-03-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.clusters.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2020-03-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_streaming_jobs(self, resource_group): + response = self.client.clusters.list_streaming_jobs( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2020-03-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_clusters_operations_async.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_clusters_operations_async.py new file mode 100644 index 000000000000..49febc3fb100 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_clusters_operations_async.py @@ -0,0 +1,135 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementClustersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_update(self, resource_group): + response = await ( + await self.client.clusters.begin_create_or_update( + resource_group_name=resource_group.name, + cluster_name="str", + cluster={ + "capacityAllocated": 0, + "capacityAssigned": 0, + "clusterId": "str", + "createdDate": "2020-02-20 00:00:00", + "etag": "str", + "id": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "sku": {"capacity": 0, "name": "str"}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_update(self, resource_group): + response = await ( + await self.client.clusters.begin_update( + resource_group_name=resource_group.name, + cluster_name="str", + cluster={ + "capacityAllocated": 0, + "capacityAssigned": 0, + "clusterId": "str", + "createdDate": "2020-02-20 00:00:00", + "etag": "str", + "id": "str", + "location": "str", + "name": "str", + "provisioningState": "str", + "sku": {"capacity": 0, "name": "str"}, + "tags": {"str": "str"}, + "type": "str", + }, + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.clusters.get( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.clusters.begin_delete( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_subscription(self, resource_group): + response = self.client.clusters.list_by_subscription( + api_version="2020-03-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.clusters.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2020-03-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_streaming_jobs(self, resource_group): + response = self.client.clusters.list_streaming_jobs( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2020-03-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_functions_operations.py new file mode 100644 index 000000000000..07b55b89446c --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_functions_operations.py @@ -0,0 +1,111 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementFunctionsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_replace(self, resource_group): + response = self.client.functions.create_or_replace( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + function={"id": "str", "name": "str", "properties": "function_properties", "type": "str"}, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.functions.update( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + function={"id": "str", "name": "str", "properties": "function_properties", "type": "str"}, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.functions.delete( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.functions.get( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_streaming_job(self, resource_group): + response = self.client.functions.list_by_streaming_job( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_test(self, resource_group): + response = self.client.functions.begin_test( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_retrieve_default_definition(self, resource_group): + response = self.client.functions.retrieve_default_definition( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_functions_operations_async.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_functions_operations_async.py new file mode 100644 index 000000000000..e4af197a031b --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_functions_operations_async.py @@ -0,0 +1,114 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementFunctionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_replace(self, resource_group): + response = await self.client.functions.create_or_replace( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + function={"id": "str", "name": "str", "properties": "function_properties", "type": "str"}, + api_version="2020-03-01", + ) + + # please add some check logic here by 
yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.functions.update( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + function={"id": "str", "name": "str", "properties": "function_properties", "type": "str"}, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.functions.delete( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.functions.get( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_streaming_job(self, resource_group): + response = self.client.functions.list_by_streaming_job( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_test(self, resource_group): + response = await ( + await self.client.functions.begin_test( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_retrieve_default_definition(self, resource_group): + response = await self.client.functions.retrieve_default_definition( + resource_group_name=resource_group.name, + job_name="str", + function_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_inputs_operations.py new file mode 100644 index 000000000000..6d0e867e634d --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_inputs_operations.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementInputsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_replace(self, resource_group): + response = self.client.inputs.create_or_replace( + resource_group_name=resource_group.name, + job_name="str", + input_name="str", + input={"id": "str", "name": "str", "properties": "input_properties", "type": "str"}, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.inputs.update( + resource_group_name=resource_group.name, + job_name="str", + input_name="str", + input={"id": "str", "name": "str", "properties": "input_properties", "type": "str"}, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.inputs.delete( + resource_group_name=resource_group.name, + job_name="str", + input_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.inputs.get( + resource_group_name=resource_group.name, + job_name="str", + input_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_streaming_job(self, resource_group): + response = self.client.inputs.list_by_streaming_job( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_test(self, resource_group): + response = self.client.inputs.begin_test( + resource_group_name=resource_group.name, + job_name="str", + input_name="str", + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_inputs_operations_async.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_inputs_operations_async.py new file mode 100644 index 000000000000..6e9a66c1406e --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_inputs_operations_async.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementInputsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_replace(self, resource_group): + response = await self.client.inputs.create_or_replace( + resource_group_name=resource_group.name, + job_name="str", + input_name="str", + input={"id": "str", "name": "str", "properties": "input_properties", "type": "str"}, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.inputs.update( + resource_group_name=resource_group.name, + job_name="str", + input_name="str", + input={"id": "str", "name": "str", "properties": "input_properties", "type": "str"}, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.inputs.delete( + resource_group_name=resource_group.name, + job_name="str", + input_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.inputs.get( + resource_group_name=resource_group.name, + job_name="str", + input_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_streaming_job(self, resource_group): + response = self.client.inputs.list_by_streaming_job( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_test(self, resource_group): + response = await ( + await self.client.inputs.begin_test( + resource_group_name=resource_group.name, + job_name="str", + input_name="str", + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_operations.py new file mode 100644 index 000000000000..5147fccd50bc --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_operations.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2020-03-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_operations_async.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_operations_async.py new file mode 100644 index 000000000000..ad74ba306685 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_operations_async.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2020-03-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_outputs_operations.py new file mode 100644 index 000000000000..090c28f98ce4 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_outputs_operations.py @@ -0,0 +1,118 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementOutputsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_replace(self, resource_group): + response = self.client.outputs.create_or_replace( + resource_group_name=resource_group.name, + job_name="str", + output_name="str", + output={ + "datasource": "output_data_source", + "diagnostics": {"conditions": [{"code": "str", "message": "str", "since": "str"}]}, + "etag": "str", + "id": "str", + "name": "str", + "serialization": "serialization", + "sizeWindow": 0, + "timeWindow": "str", + "type": "str", + }, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.outputs.update( + resource_group_name=resource_group.name, + job_name="str", + output_name="str", + output={ + "datasource": "output_data_source", + "diagnostics": {"conditions": [{"code": "str", "message": "str", "since": "str"}]}, + "etag": "str", + "id": "str", + "name": "str", + "serialization": "serialization", + "sizeWindow": 0, + "timeWindow": "str", + "type": "str", + }, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.outputs.delete( + resource_group_name=resource_group.name, + job_name="str", + output_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.outputs.get( + resource_group_name=resource_group.name, + job_name="str", + output_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_streaming_job(self, resource_group): + response = self.client.outputs.list_by_streaming_job( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_test(self, resource_group): + response = self.client.outputs.begin_test( + resource_group_name=resource_group.name, + job_name="str", + output_name="str", + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_outputs_operations_async.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_outputs_operations_async.py new file mode 100644 index 000000000000..960ca4ae5a82 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_outputs_operations_async.py @@ -0,0 +1,121 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementOutputsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_replace(self, resource_group): + response = await self.client.outputs.create_or_replace( + resource_group_name=resource_group.name, + job_name="str", + output_name="str", + output={ + "datasource": "output_data_source", + "diagnostics": {"conditions": [{"code": "str", "message": "str", "since": "str"}]}, + "etag": "str", + "id": "str", + "name": "str", + 
"serialization": "serialization", + "sizeWindow": 0, + "timeWindow": "str", + "type": "str", + }, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.outputs.update( + resource_group_name=resource_group.name, + job_name="str", + output_name="str", + output={ + "datasource": "output_data_source", + "diagnostics": {"conditions": [{"code": "str", "message": "str", "since": "str"}]}, + "etag": "str", + "id": "str", + "name": "str", + "serialization": "serialization", + "sizeWindow": 0, + "timeWindow": "str", + "type": "str", + }, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.outputs.delete( + resource_group_name=resource_group.name, + job_name="str", + output_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.outputs.get( + resource_group_name=resource_group.name, + job_name="str", + output_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_streaming_job(self, resource_group): + response = self.client.outputs.list_by_streaming_job( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_test(self, resource_group): + response = await ( + await self.client.outputs.begin_test( + resource_group_name=resource_group.name, + job_name="str", + output_name="str", + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_private_endpoints_operations.py new file mode 100644 index 000000000000..dc766a48916a --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_private_endpoints_operations.py @@ -0,0 +1,89 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementPrivateEndpointsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.private_endpoints.create_or_update( + resource_group_name=resource_group.name, + cluster_name="str", + private_endpoint_name="str", + private_endpoint={ + "createdDate": "str", + "etag": "str", + "id": "str", + "manualPrivateLinkServiceConnections": [ + { + "groupIds": ["str"], + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "privateLinkServiceId": "str", + "requestMessage": "str", + } + ], + "name": "str", + "type": "str", + }, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.private_endpoints.get( + resource_group_name=resource_group.name, + cluster_name="str", + private_endpoint_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.private_endpoints.begin_delete( + resource_group_name=resource_group.name, + cluster_name="str", + private_endpoint_name="str", + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_cluster(self, resource_group): + response = self.client.private_endpoints.list_by_cluster( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2020-03-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_private_endpoints_operations_async.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_private_endpoints_operations_async.py new file mode 100644 index 000000000000..8187d24a6362 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_private_endpoints_operations_async.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementPrivateEndpointsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.private_endpoints.create_or_update( + resource_group_name=resource_group.name, + cluster_name="str", + private_endpoint_name="str", + private_endpoint={ + "createdDate": "str", + "etag": "str", + "id": "str", + "manualPrivateLinkServiceConnections": [ + { + "groupIds": ["str"], + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "privateLinkServiceId": "str", + "requestMessage": "str", + } + ], + "name": "str", + "type": "str", + }, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.private_endpoints.get( + resource_group_name=resource_group.name, + cluster_name="str", + private_endpoint_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.private_endpoints.begin_delete( + resource_group_name=resource_group.name, + cluster_name="str", + private_endpoint_name="str", + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_cluster(self, resource_group): + response = self.client.private_endpoints.list_by_cluster( + resource_group_name=resource_group.name, + cluster_name="str", + api_version="2020-03-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_streaming_jobs_operations.py new file mode 100644 index 000000000000..ee34e7833721 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_streaming_jobs_operations.py @@ -0,0 +1,234 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementStreamingJobsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create_or_replace(self, resource_group): + response = self.client.streaming_jobs.begin_create_or_replace( + resource_group_name=resource_group.name, + job_name="str", + streaming_job={ + "cluster": {"id": "str"}, + "compatibilityLevel": "str", + "contentStoragePolicy": "str", + "createdDate": "2020-02-20 00:00:00", + "dataLocale": "str", + "etag": "str", + "eventsLateArrivalMaxDelayInSeconds": 0, + "eventsOutOfOrderMaxDelayInSeconds": 0, + "eventsOutOfOrderPolicy": "str", + "functions": [{"id": "str", "name": "str", "properties": "function_properties", "type": "str"}], + "id": "str", + "identity": {"principalId": "str", "tenantId": "str", "type": "str"}, + "inputs": [{"id": "str", "name": "str", "properties": "input_properties", "type": "str"}], + "jobId": "str", + "jobState": "str", + "jobStorageAccount": { + "accountKey": "str", + "accountName": "str", + "authenticationMode": "ConnectionString", + }, + "jobType": "str", + "lastOutputEventTime": "2020-02-20 00:00:00", + "location": "str", + "name": "str", + "outputErrorPolicy": "str", + "outputStartMode": "str", + "outputStartTime": "2020-02-20 00:00:00", + "outputs": [ + { + "datasource": "output_data_source", + "diagnostics": {"conditions": [{"code": "str", "message": "str", "since": "str"}]}, + "etag": "str", + "id": "str", + 
"name": "str", + "serialization": "serialization", + "sizeWindow": 0, + "timeWindow": "str", + "type": "str", + } + ], + "provisioningState": "str", + "sku": {"name": "str"}, + "tags": {"str": "str"}, + "transformation": { + "etag": "str", + "id": "str", + "name": "str", + "query": "str", + "streamingUnits": 3, + "type": "str", + "validStreamingUnits": [0], + }, + "type": "str", + }, + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.streaming_jobs.update( + resource_group_name=resource_group.name, + job_name="str", + streaming_job={ + "cluster": {"id": "str"}, + "compatibilityLevel": "str", + "contentStoragePolicy": "str", + "createdDate": "2020-02-20 00:00:00", + "dataLocale": "str", + "etag": "str", + "eventsLateArrivalMaxDelayInSeconds": 0, + "eventsOutOfOrderMaxDelayInSeconds": 0, + "eventsOutOfOrderPolicy": "str", + "functions": [{"id": "str", "name": "str", "properties": "function_properties", "type": "str"}], + "id": "str", + "identity": {"principalId": "str", "tenantId": "str", "type": "str"}, + "inputs": [{"id": "str", "name": "str", "properties": "input_properties", "type": "str"}], + "jobId": "str", + "jobState": "str", + "jobStorageAccount": { + "accountKey": "str", + "accountName": "str", + "authenticationMode": "ConnectionString", + }, + "jobType": "str", + "lastOutputEventTime": "2020-02-20 00:00:00", + "location": "str", + "name": "str", + "outputErrorPolicy": "str", + "outputStartMode": "str", + "outputStartTime": "2020-02-20 00:00:00", + "outputs": [ + { + "datasource": "output_data_source", + "diagnostics": {"conditions": [{"code": "str", "message": "str", "since": "str"}]}, + "etag": "str", + "id": "str", + "name": "str", + "serialization": "serialization", + "sizeWindow": 0, + 
"timeWindow": "str", + "type": "str", + } + ], + "provisioningState": "str", + "sku": {"name": "str"}, + "tags": {"str": "str"}, + "transformation": { + "etag": "str", + "id": "str", + "name": "str", + "query": "str", + "streamingUnits": 3, + "type": "str", + "validStreamingUnits": [0], + }, + "type": "str", + }, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_delete(self, resource_group): + response = self.client.streaming_jobs.begin_delete( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.streaming_jobs.get( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.streaming_jobs.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2020-03-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.streaming_jobs.list( + api_version="2020-03-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_start(self, resource_group): + response = self.client.streaming_jobs.begin_start( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_stop(self, resource_group): + response = self.client.streaming_jobs.begin_stop( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_scale(self, resource_group): + response = self.client.streaming_jobs.begin_scale( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_streaming_jobs_operations_async.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_streaming_jobs_operations_async.py new file mode 100644 index 000000000000..58999cd4715e --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_streaming_jobs_operations_async.py @@ -0,0 +1,245 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementStreamingJobsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create_or_replace(self, resource_group): + response = await ( + await self.client.streaming_jobs.begin_create_or_replace( + resource_group_name=resource_group.name, + job_name="str", + streaming_job={ + "cluster": {"id": "str"}, + "compatibilityLevel": "str", + "contentStoragePolicy": "str", + "createdDate": "2020-02-20 00:00:00", + "dataLocale": "str", + "etag": "str", + "eventsLateArrivalMaxDelayInSeconds": 0, + "eventsOutOfOrderMaxDelayInSeconds": 0, + "eventsOutOfOrderPolicy": "str", + "functions": [{"id": "str", "name": "str", "properties": "function_properties", "type": "str"}], + "id": "str", + "identity": {"principalId": "str", "tenantId": "str", "type": "str"}, + "inputs": [{"id": "str", "name": "str", "properties": "input_properties", "type": "str"}], + "jobId": "str", + "jobState": "str", + "jobStorageAccount": { + "accountKey": "str", + "accountName": "str", + "authenticationMode": "ConnectionString", + }, + "jobType": "str", + "lastOutputEventTime": "2020-02-20 00:00:00", + "location": "str", + "name": "str", + "outputErrorPolicy": "str", + 
"outputStartMode": "str", + "outputStartTime": "2020-02-20 00:00:00", + "outputs": [ + { + "datasource": "output_data_source", + "diagnostics": {"conditions": [{"code": "str", "message": "str", "since": "str"}]}, + "etag": "str", + "id": "str", + "name": "str", + "serialization": "serialization", + "sizeWindow": 0, + "timeWindow": "str", + "type": "str", + } + ], + "provisioningState": "str", + "sku": {"name": "str"}, + "tags": {"str": "str"}, + "transformation": { + "etag": "str", + "id": "str", + "name": "str", + "query": "str", + "streamingUnits": 3, + "type": "str", + "validStreamingUnits": [0], + }, + "type": "str", + }, + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.streaming_jobs.update( + resource_group_name=resource_group.name, + job_name="str", + streaming_job={ + "cluster": {"id": "str"}, + "compatibilityLevel": "str", + "contentStoragePolicy": "str", + "createdDate": "2020-02-20 00:00:00", + "dataLocale": "str", + "etag": "str", + "eventsLateArrivalMaxDelayInSeconds": 0, + "eventsOutOfOrderMaxDelayInSeconds": 0, + "eventsOutOfOrderPolicy": "str", + "functions": [{"id": "str", "name": "str", "properties": "function_properties", "type": "str"}], + "id": "str", + "identity": {"principalId": "str", "tenantId": "str", "type": "str"}, + "inputs": [{"id": "str", "name": "str", "properties": "input_properties", "type": "str"}], + "jobId": "str", + "jobState": "str", + "jobStorageAccount": { + "accountKey": "str", + "accountName": "str", + "authenticationMode": "ConnectionString", + }, + "jobType": "str", + "lastOutputEventTime": "2020-02-20 00:00:00", + "location": "str", + "name": "str", + "outputErrorPolicy": "str", + "outputStartMode": "str", + "outputStartTime": "2020-02-20 
00:00:00", + "outputs": [ + { + "datasource": "output_data_source", + "diagnostics": {"conditions": [{"code": "str", "message": "str", "since": "str"}]}, + "etag": "str", + "id": "str", + "name": "str", + "serialization": "serialization", + "sizeWindow": 0, + "timeWindow": "str", + "type": "str", + } + ], + "provisioningState": "str", + "sku": {"name": "str"}, + "tags": {"str": "str"}, + "transformation": { + "etag": "str", + "id": "str", + "name": "str", + "query": "str", + "streamingUnits": 3, + "type": "str", + "validStreamingUnits": [0], + }, + "type": "str", + }, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_delete(self, resource_group): + response = await ( + await self.client.streaming_jobs.begin_delete( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.streaming_jobs.get( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.streaming_jobs.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2020-03-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.streaming_jobs.list( + api_version="2020-03-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_start(self, resource_group): + response = await ( + await self.client.streaming_jobs.begin_start( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_stop(self, resource_group): + response = await ( + await self.client.streaming_jobs.begin_stop( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_scale(self, resource_group): + response = await ( + await self.client.streaming_jobs.begin_scale( + resource_group_name=resource_group.name, + job_name="str", + api_version="2020-03-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_subscriptions_operations.py new file mode 100644 index 000000000000..220dce47a901 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_subscriptions_operations.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementSubscriptionsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_quotas(self, resource_group): + response = self.client.subscriptions.list_quotas( + location="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_subscriptions_operations_async.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_subscriptions_operations_async.py new file mode 100644 index 000000000000..c5dbcbdaea9e --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_subscriptions_operations_async.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementSubscriptionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_quotas(self, resource_group): + response = await self.client.subscriptions.list_quotas( + location="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_transformations_operations.py new file mode 100644 index 000000000000..6aa42483ac97 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_transformations_operations.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementTransformationsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_replace(self, resource_group): + response = self.client.transformations.create_or_replace( + resource_group_name=resource_group.name, + job_name="str", + transformation_name="str", + transformation={ + "etag": "str", + "id": "str", + "name": "str", + "query": "str", + "streamingUnits": 3, + "type": "str", + "validStreamingUnits": [0], + }, + api_version="2020-03-01", + ) + + # please add some check logic here by 
yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.transformations.update( + resource_group_name=resource_group.name, + job_name="str", + transformation_name="str", + transformation={ + "etag": "str", + "id": "str", + "name": "str", + "query": "str", + "streamingUnits": 3, + "type": "str", + "validStreamingUnits": [0], + }, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.transformations.get( + resource_group_name=resource_group.name, + job_name="str", + transformation_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_transformations_operations_async.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_transformations_operations_async.py new file mode 100644 index 000000000000..599df12c1169 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_tests/test_stream_analytics_management_transformations_operations_async.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.streamanalytics.aio import StreamAnalyticsManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestStreamAnalyticsManagementTransformationsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(StreamAnalyticsManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_replace(self, resource_group): + response = await self.client.transformations.create_or_replace( + resource_group_name=resource_group.name, + job_name="str", + transformation_name="str", + transformation={ + "etag": "str", + "id": "str", + "name": "str", + "query": "str", + "streamingUnits": 3, + "type": "str", + "validStreamingUnits": [0], + }, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.transformations.update( + resource_group_name=resource_group.name, + job_name="str", + transformation_name="str", + transformation={ + "etag": "str", + "id": "str", + "name": "str", + "query": "str", + "streamingUnits": 3, + "type": "str", + "validStreamingUnits": [0], + }, + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.transformations.get( + resource_group_name=resource_group.name, + job_name="str", + transformation_name="str", + api_version="2020-03-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py index 81511cdf3d60..6166caa1d823 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/setup.py @@ -53,11 +53,11 @@ "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "License :: OSI Approved :: MIT License", ], zip_safe=False, @@ -74,10 +74,10 @@ "pytyped": ["py.typed"], }, install_requires=[ - "isodate<1.0.0,>=0.6.1", - "azure-common~=1.1", - "azure-mgmt-core>=1.3.2,<2.0.0", - "typing-extensions>=4.3.0; python_version<'3.8.0'", + "isodate>=0.6.1", + "typing-extensions>=4.6.0", + "azure-common>=1.1", + "azure-mgmt-core>=1.3.2", ], - python_requires=">=3.7", + python_requires=">=3.8", )