From 46106d3c4e5949ec9dab22a62c7b1813423605a9 Mon Sep 17 00:00:00 2001 From: Chenyang Liu Date: Wed, 23 Jul 2025 10:56:20 +1000 Subject: [PATCH 1/2] Support python 3.13 --- src/webpubsub/HISTORY.rst | 4 + src/webpubsub/azext_webpubsub/client.py | 5 +- .../__init__.py | 236 +- .../_client.py | 101 + .../_configuration.py | 69 + .../_models.py | 35 + .../_operations/__init__.py | 25 + .../_operations/_operations.py | 2147 +++++++++++++++++ .../_operations/_patch.py | 877 +++++++ .../_patch.py | 202 ++ .../_policies.py | 83 - .../_serialization.py | 2000 +++++++++++++++ .../_utils.py | 45 - .../_utils/__init__.py | 6 + .../_utils/serialization.py | 2032 ++++++++++++++++ .../_utils/utils.py | 25 + .../_vendor.py | 26 + .../_version.py | 14 +- .../azure_messaging_webpubsubservice/aio.py | 110 - .../aio/__init__.py | 29 + .../aio/_client.py | 103 + .../aio/_configuration.py | 69 + .../aio/_operations/__init__.py | 25 + .../aio/_operations/_operations.py | 1472 +++++++++++ .../aio/_operations/_patch.py | 870 +++++++ .../aio/_patch.py | 83 + .../aio/_vendor.py | 26 + .../core/__init__.py | 0 .../core/rest/__init__.py | 65 - .../core/rest/_rest.py | 625 ----- .../core/rest/_rest_py3.py | 739 ------ .../azure_messaging_webpubsubservice/py.typed | 1 + .../azure_messaging_webpubsubservice/rest.py | 942 -------- src/webpubsub/setup.py | 2 +- 34 files changed, 10258 insertions(+), 2835 deletions(-) create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_client.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_configuration.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_models.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/__init__.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/_operations.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/_patch.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_patch.py delete mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_policies.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_serialization.py delete mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/__init__.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/serialization.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/utils.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_vendor.py delete mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/__init__.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_client.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_configuration.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/__init__.py 
create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/_operations.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/_patch.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_patch.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_vendor.py delete mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/__init__.py delete mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/__init__.py delete mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/_rest.py delete mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/_rest_py3.py create mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/py.typed delete mode 100644 src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/rest.py diff --git a/src/webpubsub/HISTORY.rst b/src/webpubsub/HISTORY.rst index 29e9745327b..769b06bed90 100644 --- a/src/webpubsub/HISTORY.rst +++ b/src/webpubsub/HISTORY.rst @@ -3,6 +3,10 @@ Release History =============== +1.7.2 +++++ +* Upgrade `vendored_sdks` to ensure the compatibility with Python 3.13 + 1.7.1 +++++ * Upgrade `websockets` to `13.0.1` to sure the compatibility with Python 3.12 diff --git a/src/webpubsub/azext_webpubsub/client.py b/src/webpubsub/azext_webpubsub/client.py index 8435537de66..918b0635a8c 100644 --- a/src/webpubsub/azext_webpubsub/client.py +++ b/src/webpubsub/azext_webpubsub/client.py @@ -10,7 +10,7 @@ import json import websockets from .vendored_sdks.azure_messaging_webpubsubservice import ( - build_authentication_token + WebPubSubServiceClient ) @@ -54,7 +54,8 @@ async def connect(url): def start_client(client, resource_group_name, webpubsub_name, hub_name, user_id=None): keys = client.list_keys(resource_group_name, webpubsub_name) connection_string = keys.primary_connection_string - token = build_authentication_token(connection_string, hub_name, roles=['webpubsub.sendToGroup', 'webpubsub.joinLeaveGroup'], user=user_id) + service_client = WebPubSubServiceClient.from_connection_string(connection_string, hub_name) + token = service_client.get_client_access_token(roles=['webpubsub.sendToGroup', 'webpubsub.joinLeaveGroup'], user=user_id) asyncio.get_event_loop().run_until_complete(connect(token['url'])) diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/__init__.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/__init__.py index 529b1f7e9d7..dd053482fed 100644 --- a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/__init__.py +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/__init__.py @@ -1,232 +1,32 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# +# Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -__all__ = ["build_authentication_token", "WebPubSubServiceClient"] - -from copy import deepcopy -from datetime import datetime, timedelta from typing import TYPE_CHECKING -import jwt -import six - -import azure.core.credentials as corecredentials -import azure.core.pipeline as corepipeline -import azure.core.pipeline.policies as corepolicies -import azure.core.pipeline.transport as coretransport - - -# Temporary location for types that eventually graduate to Azure Core -from .core import rest as corerest -from ._version import VERSION as _VERSION -from ._policies import JwtCredentialPolicy -from ._utils import UTC as _UTC - if TYPE_CHECKING: - from azure.core.pipeline.policies import HTTPPolicy, SansIOHTTPPolicy - from typing import Any, List, cast, Type, TypeVar - - ClientType = TypeVar("ClientType", bound="WebPubSubServiceClient") - - -def _parse_connection_string(connection_string, **kwargs): - for segment in connection_string.split(";"): - if "=" in segment: - key, value = segment.split("=", maxsplit=1) - key = key.lower() - if key not in ("version", ): - kwargs.setdefault(key, value) - elif segment: - raise ValueError( - "Malformed connection string - expected 'key=value', found segment '{}' in '{}'".format( - segment, connection_string - ) - ) - - if "endpoint" not in kwargs: - raise ValueError("connection_string missing 'endpoint' field") - - if "accesskey" not in kwargs: - raise ValueError("connection_string missing 'accesskey' field") - - return kwargs - -def build_authentication_token(endpoint, hub, **kwargs): - """Build an authentication token for the given endpoint, hub using the provided key. - - :keyword endpoint: connetion string or HTTP or HTTPS endpoint for the WebPubSub service instance. - :type endpoint: ~str - :keyword hub: The hub to give access to. - :type hub: ~str - :keyword accesskey: Key to sign the token with. Required if endpoint is not a connection string - :type accesskey: ~str - :keyword ttl: Optional ttl timedelta for the token. Default is 1 hour. - :type ttl: ~datetime.timedelta - :keyword user: Optional user name (subject) for the token. Default is no user. - :type user: ~str - :keyword roles: Roles for the token. - :type roles: typing.List[str]. Default is no roles. - :returns: ~dict containing the web socket endpoint, the token and a url with the generated access token. - :rtype: ~dict - - - Example: - >>> build_authentication_token(endpoint='https://contoso.com/api/webpubsub', hub='theHub', key='123') - { - 'baseUrl': 'wss://contoso.com/api/webpubsub/client/hubs/theHub', - 'token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ...', - 'url': 'wss://contoso.com/api/webpubsub/client/hubs/theHub?access_token=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ...' 
- } - """ - if 'accesskey' not in kwargs: - kwargs = _parse_connection_string(endpoint, **kwargs) - endpoint = kwargs.pop('endpoint') - - user = kwargs.pop("user", None) - key = kwargs.pop("accesskey") - ttl = kwargs.pop("ttl", timedelta(hours=1)) - roles = kwargs.pop("roles", []) - endpoint = endpoint.lower() - if not endpoint.startswith("http://") and not endpoint.startswith("https://"): - raise ValueError( - "Invalid endpoint: '{}' has unknown scheme - expected 'http://' or 'https://'".format( - endpoint - ) - ) - - # Ensure endpoint has no trailing slash - endpoint = endpoint.rstrip("/") - - # Switch from http(s) to ws(s) scheme - client_endpoint = "ws" + endpoint[4:] - client_url = "{}/client/hubs/{}".format(client_endpoint, hub) - audience = "{}/client/hubs/{}".format(endpoint, hub) - - payload = { - "aud": audience, - "iat": datetime.now(tz=_UTC), - "exp": datetime.now(tz=_UTC) + ttl, - } - if user: - payload["sub"] = user - if roles: - payload["role"] = roles - - token = six.ensure_str(jwt.encode(payload, key, algorithm="HS256")) - return { - "baseUrl": client_url, - "token": token, - "url": "{}?access_token={}".format(client_url, token), - } - - -class WebPubSubServiceClient(object): - def __init__(self, endpoint, credential, **kwargs): - # type: (str, corecredentials.AzureKeyCredential, Any) -> None - """Create a new WebPubSubServiceClient instance - - :param endpoint: Endpoint to connect to. - :type endpoint: ~str - :param credential: Credentials to use to connect to endpoint. - :type credential: ~azure.core.credentials.AzureKeyCredential - :keyword api_version: Api version to use when communicating with the service. - :type api_version: str - :keyword user: User to connect as. Optional. - :type user: ~str - """ - self.endpoint = endpoint.rstrip("/") - transport = kwargs.pop("transport", None) or coretransport.RequestsTransport( - **kwargs - ) - kwargs.setdefault( - "sdk_moniker", "messaging-webpubsubservice/{}".format(_VERSION) - ) - policies = [ - corepolicies.HeadersPolicy(**kwargs), - corepolicies.UserAgentPolicy(**kwargs), - corepolicies.RetryPolicy(**kwargs), - corepolicies.ProxyPolicy(**kwargs), - corepolicies.CustomHookPolicy(**kwargs), - corepolicies.RedirectPolicy(**kwargs), - JwtCredentialPolicy(credential, kwargs.get("user", None)), - corepolicies.NetworkTraceLoggingPolicy(**kwargs), - ] # type: Any - self._pipeline = corepipeline.Pipeline( - transport, - policies, - ) # type: corepipeline.Pipeline - - @classmethod - def from_connection_string(cls, connection_string, **kwargs): - # type: (Type[ClientType], str, Any) -> ClientType - """Create a new WebPubSubServiceClient from a connection string. - - :param connection_string: Connection string - :type connection_string: ~str - :rtype: WebPubSubServiceClient - """ - kwargs = _parse_connection_string(connection_string, **kwargs) - - kwargs["credential"] = corecredentials.AzureKeyCredential( - kwargs.pop("accesskey") - ) - return cls(**kwargs) - - def __repr__(self): - return " endpoint:'{}'".format(self.endpoint) - - def _format_url(self, url): - # type: (str) -> str - assert self.endpoint[-1] != "/", "My endpoint should not have a trailing slash" - return "/".join([self.endpoint, url.lstrip("/")]) - - def send_request(self, http_request, **kwargs): - # type: (corerest.HttpRequest, Any) -> corerest.HttpResponse - """Runs the network request through the client's chained policies. - - We have helper methods to create requests specific to this service in `azure.messaging.webpubsub.rest`. 
- Use these helper methods to create the request you pass to this method. See our example below: + from ._patch import * # pylint: disable=unused-wildcard-import - >>> from azure.messaging.webpubsub.rest import build_healthapi_get_health_status_request - >>> request = build_healthapi_get_health_status_request(api_version) - - >>> response = client.send_request(request) - +from ._client import WebPubSubServiceClient # type: ignore +from ._version import VERSION - For more information on this code flow, see https://aka.ms/azsdk/python/llcwiki +__version__ = VERSION - For advanced cases, you can also create your own :class:`~azure.messaging.webpubsub.core.rest.HttpRequest` - and pass it in. +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk - :param http_request: The network request you want to make. Required. - :type http_request: ~azure.messaging.webpubsub.core.rest.HttpRequest - :keyword bool stream_response: Whether the response payload will be streamed. Defaults to False. - :return: The response of your network call. Does not do error handling on your response. - :rtype: ~azure.messaging.webpubsub.core.rest.HttpResponse - """ - request_copy = deepcopy(http_request) - request_copy.url = self._format_url(request_copy.url) +__all__ = [ + "WebPubSubServiceClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore - # can't do StreamCOntextManager yet. This client doesn't have a pipeline client, - # StreamContextManager requires a pipeline client. WIll look more into it - # if kwargs.pop("stream_response", False): - # return corerest._StreamContextManager( - # client=self._client, - # request=request_copy, - # ) - pipeline_response = self._pipeline.run(request_copy._internal_request, **kwargs) # pylint: disable=protected-access - response = corerest.HttpResponse( - status_code=pipeline_response.http_response.status_code, - request=request_copy, - _internal_response=pipeline_response.http_response, - ) - response.read() - return response +_patch_sdk() diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_client.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_client.py new file mode 100644 index 00000000000..dc36cb43064 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_client.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import PipelineClient +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse + +from ._configuration import WebPubSubServiceClientConfiguration +from ._operations import WebPubSubServiceClientOperationsMixin +from ._utils.serialization import Deserializer, Serializer + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class WebPubSubServiceClient(WebPubSubServiceClientOperationsMixin): + """WebPubSubServiceClient. + + :param hub: Target hub name, which should start with alphabetic characters and only contain + alpha-numeric characters or underscore. Required. + :type hub: str + :param endpoint: HTTP or HTTPS endpoint for the Web PubSub service instance. Required. + :type endpoint: str + :param credential: Credential needed for the client to connect to Azure. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: Api Version. Default value is "2024-12-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, hub: str, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + _endpoint = "{endpoint}" + self._config = WebPubSubServiceClientConfiguration(hub=hub, endpoint=endpoint, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_configuration.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_configuration.py new file mode 100644 index 00000000000..abd1d805a19 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_configuration.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from ._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class WebPubSubServiceClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for WebPubSubServiceClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param hub: Target hub name, which should start with alphabetic characters and only contain + alpha-numeric characters or underscore. Required. + :type hub: str + :param endpoint: HTTP or HTTPS endpoint for the Web PubSub service instance. Required. + :type endpoint: str + :param credential: Credential needed for the client to connect to Azure. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: Api Version. Default value is "2024-12-01". Note that overriding this + default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, hub: str, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-12-01") + + if hub is None: + raise ValueError("Parameter 'hub' must not be None.") + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.hub = hub + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://webpubsub.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "messaging-webpubsubservice/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_models.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_models.py new file mode 100644 index 00000000000..df6e9dd4470 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_models.py @@ -0,0 +1,35 @@ +from typing import Optional + +class GroupMember: + """Represents a member in a group. + + :param connection_id: A unique identifier of a connection. + :type connection_id: str + :param user_id: The user ID of the connection. A user can have multiple connections. + :type user_id: Optional[str] + """ + + def __init__(self, *, connection_id: str, user_id: Optional[str] = None) -> None: + self._connection_id = connection_id + self._user_id = user_id + + @property + def connection_id(self) -> str: + """Gets the connection ID. + + :return: The connection ID. + :rtype: str + """ + return self._connection_id + + @property + def user_id(self) -> Optional[str]: + """Gets the user ID. + + :return: The user ID. 
+ :rtype: Optional[str] + """ + return self._user_id + + def __repr__(self) -> str: + return f"GroupMember(connection_id={self._connection_id!r}, user_id={self._user_id!r}, id={id(self)})"[:1024] diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/__init__.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/__init__.py new file mode 100644 index 00000000000..e1be16dd4a3 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/__init__.py @@ -0,0 +1,25 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import WebPubSubServiceClientOperationsMixin # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "WebPubSubServiceClientOperationsMixin", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/_operations.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/_operations.py new file mode 100644 index 00000000000..7bea732e487 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/_operations.py @@ -0,0 +1,2147 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, cast +import urllib.parse + +from azure.core import PipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from .._configuration import WebPubSubServiceClientConfiguration +from .._utils.serialization import Serializer +from .._utils.utils import ClientMixinABC + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_web_pub_sub_service_close_all_connections_request( # pylint: disable=name-too-long + hub: str, *, excluded: Optional[List[str]] = None, reason: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/:closeConnections" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if excluded is not None: + _params["excluded"] = [_SERIALIZER.query("excluded", q, "str") if q is not None else "" for q in excluded] + if reason is not None: + _params["reason"] = _SERIALIZER.query("reason", reason, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_web_pub_sub_service_get_client_access_token_request( # pylint: disable=name-too-long + hub: str, + *, + user_id: Optional[str] = None, + roles: Optional[List[str]] = None, + minutes_to_expire: int = 60, + groups: Optional[List[str]] = None, + client_protocol: str = "Default", + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json, text/json") + + # Construct URL + _url = "/api/hubs/{hub}/:generateToken" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if user_id is not None: + _params["userId"] = _SERIALIZER.query("user_id", user_id, "str") + if roles is not None: + _params["role"] = [_SERIALIZER.query("roles", q, "str") if q is not None else "" for q in roles] + if minutes_to_expire is not None: + _params["minutesToExpire"] = 
_SERIALIZER.query("minutes_to_expire", minutes_to_expire, "int", minimum=1) + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if groups is not None: + _params["group"] = [_SERIALIZER.query("groups", q, "str") if q is not None else "" for q in groups] + if client_protocol is not None: + _params["clientType"] = _SERIALIZER.query("client_protocol", client_protocol, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_web_pub_sub_service_send_to_all_request( # pylint: disable=name-too-long + hub: str, + *, + content: IO[bytes], + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + message_ttl_seconds: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/:send" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if excluded is not None: + _params["excluded"] = [_SERIALIZER.query("excluded", q, "str") if q is not None else "" for q in excluded] + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if filter is not None: + _params["filter"] = _SERIALIZER.query("filter", filter, "str") + if message_ttl_seconds is not None: + _params["messageTtlSeconds"] = _SERIALIZER.query( + "message_ttl_seconds", message_ttl_seconds, "int", maximum=300, minimum=0 + ) + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_web_pub_sub_service_close_connection_request( # pylint: disable=name-too-long + connection_id: str, hub: str, *, reason: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/connections/{connectionId}" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "connectionId": _SERIALIZER.url("connection_id", connection_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if reason is not None: + _params["reason"] = _SERIALIZER.query("reason", reason, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_web_pub_sub_service_connection_exists_request( # pylint: disable=name-too-long + connection_id: str, hub: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + # Construct URL + _url = "/api/hubs/{hub}/connections/{connectionId}" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "connectionId": _SERIALIZER.url("connection_id", connection_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, **kwargs) + + +def build_web_pub_sub_service_send_to_connection_request( # pylint: disable=name-too-long + connection_id: str, hub: str, *, content: IO[bytes], message_ttl_seconds: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/connections/{connectionId}/:send" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "connectionId": _SERIALIZER.url("connection_id", connection_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if message_ttl_seconds is not None: + _params["messageTtlSeconds"] = _SERIALIZER.query( + "message_ttl_seconds", message_ttl_seconds, "int", maximum=300, minimum=0 + ) + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_web_pub_sub_service_remove_connection_from_all_groups_request( # pylint: disable=name-too-long + connection_id: str, hub: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/connections/{connectionId}/groups" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "connectionId": _SERIALIZER.url("connection_id", connection_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_web_pub_sub_service_group_exists_request( # pylint: disable=name-too-long + group: str, hub: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + # Construct URL + _url = "/api/hubs/{hub}/groups/{group}" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "group": _SERIALIZER.url("group", group, "str", max_length=1024, min_length=1, pattern=r"^(?!\s+$).+$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, **kwargs) + + +def build_web_pub_sub_service_close_group_connections_request( # pylint: disable=name-too-long + group: str, hub: str, *, excluded: Optional[List[str]] = None, reason: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/groups/{group}/:closeConnections" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "group": _SERIALIZER.url("group", group, "str", max_length=1024, min_length=1, pattern=r"^(?!\s+$).+$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if excluded is not None: + _params["excluded"] = [_SERIALIZER.query("excluded", q, "str") if q is not None else "" for q in excluded] + if reason is not None: + _params["reason"] = _SERIALIZER.query("reason", reason, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_web_pub_sub_service_send_to_group_request( # pylint: disable=name-too-long + group: str, + hub: str, + *, + content: IO[bytes], + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + message_ttl_seconds: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/groups/{group}/:send" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "group": _SERIALIZER.url("group", group, "str", max_length=1024, min_length=1, pattern=r"^(?!\s+$).+$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if excluded is not None: + _params["excluded"] = [_SERIALIZER.query("excluded", q, "str") if q is not None else "" for q in excluded] + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if filter is not None: + 
_params["filter"] = _SERIALIZER.query("filter", filter, "str") + if message_ttl_seconds is not None: + _params["messageTtlSeconds"] = _SERIALIZER.query( + "message_ttl_seconds", message_ttl_seconds, "int", maximum=300, minimum=0 + ) + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_web_pub_sub_service_list_connections_request( # pylint: disable=name-too-long + group: str, + hub: str, + *, + maxpagesize: Optional[int] = None, + top: Optional[int] = None, + continuation_token_parameter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/groups/{group}/connections" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "group": _SERIALIZER.url("group", group, "str", max_length=1024, min_length=1, pattern=r"^(?!\s+$).+$"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int", maximum=200, minimum=1) + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int", maximum=2147483647, minimum=1) + if continuation_token_parameter is not None: + _params["continuationToken"] = _SERIALIZER.query( + "continuation_token_parameter", continuation_token_parameter, "str" + ) + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_web_pub_sub_service_remove_connection_from_group_request( # pylint: disable=name-too-long + group: str, connection_id: str, hub: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/groups/{group}/connections/{connectionId}" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "group": _SERIALIZER.url("group", group, "str", max_length=1024, min_length=1, pattern=r"^(?!\s+$).+$"), + "connectionId": _SERIALIZER.url("connection_id", connection_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_web_pub_sub_service_add_connection_to_group_request( # pylint: disable=name-too-long + group: str, connection_id: str, hub: 
str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/groups/{group}/connections/{connectionId}" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "group": _SERIALIZER.url("group", group, "str", max_length=1024, min_length=1, pattern=r"^(?!\s+$).+$"), + "connectionId": _SERIALIZER.url("connection_id", connection_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_web_pub_sub_service_revoke_permission_request( # pylint: disable=name-too-long + permission: str, connection_id: str, hub: str, *, target_name: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/permissions/{permission}/connections/{connectionId}" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "permission": _SERIALIZER.url("permission", permission, "str"), + "connectionId": _SERIALIZER.url("connection_id", connection_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if target_name is not None: + _params["targetName"] = _SERIALIZER.query("target_name", target_name, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_web_pub_sub_service_has_permission_request( # pylint: disable=name-too-long + permission: str, connection_id: str, hub: str, *, target_name: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + # Construct URL + _url = "/api/hubs/{hub}/permissions/{permission}/connections/{connectionId}" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "permission": _SERIALIZER.url("permission", permission, "str"), + "connectionId": _SERIALIZER.url("connection_id", connection_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if target_name is not None: + _params["targetName"] = _SERIALIZER.query("target_name", target_name, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, **kwargs) + + +def 
build_web_pub_sub_service_grant_permission_request( # pylint: disable=name-too-long + permission: str, connection_id: str, hub: str, *, target_name: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/permissions/{permission}/connections/{connectionId}" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "permission": _SERIALIZER.url("permission", permission, "str"), + "connectionId": _SERIALIZER.url("connection_id", connection_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if target_name is not None: + _params["targetName"] = _SERIALIZER.query("target_name", target_name, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_web_pub_sub_service_user_exists_request( # pylint: disable=name-too-long + user_id: str, hub: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + # Construct URL + _url = "/api/hubs/{hub}/users/{userId}" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "userId": _SERIALIZER.url("user_id", user_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, **kwargs) + + +def build_web_pub_sub_service_close_user_connections_request( # pylint: disable=name-too-long + user_id: str, hub: str, *, excluded: Optional[List[str]] = None, reason: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/users/{userId}/:closeConnections" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "userId": _SERIALIZER.url("user_id", user_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + if excluded is not None: + _params["excluded"] = [_SERIALIZER.query("excluded", q, "str") if q is not None else "" for q in excluded] + if reason is not None: + _params["reason"] = _SERIALIZER.query("reason", reason, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_web_pub_sub_service_send_to_user_request( # pylint: disable=name-too-long + user_id: str, + hub: str, + *, + content: IO[bytes], + filter: Optional[str] = None, + message_ttl_seconds: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/users/{userId}/:send" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "userId": _SERIALIZER.url("user_id", user_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if filter is not None: + _params["filter"] = _SERIALIZER.query("filter", filter, "str") + if message_ttl_seconds is not None: + _params["messageTtlSeconds"] = _SERIALIZER.query( + "message_ttl_seconds", message_ttl_seconds, "int", maximum=300, minimum=0 + ) + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, content=content, **kwargs) + + +def build_web_pub_sub_service_remove_user_from_all_groups_request( # pylint: disable=name-too-long + user_id: str, hub: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/users/{userId}/groups" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "userId": _SERIALIZER.url("user_id", user_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_web_pub_sub_service_remove_user_from_group_request( # pylint: disable=name-too-long + group: str, user_id: str, hub: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/users/{userId}/groups/{group}" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "group": _SERIALIZER.url("group", group, "str", max_length=1024, min_length=1, pattern=r"^(?!\s+$).+$"), + "userId": _SERIALIZER.url("user_id", user_id, "str", min_length=1), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_web_pub_sub_service_add_user_to_group_request( # pylint: disable=name-too-long + group: str, user_id: str, hub: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/api/hubs/{hub}/users/{userId}/groups/{group}" + path_format_arguments = { + "hub": _SERIALIZER.url("hub", hub, "str", pattern=r"^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$"), + "group": _SERIALIZER.url("group", group, "str", max_length=1024, min_length=1, pattern=r"^(?!\s+$).+$"), + "userId": _SERIALIZER.url("user_id", user_id, "str", min_length=1), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class WebPubSubServiceClientOperationsMixin( # pylint: disable=too-many-public-methods + ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], WebPubSubServiceClientConfiguration] +): + + @distributed_trace + def close_all_connections( # pylint: disable=inconsistent-return-statements + self, *, excluded: Optional[List[str]] = None, reason: Optional[str] = None, **kwargs: Any + ) -> None: + """Close the connections in the hub. + + Close the connections in the hub. + + :keyword excluded: Exclude these connectionIds when closing the connections in the hub. Default + value is None. + :paramtype excluded: list[str] + :keyword reason: The reason closing the client connection. Default value is None. 
+ :paramtype reason: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_close_all_connections_request( + hub=self._config.hub, + excluded=excluded, + reason=reason, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def get_client_access_token( + self, + *, + user_id: Optional[str] = None, + roles: Optional[List[str]] = None, + minutes_to_expire: int = 60, + groups: Optional[List[str]] = None, + client_protocol: str = "Default", + **kwargs: Any + ) -> JSON: + """Generate token for the client to connect Azure Web PubSub service. + + Generate token for the client to connect Azure Web PubSub service. + + :keyword user_id: User Id. Default value is None. + :paramtype user_id: str + :keyword roles: Roles that the connection with the generated token will have. Default value is + None. + :paramtype roles: list[str] + :keyword minutes_to_expire: The expire time of the generated token. Default value is 60. + :paramtype minutes_to_expire: int + :keyword groups: Groups that the connection will join when it connects. Default value is None. + :paramtype groups: list[str] + :keyword client_protocol: The type of client. Case-insensitive. If not set, it's "Default". For + Web PubSub for Socket.IO, only the default value is supported. For Web PubSub, the valid values + are 'Default' and 'MQTT'. Known values are: "Default" and "MQTT". Default value is "Default". + :paramtype client_protocol: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. 
code-block:: python + + # response body for status code(s): 200 + response == { + "token": "str" + } + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[JSON] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_get_client_access_token_request( + hub=self._config.hub, + user_id=user_id, + roles=roles, + minutes_to_expire=minutes_to_expire, + groups=groups, + client_protocol=client_protocol, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if response.content: + deserialized = response.json() + else: + deserialized = None + + if cls: + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore + + return cast(JSON, deserialized) # type: ignore + + @distributed_trace + def send_to_all( # pylint: disable=inconsistent-return-statements + self, + message: IO[bytes], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + message_ttl_seconds: Optional[int] = None, + **kwargs: Any + ) -> None: + """Broadcast content inside request body to all the connected client connections. + + Broadcast content inside request body to all the connected client connections. + + :param message: The payload body. Required. + :type message: IO[bytes] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword message_ttl_seconds: The time-to-live (TTL) value in seconds for messages sent to the + service. 0 is the default value, which means the message never expires. 300 is the maximum + value. If this parameter is non-zero, messages that are not consumed by the client within the + specified TTL will be dropped by the service. This parameter can help when the client's + bandwidth is limited. Default value is None. 
+ :paramtype message_ttl_seconds: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = message + + _request = build_web_pub_sub_service_send_to_all_request( + hub=self._config.hub, + excluded=excluded, + filter=filter, + message_ttl_seconds=message_ttl_seconds, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def close_connection( # pylint: disable=inconsistent-return-statements + self, connection_id: str, *, reason: Optional[str] = None, **kwargs: Any + ) -> None: + """Close the client connection. + + Close the client connection. + + :param connection_id: Target connection Id. Required. + :type connection_id: str + :keyword reason: The reason closing the client connection. Default value is None. + :paramtype reason: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_close_connection_request( + connection_id=connection_id, + hub=self._config.hub, + reason=reason, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def connection_exists(self, connection_id: str, **kwargs: Any) -> bool: + """Check if the connection with the given connectionId exists. 
+ + Check if the connection with the given connectionId exists. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_connection_exists_request( + connection_id=connection_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def send_to_connection( # pylint: disable=inconsistent-return-statements + self, connection_id: str, message: IO[bytes], *, message_ttl_seconds: Optional[int] = None, **kwargs: Any + ) -> None: + """Send content inside request body to the specific connection. + + Send content inside request body to the specific connection. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :param message: The payload body. Required. + :type message: IO[bytes] + :keyword message_ttl_seconds: The time-to-live (TTL) value in seconds for messages sent to the + service. 0 is the default value, which means the message never expires. 300 is the maximum + value. If this parameter is non-zero, messages that are not consumed by the client within the + specified TTL will be dropped by the service. This parameter can help when the client's + bandwidth is limited. Default value is None. 
+ :paramtype message_ttl_seconds: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = message + + _request = build_web_pub_sub_service_send_to_connection_request( + connection_id=connection_id, + hub=self._config.hub, + message_ttl_seconds=message_ttl_seconds, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def remove_connection_from_all_groups( # pylint: disable=inconsistent-return-statements + self, connection_id: str, **kwargs: Any + ) -> None: + """Remove a connection from all groups. + + Remove a connection from all groups. + + :param connection_id: Target connection Id. Required. + :type connection_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_remove_connection_from_all_groups_request( + connection_id=connection_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def group_exists(self, group: str, **kwargs: Any) -> bool: + """Check if there are any client connections inside the given group. + + Check if there are any client connections inside the given group. 
+ + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_group_exists_request( + group=group, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def close_group_connections( # pylint: disable=inconsistent-return-statements + self, group: str, *, excluded: Optional[List[str]] = None, reason: Optional[str] = None, **kwargs: Any + ) -> None: + """Close connections in the specific group. + + Close connections in the specific group. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :keyword excluded: Exclude these connectionIds when closing the connections in the group. + Default value is None. + :paramtype excluded: list[str] + :keyword reason: The reason closing the client connection. Default value is None. 
+ :paramtype reason: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_close_group_connections_request( + group=group, + hub=self._config.hub, + excluded=excluded, + reason=reason, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def send_to_group( # pylint: disable=inconsistent-return-statements + self, + group: str, + message: IO[bytes], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + message_ttl_seconds: Optional[int] = None, + **kwargs: Any + ) -> None: + """Send content inside request body to a group of connections. + + Send content inside request body to a group of connections. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param message: The payload body. Required. + :type message: IO[bytes] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword message_ttl_seconds: The time-to-live (TTL) value in seconds for messages sent to the + service. 0 is the default value, which means the message never expires. 300 is the maximum + value. If this parameter is non-zero, messages that are not consumed by the client within the + specified TTL will be dropped by the service. This parameter can help when the client's + bandwidth is limited. Default value is None. 
+ :paramtype message_ttl_seconds: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = message + + _request = build_web_pub_sub_service_send_to_group_request( + group=group, + hub=self._config.hub, + excluded=excluded, + filter=filter, + message_ttl_seconds=message_ttl_seconds, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list_connections( + self, + group: str, + *, + top: Optional[int] = None, + continuation_token_parameter: Optional[str] = None, + **kwargs: Any + ) -> ItemPaged[JSON]: + """List connections in a group. + + List connections in a group. + + :param group: Target group name, whose length should be greater than 0 and less than 1025. + Required. + :type group: str + :keyword top: The maximum number of connections to return. If the value is not set, then all + the connections in a group are returned. Default value is None. + :paramtype top: int + :keyword continuation_token_parameter: A token that allows the client to retrieve the next page + of results. This parameter is provided by the service in the response of a previous request + when there are additional results to be fetched. Clients should include the continuationToken + in the next request to receive the subsequent page of data. If this parameter is omitted, the + server will return the first page of results. Default value is None. + :paramtype continuation_token_parameter: str + :return: An iterator like instance of JSON object + :rtype: ~azure.core.paging.ItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. 
code-block:: python + + # response body for status code(s): 200 + response == { + "connectionId": "str", + "userId": "str" + } + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[JSON] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_web_pub_sub_service_list_connections_request( + group=group, + hub=self._config.hub, + maxpagesize=maxpagesize, + top=top, + continuation_token_parameter=continuation_token_parameter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = deserialized.get("value", []) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def remove_connection_from_group( # pylint: disable=inconsistent-return-statements + self, group: str, connection_id: str, **kwargs: Any + ) -> None: + """Remove a connection from the target group. + + Remove a connection from the target group. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param connection_id: Target connection Id. Required. 
+ :type connection_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_remove_connection_from_group_request( + group=group, + connection_id=connection_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def add_connection_to_group( # pylint: disable=inconsistent-return-statements + self, group: str, connection_id: str, **kwargs: Any + ) -> None: + """Add a connection to the target group. + + Add a connection to the target group. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param connection_id: Target connection Id. Required. + :type connection_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_add_connection_to_group_request( + group=group, + connection_id=connection_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def revoke_permission( # pylint: disable=inconsistent-return-statements + self, permission: str, connection_id: str, *, target_name: Optional[str] = None, **kwargs: Any + ) -> None: + """Revoke permission for the connection. + + Revoke permission for the connection. 
+ + :param permission: The permission: current supported actions are joinLeaveGroup and + sendToGroup. Known values are: "sendToGroup" and "joinLeaveGroup". Required. + :type permission: str + :param connection_id: Target connection Id. Required. + :type connection_id: str + :keyword target_name: The meaning of the target depends on the specific permission. For + joinLeaveGroup and sendToGroup, targetName is a required parameter standing for the group name. + Default value is None. + :paramtype target_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_revoke_permission_request( + permission=permission, + connection_id=connection_id, + hub=self._config.hub, + target_name=target_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def has_permission( + self, permission: str, connection_id: str, *, target_name: Optional[str] = None, **kwargs: Any + ) -> bool: + """Check if a connection has permission to the specified action. + + Check if a connection has permission to the specified action. + + :param permission: The permission: current supported actions are joinLeaveGroup and + sendToGroup. Known values are: "sendToGroup" and "joinLeaveGroup". Required. + :type permission: str + :param connection_id: Target connection Id. Required. + :type connection_id: str + :keyword target_name: The meaning of the target depends on the specific permission. For + joinLeaveGroup and sendToGroup, targetName is a required parameter standing for the group name. + Default value is None. 
+ :paramtype target_name: str + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_has_permission_request( + permission=permission, + connection_id=connection_id, + hub=self._config.hub, + target_name=target_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def grant_permission( # pylint: disable=inconsistent-return-statements + self, permission: str, connection_id: str, *, target_name: Optional[str] = None, **kwargs: Any + ) -> None: + """Grant permission to the connection. + + Grant permission to the connection. + + :param permission: The permission: current supported actions are joinLeaveGroup and + sendToGroup. Known values are: "sendToGroup" and "joinLeaveGroup". Required. + :type permission: str + :param connection_id: Target connection Id. Required. + :type connection_id: str + :keyword target_name: The meaning of the target depends on the specific permission. For + joinLeaveGroup and sendToGroup, targetName is a required parameter standing for the group name. + Default value is None. 
+ :paramtype target_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_grant_permission_request( + permission=permission, + connection_id=connection_id, + hub=self._config.hub, + target_name=target_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def user_exists(self, user_id: str, **kwargs: Any) -> bool: + """Check if there are any client connections connected for the given user. + + Check if there are any client connections connected for the given user. + + :param user_id: Target user Id. Required. + :type user_id: str + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_user_exists_request( + user_id=user_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def close_user_connections( # pylint: disable=inconsistent-return-statements + self, user_id: str, *, excluded: Optional[List[str]] = None, reason: Optional[str] = None, **kwargs: Any + ) -> None: + """Close connections for the specific user. + + Close connections for the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :keyword excluded: Exclude these connectionIds when closing the connections for the user. + Default value is None. 
+ :paramtype excluded: list[str] + :keyword reason: The reason closing the client connection. Default value is None. + :paramtype reason: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_close_user_connections_request( + user_id=user_id, + hub=self._config.hub, + excluded=excluded, + reason=reason, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def send_to_user( # pylint: disable=inconsistent-return-statements + self, + user_id: str, + message: IO[bytes], + *, + filter: Optional[str] = None, + message_ttl_seconds: Optional[int] = None, + **kwargs: Any + ) -> None: + """Send content inside request body to the specific user. + + Send content inside request body to the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :param message: The payload body. Required. + :type message: IO[bytes] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword message_ttl_seconds: The time-to-live (TTL) value in seconds for messages sent to the + service. 0 is the default value, which means the message never expires. 300 is the maximum + value. If this parameter is non-zero, messages that are not consumed by the client within the + specified TTL will be dropped by the service. This parameter can help when the client's + bandwidth is limited. Default value is None. 
+ :paramtype message_ttl_seconds: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = message + + _request = build_web_pub_sub_service_send_to_user_request( + user_id=user_id, + hub=self._config.hub, + filter=filter, + message_ttl_seconds=message_ttl_seconds, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def remove_user_from_all_groups( # pylint: disable=inconsistent-return-statements + self, user_id: str, **kwargs: Any + ) -> None: + """Remove a user from all groups. + + Remove a user from all groups. + + :param user_id: Target user Id. Required. + :type user_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_remove_user_from_all_groups_request( + user_id=user_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def remove_user_from_group( # pylint: disable=inconsistent-return-statements + self, group: str, user_id: str, **kwargs: Any + ) -> None: + """Remove a user from the target group. + + Remove a user from the target group. + + :param group: Target group name, which length should be greater than 0 and less than 1025. 
+ Required. + :type group: str + :param user_id: Target user Id. Required. + :type user_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_remove_user_from_group_request( + group=group, + user_id=user_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def add_user_to_group( # pylint: disable=inconsistent-return-statements + self, group: str, user_id: str, **kwargs: Any + ) -> None: + """Add a user to the target group. + + Add a user to the target group. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param user_id: Target user Id. Required. 
+ :type user_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_add_user_to_group_request( + group=group, + user_id=user_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/_patch.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/_patch.py new file mode 100644 index 00000000000..41629475b5c --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_operations/_patch.py @@ -0,0 +1,877 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import Any, List, IO, Optional, Union, overload +from datetime import datetime, timedelta, tzinfo +import jwt +from azure.core.credentials import AzureKeyCredential +from azure.core.paging import ItemPaged +from azure.core.tracing.decorator import distributed_trace +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.utils import case_insensitive_dict +from ._operations import ( + WebPubSubServiceClientOperationsMixin as WebPubSubServiceClientOperationsMixinGenerated, + JSON, + build_web_pub_sub_service_send_to_all_request, + build_web_pub_sub_service_send_to_connection_request, + build_web_pub_sub_service_send_to_user_request, + build_web_pub_sub_service_send_to_group_request, +) +from .._models import GroupMember + + +class _UTC_TZ(tzinfo): + """from https://docs.python.org/2/library/datetime.html#tzinfo-objects""" + + ZERO = timedelta(0) + + def utcoffset(self, dt): + return self.__class__.ZERO + + def tzname(self, dt): + return "UTC" + + def dst(self, dt): + return self.__class__.ZERO + + +def get_token_by_key(endpoint: str, path: str, hub: str, key: str, **kwargs: Any) -> str: + """build token with access key. + :param endpoint: HTTPS endpoint for the WebPubSub service instance. + :type endpoint: str + :param path: HTTPS path for the WebPubSub service instance. 
+    :type path: str
+    :param hub: The hub to give access to.
+    :type hub: str
+    :param key: The access key.
+    :type key: str
+    :keyword dict[str, any] jwt_headers: Any headers you want to pass to jwt encoding.
+    :returns: token
+    :rtype: str
+    """
+    audience = endpoint + path + hub
+    user = kwargs.pop("user_id", None)
+    ttl = timedelta(minutes=kwargs.pop("minutes_to_expire", 60))
+    roles = kwargs.pop("roles", [])
+    groups = kwargs.pop("groups", [])
+
+    payload = {
+        "aud": audience,
+        "iat": datetime.now(tz=_UTC_TZ()),
+        "exp": datetime.now(tz=_UTC_TZ()) + ttl,
+    }
+    if user:
+        payload["sub"] = user
+    if roles:
+        payload["role"] = roles
+    if groups:
+        payload["webpubsub.group"] = groups
+    encoded = jwt.encode(payload, key, algorithm="HS256", headers=kwargs.pop("jwt_headers", {}))
+    return encoded
+
+
+class WebPubSubServiceClientOperationsMixin(WebPubSubServiceClientOperationsMixinGenerated):
+    @distributed_trace
+    def get_client_access_token(self, *, client_protocol: Optional[str] = "Default", **kwargs: Any) -> JSON:
+        """Build an authentication token.
+
+        :keyword user_id: User Id.
+        :paramtype user_id: str
+        :keyword roles: Roles that the connection with the generated token will have.
+        :paramtype roles: list[str]
+        :keyword minutes_to_expire: The expire time of the generated token.
+        :paramtype minutes_to_expire: int
+        :keyword dict[str, any] jwt_headers: Any headers you want to pass to jwt encoding.
+        :keyword groups: Groups that the connection will join when it connects. Default value is None.
+        :paramtype groups: list[str]
+        :keyword client_protocol: The type of client protocol. Case-insensitive. If not set, it's "Default". For Web
+         PubSub for Socket.IO, the "SocketIO" type is supported. For Web PubSub, the valid values are
+         'Default' and 'MQTT'. Known values are: "Default", "MQTT" and "SocketIO". Default value is "Default".
+        :paramtype client_protocol: str
+        :returns: JSON response containing the web socket endpoint, the token and a URL with the generated access token.
+        :rtype: JSON
+
+        Example:
+
+        >>> get_client_access_token()
+        {
+            'baseUrl': 'wss://contoso.com/api/webpubsub/client/hubs/theHub',
+            'token': '...',
+            'url': 'wss://contoso.com/api/webpubsub/client/hubs/theHub?access_token=...'
+ } + """ + endpoint = self._config.endpoint.lower() + if not endpoint.startswith("http://") and not endpoint.startswith("https://"): + raise ValueError( + "Invalid endpoint: '{}' has unknown scheme - expected 'http://' or 'https://'".format(endpoint) + ) + # Ensure endpoint has no trailing slash + + endpoint = endpoint.rstrip("/") + + # Switch from http(s) to ws(s) scheme + + client_endpoint = "ws" + endpoint[4:] + hub = self._config.hub + # Example URL for Default Client Type: https://.webpubsub.azure.com/client/hubs/ + # MQTT Client Type: https://.webpubsub.azure.com/clients/mqtt/hubs/ + # SocketIO Client Type: https://.webpubsub.azure.com/clients/socketio/hubs/ + path = "/client/hubs/" + if client_protocol.lower() == "mqtt": + path = "/clients/mqtt/hubs/" + elif client_protocol.lower() == "socketio": + path = "/clients/socketio/hubs/" + client_url = client_endpoint + path + hub + jwt_headers = kwargs.pop("jwt_headers", {}) + if isinstance(self._config.credential, AzureKeyCredential): + token = get_token_by_key( + endpoint, path, hub, self._config.credential.key, jwt_headers=jwt_headers, **kwargs + ) + else: + token = super().get_client_access_token(client_protocol=client_protocol, **kwargs).get("token") + return { + "baseUrl": client_url, + "token": token, + "url": "{}?access_token={}".format(client_url, token), + } + + get_client_access_token.metadata = {"url": "/api/hubs/{hub}/:generateToken"} # type: ignore + + @distributed_trace + def list_connections( + self, + *, + group: str, + top: Optional[int] = None, + **kwargs: Any + ) -> ItemPaged[GroupMember]: + """List connections in a group. + + List connections in a group. + + :keyword group: Target group name, whose length should be greater than 0 and less than 1025. + Required. + :paramtype group: str + :keyword top: The maximum number of connections to return. If the value is not set, then all + the connections in a group are returned. Default value is None. + :paramtype top: int + :return: An iterator like instance of GroupMember object + :rtype: ~azure.core.paging.ItemPaged[GroupMember] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + connections = client.list_connections( + group="group_name", + top=100 + ) + + for member in connections: + assert member.connection_id is not None + + """ + # Call the base implementation to get ItemPaged[dict] + paged_json = super().list_connections( + group=group, + top=top, + **kwargs + ) + + # Wrap the iterator to convert each item to GroupMember + class GroupMemberPaged(ItemPaged): + def __iter__(self_inner): + for item in paged_json: + yield GroupMember( + connection_id=item.get("connectionId"), + user_id=item.get("userId") + ) + + def by_page(self_inner, continuation_token: Optional[str] = None): + for page in paged_json.by_page(continuation_token=continuation_token): + yield [ + GroupMember( + connection_id=item.get("connectionId"), + user_id=item.get("userId") + ) + for item in page + ] + return GroupMemberPaged() + + @overload + def send_to_all( # pylint: disable=inconsistent-return-statements + self, + message: Union[str, JSON], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "application/json", + **kwargs: Any + ) -> None: + """Broadcast content inside request body to all the connected client connections. + + Broadcast content inside request body to all the connected client connections. + + :param message: The payload body. Required. 
+ :type message: Union[str, JSON] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def send_to_all( # pylint: disable=inconsistent-return-statements + self, + message: str, + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "text/plain", + **kwargs: Any + ) -> None: + """Broadcast content inside request body to all the connected client connections. + + Broadcast content inside request body to all the connected client connections. + + :param message: The payload body. Required. + :type message: str + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def send_to_all( # pylint: disable=inconsistent-return-statements + self, + message: IO, + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "application/octet-stream", + **kwargs: Any + ) -> None: + """Broadcast content inside request body to all the connected client connections. + + Broadcast content inside request body to all the connected client connections. + + :param message: The payload body. Required. + :type message: IO + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def send_to_all( # pylint: disable=inconsistent-return-statements + self, + message: Union[IO, str, JSON], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any + ) -> None: + """Broadcast content inside request body to all the connected client connections. + + Broadcast content inside request body to all the connected client connections. + + :param message: The payload body. Required. + :type message: Union[IO, str, JSON] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. 
Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = _headers.pop("Content-Type", "application/json") if content_type is None else content_type + cls = kwargs.pop("cls", None) # type: ClsType[None] + + _json = None + _content = None + content_type = content_type or "" + if content_type.split(";")[0] in ["application/json"]: + _json = message + elif content_type.split(";")[0] in ["application/octet-stream", "text/plain"]: + _content = message + else: + raise ValueError( + "The content_type '{}' is not one of the allowed values: " + "['application/json', 'application/octet-stream', 'text/plain']".format(content_type) + ) + request = build_web_pub_sub_service_send_to_all_request( + hub=self._config.hub, + excluded=excluded, + filter=filter, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + json=_json, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + if cls: + return cls(pipeline_response, None, {}) + + @overload + def send_to_user( # pylint: disable=inconsistent-return-statements + self, + user_id: str, + message: Union[str, JSON], + *, + filter: Optional[str] = None, + content_type: Optional[str] = "application/json", + **kwargs: Any + ) -> None: + """Send content inside request body to the specific user. + + Send content inside request body to the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :param message: The payload body. Required. + :type message: Union[str, JSON] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def send_to_user( # pylint: disable=inconsistent-return-statements + self, + user_id: str, + message: str, + *, + filter: Optional[str] = None, + content_type: Optional[str] = "text/plain", + **kwargs: Any + ) -> None: + """Send content inside request body to the specific user. + + Send content inside request body to the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :param message: The payload body. Required. 
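# Sketch of how the content types accepted by send_to_all map onto calls; not part of
# the generated patch. It reuses the "client" constructed in the earlier sketch, and
# the payloads and connection id are placeholders.
import io

# A dict is sent as JSON, a str as text/plain, and a byte stream as octet-stream.
client.send_to_all({"from": "admin", "data": "hello"}, content_type="application/json")
client.send_to_all("plain text broadcast", content_type="text/plain")
client.send_to_all(io.BytesIO(b"\x00\x01\x02"), content_type="application/octet-stream")

# excluded and filter narrow the audience, e.g. skip a specific connection id.
client.send_to_all(
    {"data": "hi"},
    excluded=["connection-id-placeholder"],
    content_type="application/json",
)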
+ :type message: str + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def send_to_user( # pylint: disable=inconsistent-return-statements + self, + user_id: str, + message: IO, + *, + filter: Optional[str] = None, + content_type: Optional[str] = "application/octet-stream", + **kwargs: Any + ) -> None: + """Send content inside request body to the specific user. + + Send content inside request body to the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :param message: The payload body. Required. + :type message: IO + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def send_to_user( # pylint: disable=inconsistent-return-statements + self, + user_id: str, + message: Union[IO, str, JSON], + *, + filter: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any + ) -> None: + """Send content inside request body to the specific user. + + Send content inside request body to the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :param message: The payload body. Required. + :type message: Union[IO, str, JSON] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. 
Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = _headers.pop("Content-Type", "application/json") if content_type is None else content_type + cls = kwargs.pop("cls", None) # type: ClsType[None] + + _json = None + _content = None + content_type = content_type or "" + if content_type.split(";")[0] in ["application/json"]: + _json = message + elif content_type.split(";")[0] in ["application/octet-stream", "text/plain"]: + _content = message + else: + raise ValueError( + "The content_type '{}' is not one of the allowed values: " + "['application/json', 'application/octet-stream', 'text/plain']".format(content_type) + ) + request = build_web_pub_sub_service_send_to_user_request( + user_id=user_id, + hub=self._config.hub, + filter=filter, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + json=_json, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + if cls: + return cls(pipeline_response, None, {}) + + @overload + def send_to_group( # pylint: disable=inconsistent-return-statements + self, + group: str, + message: Union[str, JSON], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "application/json", + **kwargs: Any + ) -> None: + """Send content inside request body to a group of connections. + + Send content inside request body to a group of connections. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param message: The payload body. Required. + :type message: Union[str, JSON] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def send_to_group( # pylint: disable=inconsistent-return-statements + self, + group: str, + message: str, + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "text/plain", + **kwargs: Any + ) -> None: + """Send content inside request body to a group of connections. 
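# Companion sketch for send_to_user; not part of the generated patch. The user id,
# filter expression and payloads are placeholders, and "client" comes from the
# earlier sketch.
client.send_to_user("user-1", {"data": "direct message"}, content_type="application/json")

# An OData filter can restrict which of the user's connections receive the message.
client.send_to_user(
    "user-1",
    "text only",
    content_type="text/plain",
    filter="'group1' in groups",
)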
+ + Send content inside request body to a group of connections. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param message: The payload body. Required. + :type message: str + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def send_to_group( # pylint: disable=inconsistent-return-statements + self, + group: str, + message: IO, + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "application/octet-stream", + **kwargs: Any + ) -> None: + """Send content inside request body to a group of connections. + + Send content inside request body to a group of connections. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param message: The payload body. Required. + :type message: IO + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def send_to_group( # pylint: disable=inconsistent-return-statements + self, + group: str, + message: Union[IO, str, JSON], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any + ) -> None: + """Send content inside request body to a group of connections. + + Send content inside request body to a group of connections. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param message: The payload body. Required. + :type message: Union[IO, str, JSON] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. 
Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = _headers.pop("Content-Type", "application/json") if content_type is None else content_type + cls = kwargs.pop("cls", None) # type: ClsType[None] + + _json = None + _content = None + content_type = content_type or "" + if content_type.split(";")[0] in ["application/json"]: + _json = message + elif content_type.split(";")[0] in ["application/octet-stream", "text/plain"]: + _content = message + else: + raise ValueError( + "The content_type '{}' is not one of the allowed values: " + "['application/json', 'application/octet-stream', 'text/plain']".format(content_type) + ) + request = build_web_pub_sub_service_send_to_group_request( + group=group, + hub=self._config.hub, + excluded=excluded, + filter=filter, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + json=_json, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + if cls: + return cls(pipeline_response, None, {}) + + @overload + def send_to_connection( # pylint: disable=inconsistent-return-statements + self, + connection_id: str, + message: Union[str, JSON], + *, + content_type: Optional[str] = "application/json", + **kwargs: Any + ) -> None: + """Send content inside request body to the specific connection. + + Send content inside request body to the specific connection. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :param message: The payload body. Required. + :type message: Union[str, JSON] + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def send_to_connection( # pylint: disable=inconsistent-return-statements + self, connection_id: str, message: str, *, content_type: Optional[str] = "text/plain", **kwargs: Any + ) -> None: + """Send content inside request body to the specific connection. + + Send content inside request body to the specific connection. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :param message: The payload body. Required. + :type message: str + :keyword content_type: The content type of the payload. Default value is None. 
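# Sketch for send_to_group; not part of the generated patch. The group name, payloads
# and excluded connection id are placeholders, and "client" comes from the earlier sketch.
client.send_to_group("room-42", {"event": "joined", "user": "alice"}, content_type="application/json")

client.send_to_group(
    "room-42",
    "plain text for the group",
    content_type="text/plain",
    excluded=["connection-id-to-skip"],
)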
Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def send_to_connection( # pylint: disable=inconsistent-return-statements + self, + connection_id: str, + message: IO, + *, + content_type: Optional[str] = "application/octet-stream", + **kwargs: Any + ) -> None: + """Send content inside request body to the specific connection. + + Send content inside request body to the specific connection. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :param message: The payload body. Required. + :type message: IO + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def send_to_connection( # pylint: disable=inconsistent-return-statements + self, connection_id: str, message: Union[IO, str, JSON], *, content_type: Optional[str] = None, **kwargs: Any + ) -> None: + """Send content inside request body to the specific connection. + + Send content inside request body to the specific connection. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :param message: The payload body. Required. + :type message: Union[IO, str, JSON] + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = _headers.pop("Content-Type", "application/json") if content_type is None else content_type + cls = kwargs.pop("cls", None) # type: ClsType[None] + + _json = None + _content = None + content_type = content_type or "" + if content_type.split(";")[0] in ["application/json"]: + _json = message + elif content_type.split(";")[0] in ["application/octet-stream", "text/plain"]: + _content = message + else: + raise ValueError( + "The content_type '{}' is not one of the allowed values: " + "['application/json', 'application/octet-stream', 'text/plain']".format(content_type) + ) + request = build_web_pub_sub_service_send_to_connection_request( + connection_id=connection_id, + hub=self._config.hub, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + json=_json, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise 
HttpResponseError(response=response) + if cls: + return cls(pipeline_response, None, {}) + + +__all__: List[str] = [ + "WebPubSubServiceClientOperationsMixin" +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_patch.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_patch.py new file mode 100644 index 00000000000..c51fbcf511c --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_patch.py @@ -0,0 +1,202 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+#
+# --------------------------------------------------------------------------
+
+
+from typing import Any, TYPE_CHECKING, Optional, Union, Awaitable
+from datetime import datetime, timedelta
+import jwt
+
+from azure.core.pipeline import PipelineRequest
+from azure.core.pipeline.policies import SansIOHTTPPolicy, ProxyPolicy
+from azure.core.credentials import AzureKeyCredential
+
+from ._client import WebPubSubServiceClient as WebPubSubServiceClientGenerated
+from ._operations._patch import _UTC_TZ
+from ._models import GroupMember
+
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+
+    from azure.core.credentials import TokenCredential
+
+
+def _parse_connection_string(connection_string: str, **kwargs: Any) -> Any:
+    for segment in connection_string.split(";"):
+        if "=" in segment:
+            key, value = segment.split("=", 1)
+            key = key.lower()
+            if key not in ("version",):
+                kwargs.setdefault(key, value)
+        elif segment:
+            raise ValueError(
+                "Malformed connection string - expected 'key=value', found segment '{}' in '{}'".format(
+                    segment, connection_string
+                )
+            )
+    if "endpoint" not in kwargs:
+        raise ValueError("connection_string missing 'endpoint' field")
+    if "accesskey" not in kwargs:
+        raise ValueError("connection_string missing 'accesskey' field")
+    return kwargs
+
+
+class JwtCredentialPolicy(SansIOHTTPPolicy):
+
+    NAME_CLAIM_TYPE = "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name"
+
+    def __init__(
+        self,
+        credential: AzureKeyCredential,
+        *,
+        user: Optional[str] = None,
+        origin_endpoint: Optional[str] = None,
+        reverse_proxy_endpoint: Optional[str] = None,
+    ) -> None:
+        """Create a new instance of the policy associated with the given credential.
+
+        :param credential: The azure.core.credentials.AzureKeyCredential instance to use
+        :type credential: ~azure.core.credentials.AzureKeyCredential
+        :param user: Optional user name associated with the credential.
+        :type user: str
+        """
+        self._credential = credential
+        self._user = user
+        self._original_url = origin_endpoint
+        self._reverse_proxy_endpoint = reverse_proxy_endpoint
+
+    def on_request(self, request: PipelineRequest) -> Union[None, Awaitable[None]]:
+        """Is executed before sending the request from next policy.
+
+        :param request: Request to be modified before sent from next policy.
+        :type request: ~azure.core.pipeline.PipelineRequest
+        """
+        url = request.http_request.url
+        if self._reverse_proxy_endpoint:
+            url = url.replace(self._reverse_proxy_endpoint, self._original_url, 1)
+        request.http_request.headers["Authorization"] = "Bearer " + self._encode(url)
+        return super(JwtCredentialPolicy, self).on_request(request)
+
+    def _encode(self, url: str) -> str:
+        data = {
+            "aud": url,
+            "exp": datetime.now(tz=_UTC_TZ()) + timedelta(seconds=60),
+        }
+        if self._user:
+            data[self.NAME_CLAIM_TYPE] = self._user
+        encoded = jwt.encode(
+            payload=data,
+            key=self._credential.key,
+            algorithm="HS256",
+        )
+        return encoded
+
+
+class ApiManagementProxy(ProxyPolicy):
+    def __init__(self, **kwargs: Any) -> None:
+        """Create a new instance of the policy.
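# Minimal sketch of the token JwtCredentialPolicy issues above, written with PyJWT
# directly; not part of the generated patch. The audience is the request URL, the key
# is the Web PubSub access key, and the name claim is only added when a user is given.
# URL and key values are placeholders.
from datetime import datetime, timedelta, timezone
from typing import Optional
import jwt

def build_service_token(url: str, access_key: str, user: Optional[str] = None) -> str:
    payload = {"aud": url, "exp": datetime.now(tz=timezone.utc) + timedelta(seconds=60)}
    if user:
        payload["http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name"] = user
    return jwt.encode(payload=payload, key=access_key, algorithm="HS256")

token = build_service_token(
    "https://myinstance.webpubsub.azure.com/api/hubs/chat/:send",  # placeholder URL
    "access-key-placeholder",
    user="alice",
)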
+ + :param endpoint: endpoint to be replaced + :type endpoint: str + :param proxy_endpoint: proxy endpoint + :type proxy_endpoint: str + """ + super(ApiManagementProxy, self).__init__(**kwargs) + self._endpoint = kwargs.pop("origin_endpoint", None) + self._reverse_proxy_endpoint = kwargs.pop("reverse_proxy_endpoint", None) + + def on_request(self, request: PipelineRequest) -> None: + """Is executed before sending the request from next policy. + + :param request: Request to be modified before sent from next policy. + :type request: ~azure.core.pipeline.PipelineRequest + """ + super(ApiManagementProxy, self).on_request(request) + if self._endpoint and self._reverse_proxy_endpoint: + request.http_request.url = request.http_request.url.replace(self._endpoint, self._reverse_proxy_endpoint) + + +class WebPubSubServiceClientBase: + """Base class for init""" + + def __init__(self, endpoint: str, hub: str, credential, **kwargs) -> None: + if kwargs.get("port") and endpoint: + endpoint = endpoint.rstrip("/") + ":{}".format(kwargs.pop("port")) + kwargs["origin_endpoint"] = endpoint + if isinstance(credential, AzureKeyCredential): + kwargs["authentication_policy"] = JwtCredentialPolicy( + credential, + user=kwargs.get("user"), + origin_endpoint=kwargs.get("origin_endpoint"), + reverse_proxy_endpoint=kwargs.get("reverse_proxy_endpoint"), + ) + kwargs["proxy_policy"] = kwargs.pop("proxy_policy", ApiManagementProxy(**kwargs)) + super().__init__(endpoint=endpoint, hub=hub, credential=credential, **kwargs) + + +class WebPubSubServiceClient(WebPubSubServiceClientBase, WebPubSubServiceClientGenerated): + """WebPubSubServiceClient. + + :param endpoint: HTTP or HTTPS endpoint for the Web PubSub service instance. + :type endpoint: str + :param hub: Target hub name, which should start with alphabetic characters and only contain + alpha-numeric characters or underscore. + :type hub: str + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential or ~azure.core.credentials.AzureKeyCredential + :keyword api_version: Api Version. The default value is "2021-10-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__( + self, endpoint: str, hub: str, credential: Union["TokenCredential", AzureKeyCredential], **kwargs: Any + ) -> None: + super().__init__(endpoint=endpoint, hub=hub, credential=credential, **kwargs) + + @classmethod + def from_connection_string(cls, connection_string: str, hub: str, **kwargs: Any) -> "WebPubSubServiceClient": + """Create a new WebPubSubServiceClient from a connection string. + + :param connection_string: Connection string + :type connection_string: str + :param hub: Target hub name, which should start with alphabetic characters and only contain + alpha-numeric characters or underscore. 
+ :type hub: str + :rtype: WebPubSubServiceClient + """ + kwargs = _parse_connection_string(connection_string, **kwargs) + + credential = AzureKeyCredential(kwargs.pop("accesskey")) + return cls(hub=hub, credential=credential, **kwargs) + + +__all__ = ["WebPubSubServiceClient", "GroupMember"] + + +def patch_sdk(): + pass diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_policies.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_policies.py deleted file mode 100644 index 6dc11981689..00000000000 --- a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_policies.py +++ /dev/null @@ -1,83 +0,0 @@ -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# -# -------------------------------------------------------------------------- - -import datetime -import typing -import jwt -import six - -from azure.core.pipeline.policies import SansIOHTTPPolicy - -from ._utils import UTC - -if typing.TYPE_CHECKING: - from azure.core.credentials import AzureKeyCredential - from azure.core.pipeline import PipelineRequest - - -class JwtCredentialPolicy(SansIOHTTPPolicy): - - NAME_CLAIM_TYPE = "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name" - - def __init__(self, credential, user=None): - # type: (AzureKeyCredential, typing.Optional[str]) -> None - """Create a new instance of the policy associated with the given credential. - - :param credential: The azure.core.credentials.AzureKeyCredential instance to use - :type credential: ~azure.core.credentials.AzureKeyCredential - :param user: Optional user name associated with the credential. - :type user: str - """ - self._credential = credential - self._user = user - - def on_request(self, request): - # type: (PipelineRequest) -> typing.Union[None, typing.Awaitable[None]] - """Is executed before sending the request from next policy. - - :param request: Request to be modified before sent from next policy. 
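# Usage sketch for from_connection_string above; not part of the generated patch. The
# connection string is a placeholder, and the import path assumes the public
# azure-messaging-webpubsubservice package rather than the vendored copy.
from azure.messaging.webpubsubservice import WebPubSubServiceClient

conn_str = "Endpoint=https://myinstance.webpubsub.azure.com;AccessKey=access-key-placeholder;Version=1.0;"
client = WebPubSubServiceClient.from_connection_string(conn_str, hub="chat")
access = client.get_client_access_token()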
- :type request: ~azure.core.pipeline.PipelineRequest - """ - request.http_request.headers["Authorization"] = "Bearer " + self._encode( - request.http_request.url - ) - return super(JwtCredentialPolicy, self).on_request(request) - - def _encode(self, url): - # type: (AzureKeyCredential) -> str - data = { - "aud": url, - "exp": datetime.datetime.now(tz=UTC) + datetime.timedelta(seconds=60), - } - if self._user: - data[self.NAME_CLAIM_TYPE] = self._user - - encoded = jwt.encode( - payload=data, - key=self._credential.key, - algorithm="HS256", - ) - return six.ensure_str(encoded) diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_serialization.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_serialization.py new file mode 100644 index 00000000000..8139854b97b --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_serialization.py @@ -0,0 +1,2000 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# pylint: skip-file +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + TypeVar, + MutableMapping, + Type, + List, + Mapping, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +ModelType = TypeVar("ModelType", bound="Model") +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... 
+ JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... 
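# Small sketch of the content-type dispatch in RawDeserializer above; illustrative only.
# JSON media types go through json.loads, text/* is returned unchanged, and a missing
# content-type header falls back to JSON in deserialize_from_http_generics.
assert RawDeserializer.deserialize_from_text('{"value": 1}', "application/json") == {"value": 1}
assert RawDeserializer.deserialize_from_text("hello", "text/plain") == "hello"
assert RawDeserializer.deserialize_from_http_generics('{"ok": true}', {}) == {"ok": True}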
+ else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + + +class UTC(datetime.tzinfo): + """Time Zone info for handling UTC""" + + def utcoffset(self, dt): + """UTF offset for UTC is 0.""" + return datetime.timedelta(0) + + def tzname(self, dt): + """Timestamp representation.""" + return "Z" + + def dst(self, dt): + """No daylight saving for UTC.""" + return datetime.timedelta(hours=1) + + +try: + from datetime import timezone as _FixedOffset # type: ignore +except ImportError: # Python 2.7 + + class _FixedOffset(datetime.tzinfo): # type: ignore + """Fixed offset in minutes east from UTC. + Copy/pasted from Python doc + :param datetime.timedelta offset: offset in timedelta format + """ + + def __init__(self, offset): + self.__offset = offset + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds() / 3600) + + def __repr__(self): + return "".format(self.tzname(None)) + + def dst(self, dt): + return datetime.timedelta(0) + + def __getinitargs__(self): + return (self.__offset,) + + +try: + from datetime import timezone + + TZ_UTC = timezone.utc +except ImportError: + TZ_UTC = UTC() # type: ignore + +_FLATTEN = re.compile(r"(? None: + self.additional_properties: Optional[Dict[str, Any]] = {} + for k in kwargs: + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes.""" + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes.""" + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node.""" + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. 
+ + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises: DeserializationError if something went wrong + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls: Type[ModelType], + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> ModelType: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param str content_type: JSON by default, set application/xml if XML. 
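# Sketch of the key_transformer hook described in as_dict above. ExampleModel is
# illustrative only and assumes the surrounding module's Model and
# full_restapi_key_transformer are in scope; it is not part of the generated file.
class ExampleModel(Model):
    _attribute_map = {"display_name": {"key": "properties.displayName", "type": "str"}}

    def __init__(self, *, display_name=None, **kwargs):
        super().__init__(**kwargs)
        self.display_name = display_name

m = ExampleModel(display_name="demo")
m.as_dict()                                               # {'display_name': 'demo'}
m.as_dict(key_transformer=full_restapi_key_transformer)   # {'properties': {'displayName': 'demo'}}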
+ :returns: An instance of this model + :raises: DeserializationError if something went wrong + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + Remove the polymorphic key from the initial data. + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + """ + return key.replace("\\.", ".") + + +class Serializer(object): + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None): + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize(self, target_obj, data_type=None, **kwargs): + """Serialize data into a string according to type. + + :param target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises: SerializationError if serialization fails. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() + try: + attributes = target_obj._attribute_map + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + else: + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. 
+ deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :keyword bool skip_quote: Whether to skip quote the serialized result. + Defaults to False. + :rtype: str, list + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :param bool required: Whether it's essential that the data not be + empty or None + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. 
+ """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + elif data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise SerializationError(msg.format(data, data_type)) from err + else: + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param data: Object to be serialized. + :param str data_type: Type of object in the iterable. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param data: Object to be serialized. + :rtype: str + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + else: + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list attr: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param bool required: Whether the objects in the iterable must + not be None or empty. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + :keyword bool do_quote: Whether to quote the serialized result of each iterable element. + Defaults to False. 
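# Quick sketch of the basic and iterable serializers above; the values are arbitrary
# and the snippet is illustrative, not part of the generated file.
s = Serializer()
s.serialize_data("2", "int")                        # -> 2
s.serialize_iter(["a", "b", "c"], "str", div=",")   # -> "a,b,c"
s.header("flag", True, "bool")                      # -> "true"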
+ :rtype: list, str + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :param bool required: Whether the objects in the dictionary must + not be None or empty. + :rtype: dict + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + elif obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) + + @staticmethod + def serialize_bytearray(attr, **kwargs): + """Serialize bytearray into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): + """Serialize str into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): + """Serialize Decimal object to float. + + :param attr: Object to be serialized. + :rtype: float + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): + """Serialize long (Py2) or int (Py3). + + :param attr: Object to be serialized. + :rtype: int/long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. 
+ :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: TypeError if format invalid. + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError: + raise TypeError("RFC1123 object must be valid Datetime object.") + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: SerializationError if format invalid. + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises: SerializationError if format invalid + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError: + raise TypeError("Unix time object must be valid Datetime object.") + + +def rest_key_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key.""" + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the 
complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + else: + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + else: # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer(object): + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None): + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): + """Call the deserializer on a model. 
+ + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + elif isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + else: + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. 
+ + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + """ + try: + return self(target_obj, data, content_type=content_type) + except: + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param raw_data: Data to be processed. + :param content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param response: The response model class. + :param d_attrs: The deserialized response attributes. 
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [k for k, v in response._validation.items() if v.get("readonly")] + const = [k for k, v in response._validation.items() if v.get("constant")] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) + + def deserialize_data(self, data, data_type): + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + else: + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + else: + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + else: + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + elif isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + elif attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + else: + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. 
If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :rtype: Decimal + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + else: + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :rtype: Date + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. 
Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + else: + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + else: + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :rtype: Datetime + :raises: DeserializationError if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise DeserializationError(msg) from err + else: + return date_obj diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils.py deleted file mode 100644 index 042b46dd884..00000000000 --- a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils.py +++ /dev/null @@ -1,45 +0,0 @@ -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# -# -------------------------------------------------------------------------- - -import datetime - - -class _UTC_TZ(datetime.tzinfo): - """from https://docs.python.org/2/library/datetime.html#tzinfo-objects""" - - ZERO = datetime.timedelta(0) - - def utcoffset(self, dt): - return self.__class__.ZERO - - def tzname(self, dt): - return "UTC" - - def dst(self, dt): - return self.__class__.ZERO - - -UTC = _UTC_TZ() diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/__init__.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/__init__.py new file mode 100644 index 00000000000..0af9b28f660 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/serialization.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/serialization.py new file mode 100644 index 00000000000..f5187701d7b --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/serialization.py @@ -0,0 +1,2032 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, + List, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. + :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. 
+                    _LOGGER.critical("Wasn't XML not JSON, failing")
+                    raise DeserializationError("XML is invalid") from err
+        elif content_type.startswith("text/"):
+            return data_as_str
+        raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
+
+    @classmethod
+    def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
+        """Deserialize from HTTP response.
+
+        Use bytes and headers to NOT use any requests/aiohttp or whatever
+        specific implementation.
+        Headers will tested for "content-type"
+
+        :param bytes body_bytes: The body of the response.
+        :param dict headers: The headers of the response.
+        :returns: The deserialized data.
+        :rtype: object
+        """
+        # Try to use content-type from headers if available
+        content_type = None
+        if "content-type" in headers:
+            content_type = headers["content-type"].split(";")[0].strip().lower()
+        # Ouch, this server did not declare what it sent...
+        # Let's guess it's JSON...
+        # Also, since Autorest was considering that an empty body was a valid JSON,
+        # need that test as well....
+        else:
+            content_type = "application/json"
+
+        if body_bytes:
+            return cls.deserialize_from_text(body_bytes, content_type)
+        return None
+
+
+_LOGGER = logging.getLogger(__name__)
+
+try:
+    _long_type = long  # type: ignore
+except NameError:
+    _long_type = int
+
+TZ_UTC = datetime.timezone.utc
+
+_FLATTEN = re.compile(r"(?<!\\)\.")
+
+
+def attribute_transformer(key, attr_desc, value):  # pylint: disable=unused-argument
+    """A key transformer that returns the Python attribute.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A key using attribute name
+    :rtype: str
+    """
+    return (key, value)
+
+
+def full_restapi_key_transformer(key, attr_desc, value):  # pylint: disable=unused-argument
+    """A key transformer that returns the full RestAPI key path.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: A list of keys using RestAPI syntax.
+    :rtype: list
+    """
+    keys = _decode_attribute_map_key(attr_desc["key"]).split(".")
+    return (keys, value)
+
+
+def last_restapi_key_transformer(key, attr_desc, value):
+    """A key transformer that returns the last RestAPI key.
+
+    :param str key: The attribute name
+    :param dict attr_desc: The attribute metadata
+    :param object value: The value
+    :returns: The last RestAPI key.
+    :rtype: str
+    """
+    key, value = full_restapi_key_transformer(key, attr_desc, value)
+    return (key[-1], value)
+
+
+def _create_xml_node(tag, prefix=None, ns=None):
+    """Create a XML node.
+
+    :param str tag: The tag name
+    :param str prefix: The prefix
+    :param str ns: The namespace
+    :returns: The XML node
+    :rtype: xml.etree.ElementTree.Element
+    """
+    if prefix and ns:
+        ET.register_namespace(prefix, ns)
+    if ns:
+        return ET.Element("{" + ns + "}" + tag)
+    return ET.Element(tag)
+
+
+class Model:
+    """Mixin for all client request body/response body models to support
+    serialization and deserialization.
+    """
+
+    _subtype_map: Dict[str, Dict[str, Any]] = {}
+    _attribute_map: Dict[str, Dict[str, Any]] = {}
+    _validation: Dict[str, Dict[str, Any]] = {}
+
+    def __init__(self, **kwargs: Any) -> None:
+        self.additional_properties: Optional[Dict[str, Any]] = {}
+        for k in kwargs:  # pylint: disable=consider-using-dict-items
+            if k not in self._attribute_map:
+                _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
+            elif k in self._validation and self._validation[k].get("readonly", False):
+                _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
+            else:
+                setattr(self, k, kwargs[k])
+
+    def __eq__(self, other: Any) -> bool:
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are equal
+        :rtype: bool
+        """
+        if isinstance(other, self.__class__):
+            return self.__dict__ == other.__dict__
+        return False
+
+    def __ne__(self, other: Any) -> bool:
+        """Compare objects by comparing all attributes.
+
+        :param object other: The object to compare
+        :returns: True if objects are not equal
+        :rtype: bool
+        """
+        return not self.__eq__(other)
+
+    def __str__(self) -> str:
+        return str(self.__dict__)
+
+    @classmethod
+    def enable_additional_properties_sending(cls) -> None:
+        cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
+
+    @classmethod
+    def is_xml_model(cls) -> bool:
+        try:
+            cls._xml_map  # type: ignore
+        except AttributeError:
+            return False
+        return True
+
+    @classmethod
+    def _create_xml_node(cls):
+        """Create XML node.
+
+        :returns: The XML node
+        :rtype: xml.etree.ElementTree.Element
+        """
+        try:
+            xml_map = cls._xml_map  # type: ignore
+        except AttributeError:
+            xml_map = {}
+
+        return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
+
+    def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
+        """Return the JSON that would be sent to server from this model.
+
+        This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
+
+        If you want XML serialization, you can pass the kwargs is_xml=True.
+ + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+ """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. 
+ :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec # pylint: disable=eval-used + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. 
+ :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. 
+ :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." 
in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. 
+ + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer: + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. 
+ """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. 
+ :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
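+ Note: attributes flagged as readonly or constant in the model's
+ _validation map are not passed to the constructor; readonly values are
+ set on the instance after it is created.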
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. 
+ :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. + :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises ValueError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. 
+ :return: Deserialized duration + :rtype: TimeDelta + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :return: Deserialized date + :rtype: Date + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :return: Deserialized time + :rtype: datetime.time + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. 
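+ For example, the value 1609459200 deserializes to
+ datetime.datetime(2021, 1, 1, 0, 0, tzinfo=datetime.timezone.utc).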
+ + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises DeserializationError: if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise DeserializationError(msg) from err + return date_obj diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/utils.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/utils.py new file mode 100644 index 00000000000..39b612f39a9 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_utils/utils.py @@ -0,0 +1,25 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +from typing import Generic, TYPE_CHECKING, TypeVar + +if TYPE_CHECKING: + from .serialization import Deserializer, Serializer + + +TClient = TypeVar("TClient") +TConfig = TypeVar("TConfig") + + +class ClientMixinABC(ABC, Generic[TClient, TConfig]): + """DO NOT use this class. It is for internal typing use only.""" + + _client: TClient + _config: TConfig + _serialize: "Serializer" + _deserialize: "Deserializer" diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_vendor.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_vendor.py new file mode 100644 index 00000000000..d74b846997b --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_vendor.py @@ -0,0 +1,26 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +from typing import TYPE_CHECKING + +from ._configuration import WebPubSubServiceClientConfiguration + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core import PipelineClient + + from ._serialization import Deserializer, Serializer + + +class WebPubSubServiceClientMixinABC(ABC): + """DO NOT use this class. 
It is for internal typing use only.""" + + _client: "PipelineClient" + _config: WebPubSubServiceClientConfiguration + _serialize: "Serializer" + _deserialize: "Deserializer" diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_version.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_version.py index ac9f392f513..596c8c8dcba 100644 --- a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_version.py +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/_version.py @@ -1,6 +1,10 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- -VERSION = "1.0.0b1" + +VERSION = "1.3.0" diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio.py deleted file mode 100644 index 0559416aa7b..00000000000 --- a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio.py +++ /dev/null @@ -1,110 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -__all__ = ["WebPubSubServiceClient"] - -from typing import TYPE_CHECKING -from copy import deepcopy - -import azure.core.pipeline as corepipeline -import azure.core.pipeline.policies as corepolicies -import azure.core.pipeline.transport as coretransport - -# Temporary location for types that eventually graduate to Azure Core -from .core import rest as corerest - -from ._policies import JwtCredentialPolicy - -if TYPE_CHECKING: - import azure.core.credentials as corecredentials - from azure.core.pipeline.policies import HTTPPolicy, SansIOHTTPPolicy - from typing import Any, List, cast # pylint: disable=ungrouped-imports - - -class WebPubSubServiceClient(object): - def __init__(self, endpoint, credential, **kwargs): - # type: (str, corecredentials.AzureKeyCredential, Any) -> None - """Create a new WebPubSubServiceClient instance - - :param endpoint: Endpoint to connect to. - :type endpoint: ~str - :param credential: Credentials to use to connect to endpoint. - :type credential: ~azure.core.credentials.AzureKeyCredential - :keyword api_version: Api version to use when communicating with the service. - :type api_version: str - :keyword user: User to connect as. Optional. 
- :type user: ~str - """ - self.endpoint = endpoint.rstrip("/") - transport = kwargs.pop("transport", None) or coretransport.RequestsTransport( - **kwargs - ) - policies = [ - corepolicies.HeadersPolicy(**kwargs), - corepolicies.UserAgentPolicy(**kwargs), - corepolicies.AsyncRetryPolicy(**kwargs), - corepolicies.ProxyPolicy(**kwargs), - corepolicies.CustomHookPolicy(**kwargs), - corepolicies.AsyncRedirectPolicy(**kwargs), - JwtCredentialPolicy(credential, kwargs.get("user", None)), - corepolicies.NetworkTraceLoggingPolicy(**kwargs), - ] # type: Any - self._pipeline = corepipeline.AsyncPipeline( - transport, - policies, - ) # type: corepipeline.AsyncPipeline - - def _format_url(self, url): - # type: (str) -> str - assert self.endpoint[-1] != "/", "My endpoint should not have a trailing slash" - return "/".join([self.endpoint, url.lstrip("/")]) - - async def send_request( - self, http_request: corerest.HttpRequest, **kwargs: "Any" - ) -> corerest.AsyncHttpResponse: - """Runs the network request through the client's chained policies. - - We have helper methods to create requests specific to this service in `azure.messaging.webpubsub.rest`. - Use these helper methods to create the request you pass to this method. See our example below: - - >>> from azure.messaging.webpubsub.rest import build_healthapi_get_health_status_request - >>> request = build_healthapi_get_health_status_request(api_version) - - >>> response = await client.send_request(request) - - - For more information on this code flow, see https://aka.ms/azsdk/python/llcwiki - - For advanced cases, you can also create your own :class:`~azure.messaging.webpubsub.core.rest.HttpRequest` - and pass it in. - - :param http_request: The network request you want to make. Required. - :type http_request: ~azure.messaging.webpubsub.core.rest.HttpRequest - :keyword bool stream_response: Whether the response payload will be streamed. Defaults to False. - :return: The response of your network call. Does not do error handling on your response. - :rtype: ~azure.messaging.webpubsub.core.rest.AsyncHttpResponse - """ - request_copy = deepcopy(http_request) - request_copy.url = self._format_url(request_copy.url) - - # can't do AsyncStreamContextManager yet. This client doesn't have a pipeline client, - # AsyncStreamContextManager requires a pipeline client. WIll look more into it - # if kwargs.pop("stream_response", False): - # return corerest._AsyncStreamContextManager( - # client=self._client, - # request=request_copy, - # ) - pipeline_response = await self._pipeline.run( - request_copy._internal_request, **kwargs # pylint: disable=protected-access - ) - response = corerest.AsyncHttpResponse( - status_code=pipeline_response.http_response.status_code, - request=request_copy, - _internal_response=pipeline_response.http_response, - ) - await response.read() - return response diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/__init__.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/__init__.py new file mode 100644 index 00000000000..1403cb686cd --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import WebPubSubServiceClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "WebPubSubServiceClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_client.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_client.py new file mode 100644 index 00000000000..e7606491304 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_client.py @@ -0,0 +1,103 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import AsyncPipelineClient +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest + +from .._utils.serialization import Deserializer, Serializer +from ._configuration import WebPubSubServiceClientConfiguration +from ._operations import WebPubSubServiceClientOperationsMixin + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class WebPubSubServiceClient(WebPubSubServiceClientOperationsMixin): + """WebPubSubServiceClient. + + :param hub: Target hub name, which should start with alphabetic characters and only contain + alpha-numeric characters or underscore. Required. + :type hub: str + :param endpoint: HTTP or HTTPS endpoint for the Web PubSub service instance. Required. + :type endpoint: str + :param credential: Credential needed for the client to connect to Azure. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: Api Version. Default value is "2024-12-01". Note that overriding this + default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, hub: str, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + _endpoint = "{endpoint}" + self._config = WebPubSubServiceClientConfiguration(hub=hub, endpoint=endpoint, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_configuration.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_configuration.py new file mode 100644 index 00000000000..e36ce191a02 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_configuration.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from .._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class WebPubSubServiceClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for WebPubSubServiceClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param hub: Target hub name, which should start with alphabetic characters and only contain + alpha-numeric characters or underscore. Required. + :type hub: str + :param endpoint: HTTP or HTTPS endpoint for the Web PubSub service instance. Required. + :type endpoint: str + :param credential: Credential needed for the client to connect to Azure. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: Api Version. Default value is "2024-12-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, hub: str, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-12-01") + + if hub is None: + raise ValueError("Parameter 'hub' must not be None.") + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.hub = hub + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://webpubsub.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "messaging-webpubsubservice/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/__init__.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/__init__.py new file mode 100644 index 00000000000..e1be16dd4a3 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/__init__.py @@ -0,0 +1,25 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import WebPubSubServiceClientOperationsMixin # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "WebPubSubServiceClientOperationsMixin", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/_operations.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/_operations.py new file mode 100644 index 00000000000..5d824d7ab99 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/_operations.py @@ -0,0 +1,1472 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, cast +import urllib.parse + +from azure.core import AsyncPipelineClient +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict + +from ..._operations._operations import ( + build_web_pub_sub_service_add_connection_to_group_request, + build_web_pub_sub_service_add_user_to_group_request, + build_web_pub_sub_service_close_all_connections_request, + build_web_pub_sub_service_close_connection_request, + build_web_pub_sub_service_close_group_connections_request, + build_web_pub_sub_service_close_user_connections_request, + build_web_pub_sub_service_connection_exists_request, + build_web_pub_sub_service_get_client_access_token_request, + build_web_pub_sub_service_grant_permission_request, + build_web_pub_sub_service_group_exists_request, + build_web_pub_sub_service_has_permission_request, + build_web_pub_sub_service_list_connections_request, + build_web_pub_sub_service_remove_connection_from_all_groups_request, + build_web_pub_sub_service_remove_connection_from_group_request, + build_web_pub_sub_service_remove_user_from_all_groups_request, + build_web_pub_sub_service_remove_user_from_group_request, + 
build_web_pub_sub_service_revoke_permission_request, + build_web_pub_sub_service_send_to_all_request, + build_web_pub_sub_service_send_to_connection_request, + build_web_pub_sub_service_send_to_group_request, + build_web_pub_sub_service_send_to_user_request, + build_web_pub_sub_service_user_exists_request, +) +from ..._utils.utils import ClientMixinABC +from .._configuration import WebPubSubServiceClientConfiguration + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class WebPubSubServiceClientOperationsMixin( # pylint: disable=too-many-public-methods + ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], WebPubSubServiceClientConfiguration] +): + + @distributed_trace_async + async def close_all_connections( + self, *, excluded: Optional[List[str]] = None, reason: Optional[str] = None, **kwargs: Any + ) -> None: + """Close the connections in the hub. + + Close the connections in the hub. + + :keyword excluded: Exclude these connectionIds when closing the connections in the hub. Default + value is None. + :paramtype excluded: list[str] + :keyword reason: The reason closing the client connection. Default value is None. + :paramtype reason: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_close_all_connections_request( + hub=self._config.hub, + excluded=excluded, + reason=reason, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def get_client_access_token( + self, + *, + user_id: Optional[str] = None, + roles: Optional[List[str]] = None, + minutes_to_expire: int = 60, + groups: Optional[List[str]] = None, + client_protocol: str = "Default", + **kwargs: Any + ) -> JSON: + """Generate token for the client to connect Azure Web PubSub service. + + Generate token for the client to connect Azure Web PubSub service. + + :keyword user_id: User Id. Default value is None. + :paramtype user_id: str + :keyword roles: Roles that the connection with the generated token will have. Default value is + None. + :paramtype roles: list[str] + :keyword minutes_to_expire: The expire time of the generated token. Default value is 60. + :paramtype minutes_to_expire: int + :keyword groups: Groups that the connection will join when it connects. Default value is None. 
+ :paramtype groups: list[str] + :keyword client_protocol: The type of client. Case-insensitive. If not set, it's "Default". For + Web PubSub for Socket.IO, only the default value is supported. For Web PubSub, the valid values + are 'Default' and 'MQTT'. Known values are: "Default" and "MQTT". Default value is "Default". + :paramtype client_protocol: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "token": "str" + } + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[JSON] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_get_client_access_token_request( + hub=self._config.hub, + user_id=user_id, + roles=roles, + minutes_to_expire=minutes_to_expire, + groups=groups, + client_protocol=client_protocol, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if response.content: + deserialized = response.json() + else: + deserialized = None + + if cls: + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore + + return cast(JSON, deserialized) # type: ignore + + @distributed_trace_async + async def send_to_all( + self, + message: IO[bytes], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + message_ttl_seconds: Optional[int] = None, + **kwargs: Any + ) -> None: + """Broadcast content inside request body to all the connected client connections. + + Broadcast content inside request body to all the connected client connections. + + :param message: The payload body. Required. + :type message: IO[bytes] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword message_ttl_seconds: The time-to-live (TTL) value in seconds for messages sent to the + service. 0 is the default value, which means the message never expires. 300 is the maximum + value. If this parameter is non-zero, messages that are not consumed by the client within the + specified TTL will be dropped by the service. This parameter can help when the client's + bandwidth is limited. Default value is None. 
+ :paramtype message_ttl_seconds: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = message + + _request = build_web_pub_sub_service_send_to_all_request( + hub=self._config.hub, + excluded=excluded, + filter=filter, + message_ttl_seconds=message_ttl_seconds, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def close_connection(self, connection_id: str, *, reason: Optional[str] = None, **kwargs: Any) -> None: + """Close the client connection. + + Close the client connection. + + :param connection_id: Target connection Id. Required. + :type connection_id: str + :keyword reason: The reason closing the client connection. Default value is None. + :paramtype reason: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_close_connection_request( + connection_id=connection_id, + hub=self._config.hub, + reason=reason, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def connection_exists(self, connection_id: str, **kwargs: Any) -> bool: + """Check if the connection with the given connectionId exists. 
+ + Check if the connection with the given connectionId exists. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_connection_exists_request( + connection_id=connection_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace_async + async def send_to_connection( + self, connection_id: str, message: IO[bytes], *, message_ttl_seconds: Optional[int] = None, **kwargs: Any + ) -> None: + """Send content inside request body to the specific connection. + + Send content inside request body to the specific connection. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :param message: The payload body. Required. + :type message: IO[bytes] + :keyword message_ttl_seconds: The time-to-live (TTL) value in seconds for messages sent to the + service. 0 is the default value, which means the message never expires. 300 is the maximum + value. If this parameter is non-zero, messages that are not consumed by the client within the + specified TTL will be dropped by the service. This parameter can help when the client's + bandwidth is limited. Default value is None. 
+ :paramtype message_ttl_seconds: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = message + + _request = build_web_pub_sub_service_send_to_connection_request( + connection_id=connection_id, + hub=self._config.hub, + message_ttl_seconds=message_ttl_seconds, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def remove_connection_from_all_groups(self, connection_id: str, **kwargs: Any) -> None: + """Remove a connection from all groups. + + Remove a connection from all groups. + + :param connection_id: Target connection Id. Required. + :type connection_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_remove_connection_from_all_groups_request( + connection_id=connection_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def group_exists(self, group: str, **kwargs: Any) -> bool: + """Check if there are any client connections inside the given group. + + Check if there are any client connections inside the given group. 
+ + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_group_exists_request( + group=group, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace_async + async def close_group_connections( + self, group: str, *, excluded: Optional[List[str]] = None, reason: Optional[str] = None, **kwargs: Any + ) -> None: + """Close connections in the specific group. + + Close connections in the specific group. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :keyword excluded: Exclude these connectionIds when closing the connections in the group. + Default value is None. + :paramtype excluded: list[str] + :keyword reason: The reason closing the client connection. Default value is None. 
+ :paramtype reason: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_close_group_connections_request( + group=group, + hub=self._config.hub, + excluded=excluded, + reason=reason, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def send_to_group( + self, + group: str, + message: IO[bytes], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + message_ttl_seconds: Optional[int] = None, + **kwargs: Any + ) -> None: + """Send content inside request body to a group of connections. + + Send content inside request body to a group of connections. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param message: The payload body. Required. + :type message: IO[bytes] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword message_ttl_seconds: The time-to-live (TTL) value in seconds for messages sent to the + service. 0 is the default value, which means the message never expires. 300 is the maximum + value. If this parameter is non-zero, messages that are not consumed by the client within the + specified TTL will be dropped by the service. This parameter can help when the client's + bandwidth is limited. Default value is None. 
+ :paramtype message_ttl_seconds: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = message + + _request = build_web_pub_sub_service_send_to_group_request( + group=group, + hub=self._config.hub, + excluded=excluded, + filter=filter, + message_ttl_seconds=message_ttl_seconds, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list_connections( + self, + group: str, + *, + top: Optional[int] = None, + continuation_token_parameter: Optional[str] = None, + **kwargs: Any + ) -> AsyncItemPaged[JSON]: + """List connections in a group. + + List connections in a group. + + :param group: Target group name, whose length should be greater than 0 and less than 1025. + Required. + :type group: str + :keyword top: The maximum number of connections to return. If the value is not set, then all + the connections in a group are returned. Default value is None. + :paramtype top: int + :keyword continuation_token_parameter: A token that allows the client to retrieve the next page + of results. This parameter is provided by the service in the response of a previous request + when there are additional results to be fetched. Clients should include the continuationToken + in the next request to receive the subsequent page of data. If this parameter is omitted, the + server will return the first page of results. Default value is None. + :paramtype continuation_token_parameter: str + :return: An iterator like instance of JSON object + :rtype: ~azure.core.async_paging.AsyncItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. 
code-block:: python + + # response body for status code(s): 200 + response == { + "connectionId": "str", + "userId": "str" + } + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[JSON] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_web_pub_sub_service_list_connections_request( + group=group, + hub=self._config.hub, + maxpagesize=maxpagesize, + top=top, + continuation_token_parameter=continuation_token_parameter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = deserialized.get("value", []) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def remove_connection_from_group(self, group: str, connection_id: str, **kwargs: Any) -> None: + """Remove a connection from the target group. + + Remove a connection from the target group. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param connection_id: Target connection Id. Required. 
+ :type connection_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_remove_connection_from_group_request( + group=group, + connection_id=connection_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def add_connection_to_group(self, group: str, connection_id: str, **kwargs: Any) -> None: + """Add a connection to the target group. + + Add a connection to the target group. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param connection_id: Target connection Id. Required. + :type connection_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_add_connection_to_group_request( + group=group, + connection_id=connection_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def revoke_permission( + self, permission: str, connection_id: str, *, target_name: Optional[str] = None, **kwargs: Any + ) -> None: + """Revoke permission for the connection. + + Revoke permission for the connection. + + :param permission: The permission: current supported actions are joinLeaveGroup and + sendToGroup. 
Known values are: "sendToGroup" and "joinLeaveGroup". Required. + :type permission: str + :param connection_id: Target connection Id. Required. + :type connection_id: str + :keyword target_name: The meaning of the target depends on the specific permission. For + joinLeaveGroup and sendToGroup, targetName is a required parameter standing for the group name. + Default value is None. + :paramtype target_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_revoke_permission_request( + permission=permission, + connection_id=connection_id, + hub=self._config.hub, + target_name=target_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def has_permission( + self, permission: str, connection_id: str, *, target_name: Optional[str] = None, **kwargs: Any + ) -> bool: + """Check if a connection has permission to the specified action. + + Check if a connection has permission to the specified action. + + :param permission: The permission: current supported actions are joinLeaveGroup and + sendToGroup. Known values are: "sendToGroup" and "joinLeaveGroup". Required. + :type permission: str + :param connection_id: Target connection Id. Required. + :type connection_id: str + :keyword target_name: The meaning of the target depends on the specific permission. For + joinLeaveGroup and sendToGroup, targetName is a required parameter standing for the group name. + Default value is None. 
+ :paramtype target_name: str + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_has_permission_request( + permission=permission, + connection_id=connection_id, + hub=self._config.hub, + target_name=target_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace_async + async def grant_permission( + self, permission: str, connection_id: str, *, target_name: Optional[str] = None, **kwargs: Any + ) -> None: + """Grant permission to the connection. + + Grant permission to the connection. + + :param permission: The permission: current supported actions are joinLeaveGroup and + sendToGroup. Known values are: "sendToGroup" and "joinLeaveGroup". Required. + :type permission: str + :param connection_id: Target connection Id. Required. + :type connection_id: str + :keyword target_name: The meaning of the target depends on the specific permission. For + joinLeaveGroup and sendToGroup, targetName is a required parameter standing for the group name. + Default value is None. 
+ :paramtype target_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_grant_permission_request( + permission=permission, + connection_id=connection_id, + hub=self._config.hub, + target_name=target_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def user_exists(self, user_id: str, **kwargs: Any) -> bool: + """Check if there are any client connections connected for the given user. + + Check if there are any client connections connected for the given user. + + :param user_id: Target user Id. Required. + :type user_id: str + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_user_exists_request( + user_id=user_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace_async + async def close_user_connections( + self, user_id: str, *, excluded: Optional[List[str]] = None, reason: Optional[str] = None, **kwargs: Any + ) -> None: + """Close connections for the specific user. + + Close connections for the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :keyword excluded: Exclude these connectionIds when closing the connections for the user. 
+ Default value is None. + :paramtype excluded: list[str] + :keyword reason: The reason closing the client connection. Default value is None. + :paramtype reason: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_close_user_connections_request( + user_id=user_id, + hub=self._config.hub, + excluded=excluded, + reason=reason, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def send_to_user( + self, + user_id: str, + message: IO[bytes], + *, + filter: Optional[str] = None, + message_ttl_seconds: Optional[int] = None, + **kwargs: Any + ) -> None: + """Send content inside request body to the specific user. + + Send content inside request body to the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :param message: The payload body. Required. + :type message: IO[bytes] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword message_ttl_seconds: The time-to-live (TTL) value in seconds for messages sent to the + service. 0 is the default value, which means the message never expires. 300 is the maximum + value. If this parameter is non-zero, messages that are not consumed by the client within the + specified TTL will be dropped by the service. This parameter can help when the client's + bandwidth is limited. Default value is None. 
+ :paramtype message_ttl_seconds: int + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json")) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = message + + _request = build_web_pub_sub_service_send_to_user_request( + user_id=user_id, + hub=self._config.hub, + filter=filter, + message_ttl_seconds=message_ttl_seconds, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def remove_user_from_all_groups(self, user_id: str, **kwargs: Any) -> None: + """Remove a user from all groups. + + Remove a user from all groups. + + :param user_id: Target user Id. Required. + :type user_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_remove_user_from_all_groups_request( + user_id=user_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def remove_user_from_group(self, group: str, user_id: str, **kwargs: Any) -> None: + """Remove a user from the target group. + + Remove a user from the target group. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. 
+ :type group: str + :param user_id: Target user Id. Required. + :type user_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_remove_user_from_group_request( + group=group, + user_id=user_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def add_user_to_group(self, group: str, user_id: str, **kwargs: Any) -> None: + """Add a user to the target group. + + Add a user to the target group. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param user_id: Target user Id. Required. 
+ :type user_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_web_pub_sub_service_add_user_to_group_request( + group=group, + user_id=user_id, + hub=self._config.hub, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/_patch.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/_patch.py new file mode 100644 index 00000000000..d43e305b7f1 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_operations/_patch.py @@ -0,0 +1,870 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + + +"""Customize generated code here. 
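+
+The async mixin in this module layers a few customizations on top of the generated
+operations: key-credential token generation in ``get_client_access_token``, a typed
+``GroupMember`` pager for ``list_connections``, and ``send_to_*`` overloads that accept
+str, JSON or binary payloads. A minimal usage sketch (the endpoint, hub and key values are
+placeholders, and the import path assumes the non-vendored package layout)::
+
+    from azure.core.credentials import AzureKeyCredential
+    from azure.messaging.webpubsubservice.aio import WebPubSubServiceClient
+
+    async def broadcast() -> None:
+        async with WebPubSubServiceClient(
+            "https://contoso.webpubsub.azure.com", "theHub", AzureKeyCredential("<key>")
+        ) as client:
+            token = await client.get_client_access_token(user_id="user-1")
+            await client.send_to_all({"greeting": "hello"}, content_type="application/json")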
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import Any, Union, Optional, Dict, List, IO, overload
+
+from azure.core.credentials import AzureKeyCredential
+from azure.core.async_paging import AsyncItemPaged
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    map_error,
+)
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+
+
+from ._operations import (
+    WebPubSubServiceClientOperationsMixin as WebPubSubServiceClientOperationsMixinGenerated,
+    JSON,
+    build_web_pub_sub_service_send_to_all_request,
+    build_web_pub_sub_service_send_to_connection_request,
+    build_web_pub_sub_service_send_to_user_request,
+    build_web_pub_sub_service_send_to_group_request,
+)
+from ..._operations._patch import get_token_by_key
+from ..._models import GroupMember
+
+class WebPubSubServiceClientOperationsMixin(WebPubSubServiceClientOperationsMixinGenerated):
+    @distributed_trace_async
+    async def get_client_access_token(  # pylint: disable=arguments-differ
+        self,
+        *,
+        user_id: Optional[str] = None,
+        roles: Optional[List[str]] = None,
+        minutes_to_expire: Optional[int] = 60,
+        jwt_headers: Optional[Dict[str, Any]] = None,
+        groups: Optional[List[str]] = None,
+        client_protocol: Optional[str] = "Default",
+        **kwargs: Any
+    ) -> JSON:
+        """Generate a token for the client to connect to the Azure Web PubSub service.
+
+        :keyword user_id: User Id.
+        :paramtype user_id: str
+        :keyword roles: Roles that the connection with the generated token will have.
+        :paramtype roles: list[str]
+        :keyword minutes_to_expire: The expiration time, in minutes, of the generated token. Default value is 60.
+        :paramtype minutes_to_expire: int
+        :keyword dict[str, any] jwt_headers: Any headers you want to pass to JWT encoding.
+        :keyword groups: Groups that the connection will join when it connects. Default value is None.
+        :paramtype groups: list[str]
+        :keyword api_version: Api Version. The default value is "2021-10-01". Note that overriding this
+         default value may result in unsupported behavior.
+        :paramtype api_version: str
+        :keyword client_protocol: The type of client protocol. Case-insensitive. If not set, it's "Default". For Web
+         PubSub for Socket.IO, the "SocketIO" type is supported. For Web PubSub, the valid values are
+         'Default' and 'MQTT'. Known values are: "Default", "MQTT" and "SocketIO". Default value is "Default".
+        :paramtype client_protocol: str
+        :return: JSON object
+        :rtype: JSON
+        :raises: ~azure.core.exceptions.HttpResponseError
+
+        Example:
+
+            >>> get_client_access_token()
+            {
+                'baseUrl': 'wss://contoso.com/api/webpubsub/client/hubs/theHub',
+                'token': '...',
+                'url': 'wss://contoso.com/api/webpubsub/client/hubs/theHub?access_token=...'
+ } + """ + endpoint = self._config.endpoint.lower() + if not endpoint.startswith("http://") and not endpoint.startswith("https://"): + raise ValueError( + "Invalid endpoint: '{}' has unknown scheme - expected 'http://' or 'https://'".format(endpoint) + ) + # Ensure endpoint has no trailing slash + + endpoint = endpoint.rstrip("/") + + # Switch from http(s) to ws(s) scheme + + client_endpoint = "ws" + endpoint[4:] + hub = self._config.hub + path = "/client/hubs/" + if client_protocol.lower() == "mqtt": + path = "/clients/mqtt/hubs/" + elif client_protocol.lower() == "socketio": + path = "/clients/socketio/hubs/" + client_url = client_endpoint + path + hub + if isinstance(self._config.credential, AzureKeyCredential): + token = get_token_by_key( + endpoint, + path, + hub, + self._config.credential.key, + user_id=user_id, + roles=roles, + minutes_to_expire=minutes_to_expire, + jwt_headers=jwt_headers or {}, + groups=groups, + **kwargs + ) + else: + access_token = await super().get_client_access_token( + user_id=user_id, + roles=roles, + minutes_to_expire=minutes_to_expire, + groups=groups, + client_protocol=client_protocol, + **kwargs + ) + token = access_token.get("token") + return { + "baseUrl": client_url, + "token": token, + "url": "{}?access_token={}".format(client_url, token), + } + + get_client_access_token.metadata = {"url": "/api/hubs/{hub}/:generateToken"} # type: ignore + + @distributed_trace + def list_connections( + self, + *, + group: str, + top: Optional[int] = None, + **kwargs: Any + ) -> AsyncItemPaged[GroupMember]: + """List connections in a group. + + List connections in a group. + + :keyword group: Target group name, whose length should be greater than 0 and less than 1025. + Required. + :paramtype group: str + :keyword top: The maximum number of connections to return. If the value is not set, then all + the connections in a group are returned. Default value is None. + :paramtype top: int + :return: An iterator like instance of GroupMember object + :rtype: ~azure.core.async_paging.AsyncItemPaged[GroupMember] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + connections = client.list_connections( + group="group_name", + top=100 + ) + + async for member in connections: + assert member.connection_id is not None + + + """ + paged_json = super().list_connections( + group=group, + top=top, + **kwargs + ) + + class GroupMemberPaged(AsyncItemPaged): + def __aiter__(self_inner): + async def generator(): + async for item in paged_json: + yield GroupMember( + connection_id=item.get("connectionId"), + user_id=item.get("userId") + ) + return generator() + + def by_page(self_inner, continuation_token: Optional[str] = None): + async def page_generator(): + async for page in paged_json.by_page(continuation_token=continuation_token): + async def group_member_page(): + async for item in page: + yield GroupMember( + connection_id=item.get("connectionId"), + user_id=item.get("userId") + ) + yield group_member_page() + return page_generator() + + return GroupMemberPaged() + + + @overload + async def send_to_all( # pylint: disable=inconsistent-return-statements + self, + message: Union[str, JSON], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "application/json", + **kwargs: Any + ) -> None: + """Broadcast content inside request body to all the connected client connections. + + Broadcast content inside request body to all the connected client connections. 
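+
+        For example, assuming ``client`` is an async ``WebPubSubServiceClient`` for this hub
+        (the excluded connection id and OData filter below are illustrative), a JSON broadcast
+        could look like:
+
+        .. code-block:: python
+
+            await client.send_to_all(
+                {"from": "server", "data": "hello"},
+                excluded=["connection-id-to-skip"],
+                filter="userId ne 'user-1'",
+                content_type="application/json",
+            )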
+ + :param message: The payload body. Required. + :type message: Union[str, JSON] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def send_to_all( # pylint: disable=inconsistent-return-statements + self, + message: str, + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "text/plain", + **kwargs: Any + ) -> None: + """Broadcast content inside request body to all the connected client connections. + + Broadcast content inside request body to all the connected client connections. + + :param message: The payload body. Required. + :type message: str + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def send_to_all( # pylint: disable=inconsistent-return-statements + self, + message: IO, + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "application/octet-stream", + **kwargs: Any + ) -> None: + """Broadcast content inside request body to all the connected client connections. + + Broadcast content inside request body to all the connected client connections. + + :param message: The payload body. Required. + :type message: IO + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def send_to_all( # pylint: disable=inconsistent-return-statements + self, + message: Union[IO, str, JSON], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any + ) -> None: + """Broadcast content inside request body to all the connected client connections. + + Broadcast content inside request body to all the connected client connections. + + :param message: The payload body. Required. + :type message: Union[IO, str, JSON] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. 
+ :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = ( + _headers.pop("Content-Type", "application/json") if content_type is None else content_type + ) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + _json = None + _content = None + content_type = content_type or "" + if content_type.split(";")[0] in ["application/json"]: + _json = message + elif content_type.split(";")[0] in ["application/octet-stream", "text/plain"]: + _content = message + else: + raise ValueError( + "The content_type '{}' is not one of the allowed values: " + "['application/json', 'application/octet-stream', 'text/plain']".format(content_type) + ) + request = build_web_pub_sub_service_send_to_all_request( + hub=self._config.hub, + excluded=excluded, + filter=filter, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + json=_json, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + if cls: + return cls(pipeline_response, None, {}) + + @overload + async def send_to_group( # pylint: disable=inconsistent-return-statements + self, + group: str, + message: Union[str, JSON], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "application/json", + **kwargs: Any + ) -> None: + """Send content inside request body to a group of connections. + + Send content inside request body to a group of connections. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param message: The payload body. Required. + :type message: Union[str, JSON] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. 
Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def send_to_group( # pylint: disable=inconsistent-return-statements + self, + group: str, + message: str, + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "text/plain", + **kwargs: Any + ) -> None: + """Send content inside request body to a group of connections. + + Send content inside request body to a group of connections. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param message: The payload body. Required. + :type message: str + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def send_to_group( # pylint: disable=inconsistent-return-statements + self, + group: str, + message: IO, + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = "application/octet-stream", + **kwargs: Any + ) -> None: + """Send content inside request body to a group of connections. + + Send content inside request body to a group of connections. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param message: The payload body. Required. + :type message: IO + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def send_to_group( # pylint: disable=inconsistent-return-statements + self, + group: str, + message: Union[IO, str, JSON], + *, + excluded: Optional[List[str]] = None, + filter: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any + ) -> None: + """Send content inside request body to a group of connections. + + Send content inside request body to a group of connections. + + :param group: Target group name, which length should be greater than 0 and less than 1025. + Required. + :type group: str + :param message: The payload body. Required. + :type message: Union[IO, str, JSON] + :keyword excluded: Excluded connection Ids. Default value is None. + :paramtype excluded: list[str] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. 
Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = ( + _headers.pop("Content-Type", "application/json") if content_type is None else content_type + ) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + _json = None + _content = None + content_type = content_type or "" + if content_type.split(";")[0] in ["application/json"]: + _json = message + elif content_type.split(";")[0] in ["application/octet-stream", "text/plain"]: + _content = message + else: + raise ValueError( + "The content_type '{}' is not one of the allowed values: " + "['application/json', 'application/octet-stream', 'text/plain']".format(content_type) + ) + request = build_web_pub_sub_service_send_to_group_request( + group=group, + hub=self._config.hub, + excluded=excluded, + filter=filter, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + json=_json, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + if cls: + return cls(pipeline_response, None, {}) + + @overload + async def send_to_connection( # pylint: disable=inconsistent-return-statements + self, + connection_id: str, + message: Union[str, JSON], + *, + content_type: Optional[str] = "application/json", + **kwargs: Any + ) -> None: + """Send content inside request body to the specific connection. + + Send content inside request body to the specific connection. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :param message: The payload body. Required. + :type message: Union[str, JSON] + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def send_to_connection( # pylint: disable=inconsistent-return-statements + self, connection_id: str, message: str, *, content_type: Optional[str] = "text/plain", **kwargs: Any + ) -> None: + """Send content inside request body to the specific connection. + + Send content inside request body to the specific connection. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :param message: The payload body. Required. + :type message: str + :keyword content_type: The content type of the payload. Default value is None. 
Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def send_to_connection( # pylint: disable=inconsistent-return-statements + self, + connection_id: str, + message: IO, + *, + content_type: Optional[str] = "application/octet-stream", + **kwargs: Any + ) -> None: + """Send content inside request body to the specific connection. + + Send content inside request body to the specific connection. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :param message: The payload body. Required. + :type message: IO + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def send_to_connection( # pylint: disable=inconsistent-return-statements + self, connection_id: str, message: Union[IO, str, JSON], *, content_type: Optional[str] = None, **kwargs: Any + ) -> None: + """Send content inside request body to the specific connection. + + Send content inside request body to the specific connection. + + :param connection_id: The connection Id. Required. + :type connection_id: str + :param message: The payload body. Required. + :type message: Union[IO, str, JSON] + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = ( + _headers.pop("Content-Type", "application/json") if content_type is None else content_type + ) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + _json = None + _content = None + content_type = content_type or "" + if content_type.split(";")[0] in ["application/json"]: + _json = message + elif content_type.split(";")[0] in ["application/octet-stream", "text/plain"]: + _content = message + else: + raise ValueError( + "The content_type '{}' is not one of the allowed values: " + "['application/json', 'application/octet-stream', 'text/plain']".format(content_type) + ) + request = build_web_pub_sub_service_send_to_connection_request( + connection_id=connection_id, + hub=self._config.hub, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + json=_json, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + raise HttpResponseError(response=response) + if cls: + return cls(pipeline_response, None, {}) + + @overload + async def send_to_user( # pylint: disable=inconsistent-return-statements + self, + user_id: str, + message: Union[str, JSON], + *, + filter: Optional[str] = None, + content_type: Optional[str] = "application/json", + **kwargs: Any + ) -> None: + """Send content inside request body to the specific user. + + Send content inside request body to the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :param message: The payload body. Required. + :type message: Union[str, JSON] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def send_to_user( # pylint: disable=inconsistent-return-statements + self, + user_id: str, + message: str, + *, + filter: Optional[str] = None, + content_type: Optional[str] = "text/plain", + **kwargs: Any + ) -> None: + """Send content inside request body to the specific user. + + Send content inside request body to the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :param message: The payload body. Required. + :type message: str + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def send_to_user( # pylint: disable=inconsistent-return-statements + self, + user_id: str, + message: IO, + *, + filter: Optional[str] = None, + content_type: Optional[str] = "application/octet-stream", + **kwargs: Any + ) -> None: + """Send content inside request body to the specific user. + + Send content inside request body to the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :param message: The payload body. Required. + :type message: IO + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def send_to_user( # pylint: disable=inconsistent-return-statements + self, + user_id: str, + message: Union[IO, str, JSON], + *, + filter: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any + ) -> None: + """Send content inside request body to the specific user. + + Send content inside request body to the specific user. + + :param user_id: The user Id. Required. + :type user_id: str + :param message: The payload body. Required. 
+ :type message: Union[IO, str, JSON] + :keyword filter: Following OData filter syntax to filter out the subscribers receiving the + messages. Default value is None. + :paramtype filter: str + :keyword content_type: The content type of the payload. Default value is None. Allowed values are 'application/json', 'application/octet-stream' and 'text/plain' + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = ( + _headers.pop("Content-Type", "application/json") if content_type is None else content_type + ) # type: str + cls = kwargs.pop("cls", None) # type: ClsType[None] + + _json = None + _content = None + content_type = content_type or "" + if content_type.split(";")[0] in ["application/json"]: + _json = message + elif content_type.split(";")[0] in ["application/octet-stream", "text/plain"]: + _content = message + else: + raise ValueError( + "The content_type '{}' is not one of the allowed values: " + "['application/json', 'application/octet-stream', 'text/plain']".format(content_type) + ) + request = build_web_pub_sub_service_send_to_user_request( + user_id=user_id, + hub=self._config.hub, + filter=filter, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + json=_json, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + if cls: + return cls(pipeline_response, None, {}) + + +__all__: List[str] = [ + "WebPubSubServiceClientOperationsMixin" +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_patch.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_patch.py new file mode 100644 index 00000000000..c09f212bfca --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_patch.py @@ -0,0 +1,83 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + + +from typing import Any, TYPE_CHECKING, Union + +from azure.core.credentials import AzureKeyCredential + + +from .._patch import _parse_connection_string, WebPubSubServiceClientBase +from ._client import WebPubSubServiceClient as WebPubSubServiceClientGenerated + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class WebPubSubServiceClient(WebPubSubServiceClientBase, WebPubSubServiceClientGenerated): + """WebPubSubServiceClient. + + :param endpoint: HTTP or HTTPS endpoint for the Web PubSub service instance. + :type endpoint: str + :param hub: Target hub name, which should start with alphabetic characters and only contain + alpha-numeric characters or underscore. + :type hub: str + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential or ~azure.core.credentials.AzureKeyCredential + :keyword api_version: Api Version. The default value is "2021-10-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__( + self, endpoint: str, hub: str, credential: Union["AsyncTokenCredential", AzureKeyCredential], **kwargs: Any + ) -> None: + super().__init__(endpoint=endpoint, hub=hub, credential=credential, **kwargs) + + @classmethod + def from_connection_string(cls, connection_string: str, hub: str, **kwargs: Any) -> "WebPubSubServiceClient": + """Create a new WebPubSubServiceClient from a connection string. + + :param connection_string: Connection string + :type connection_string: ~str + :param hub: Target hub name, which should start with alphabetic characters and only contain + alpha-numeric characters or underscore. 
+ :type hub: str + :rtype: WebPubSubServiceClient + """ + kwargs = _parse_connection_string(connection_string, **kwargs) + + credential = AzureKeyCredential(kwargs.pop("accesskey")) + return cls(hub=hub, credential=credential, **kwargs) + + +__all__ = ["WebPubSubServiceClient"] + + +def patch_sdk(): + pass diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_vendor.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_vendor.py new file mode 100644 index 00000000000..3da089d49e7 --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/aio/_vendor.py @@ -0,0 +1,26 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +from typing import TYPE_CHECKING + +from ._configuration import WebPubSubServiceClientConfiguration + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core import AsyncPipelineClient + + from .._serialization import Deserializer, Serializer + + +class WebPubSubServiceClientMixinABC(ABC): + """DO NOT use this class. It is for internal typing use only.""" + + _client: "AsyncPipelineClient" + _config: WebPubSubServiceClientConfiguration + _serialize: "Serializer" + _deserialize: "Deserializer" diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/__init__.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/__init__.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/__init__.py deleted file mode 100644 index a70aae7c472..00000000000 --- a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/__init__.py +++ /dev/null @@ -1,65 +0,0 @@ -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# -# -------------------------------------------------------------------------- -try: - from ._rest_py3 import ( - HttpRequest, - HttpResponse, - AsyncHttpResponse, - _StreamContextManager, - _AsyncStreamContextManager, - StreamConsumedError, - ResponseNotReadError, - ResponseClosedError, - ) - - __all__ = [ - "HttpRequest", - "HttpResponse", - "AsyncHttpResponse", - "_StreamContextManager", - "_AsyncStreamContextManager", - "StreamConsumedError", - "ResponseNotReadError", - "ResponseClosedError", - ] -except (SyntaxError, ImportError): - from ._rest import ( - HttpRequest, - HttpResponse, - _StreamContextManager, - StreamConsumedError, - ResponseNotReadError, - ResponseClosedError, - ) - - __all__ = [ - "HttpRequest", - "HttpResponse", - "_StreamContextManager", - "StreamConsumedError", - "ResponseNotReadError", - "ResponseClosedError", - ] diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/_rest.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/_rest.py deleted file mode 100644 index cee1c039cbd..00000000000 --- a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/_rest.py +++ /dev/null @@ -1,625 +0,0 @@ -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. 
-# -# -------------------------------------------------------------------------- - -# pylint currently complains about typing.Union not being subscriptable -# pylint: disable=unsubscriptable-object - -import codecs -import json -from enum import Enum -import xml.etree.ElementTree as ET -from typing import TYPE_CHECKING, Iterable - -import cgi -import six - -from azure.core.exceptions import HttpResponseError -from azure.core.pipeline.transport import ( - HttpRequest as _PipelineTransportHttpRequest, -) - - -if TYPE_CHECKING: - from typing import ( # pylint: disable=ungrouped-imports - Any, - Optional, - Union, - Mapping, - Sequence, - Tuple, - Iterator, - ) - - ByteStream = Iterable[bytes] - - HeadersType = Union[Mapping[str, str], Sequence[Tuple[str, str]]] - ContentType = Union[str, bytes, ByteStream] - from azure.core.pipeline.transport._base import ( - _HttpResponseBase as _PipelineTransportHttpResponseBase, - ) - from azure.core._pipeline_client import PipelineClient as _PipelineClient - - -class HttpVerbs(str, Enum): - GET = "GET" - PUT = "PUT" - POST = "POST" - HEAD = "HEAD" - PATCH = "PATCH" - DELETE = "DELETE" - MERGE = "MERGE" - - -########################### UTILS SECTION ################################# - - -def _is_stream_or_str_bytes(content): - return isinstance(content, (str, bytes)) or any( - hasattr(content, attr) for attr in ["read", "__iter__", "__aiter__"] - ) - - -def _lookup_encoding(encoding): - # type: (str) -> bool - # including check for whether encoding is known taken from httpx - try: - codecs.lookup(encoding) - return True - except LookupError: - return False - - -def _set_content_length_header(header_name, header_value, internal_request): - # type: (str, str, _PipelineTransportHttpRequest) -> None - valid_methods = ["put", "post", "patch"] - content_length_headers = ["Content-Length", "Transfer-Encoding"] - if internal_request.method.lower() in valid_methods and not any( - [c for c in content_length_headers if c in internal_request.headers] - ): - internal_request.headers[header_name] = header_value - - -def _set_content_type_header(header_value, internal_request): - # type: (str, _PipelineTransportHttpRequest) -> None - if not internal_request.headers.get("Content-Type"): - internal_request.headers["Content-Type"] = header_value - - -def _set_content_body(content, internal_request): - # type: (ContentType, _PipelineTransportHttpRequest) -> None - headers = internal_request.headers - content_type = headers.get("Content-Type") - if _is_stream_or_str_bytes(content): - # stream will be bytes / str, or iterator of bytes / str - internal_request.set_streamed_data_body(content) - if isinstance(content, (str, bytes)) and content: - _set_content_length_header( - "Content-Length", str(len(internal_request.data)), internal_request - ) - if isinstance(content, six.string_types): - _set_content_type_header("text/plain", internal_request) - else: - _set_content_type_header("application/octet-stream", internal_request) - elif isinstance( # pylint: disable=isinstance-second-argument-not-valid-type - content, Iterable - ): - # _set_content_length_header("Transfer-Encoding", "chunked", internal_request) - _set_content_type_header("application/octet-stream", internal_request) - elif isinstance(content, ET.Element): - # XML body - internal_request.set_xml_body(content) - _set_content_type_header("application/xml", internal_request) - _set_content_length_header( - "Content-Length", str(len(internal_request.data)), internal_request - ) - elif content_type and 
content_type.startswith("text/"): - # Text body - internal_request.set_text_body(content) - _set_content_length_header( - "Content-Length", str(len(internal_request.data)), internal_request - ) - else: - # Other body - internal_request.data = content - internal_request.headers = headers - - -def _set_body(content, data, files, json_body, internal_request): - # type: (ContentType, dict, Any, Any, _PipelineTransportHttpRequest) -> None - if data is not None and not isinstance(data, dict): - content = data - data = None - if content is not None: - _set_content_body(content, internal_request) - elif json_body is not None: - internal_request.set_json_body(json_body) - _set_content_type_header("application/json", internal_request) - elif files is not None: - internal_request.set_formdata_body(files) - # if you don't supply your content type, we'll create a boundary for you with multipart/form-data - # boundary = binascii.hexlify(os.urandom(16)).decode("ascii") # got logic from httpx, thanks httpx! - # _set_content_type_header("multipart/form-data; boundary={}".format(boundary), internal_request) - elif data: - _set_content_type_header("application/x-www-form-urlencoded", internal_request) - internal_request.set_formdata_body(data) - # need to set twice because Content-Type is being popped in set_formdata_body - # don't want to risk changing pipeline.transport, so doing twice here - _set_content_type_header("application/x-www-form-urlencoded", internal_request) - - -def _parse_lines_from_text(text): - # largely taken from httpx's LineDecoder code - lines = [] - last_chunk_of_text = "" - while text: - text_length = len(text) - for idx in range(text_length): - curr_char = text[idx] - next_char = None if idx == len(text) - 1 else text[idx + 1] - if curr_char == "\n": - lines.append(text[: idx + 1]) - text = text[idx + 1 :] - break - if curr_char == "\r" and next_char == "\n": - # if it ends with \r\n, we only do \n - lines.append(text[:idx] + "\n") - text = text[idx + 2 :] - break - if curr_char == "\r" and next_char is not None: - # if it's \r then a normal character, we switch \r to \n - lines.append(text[:idx] + "\n") - text = text[idx + 1 :] - break - if next_char is None: - text = "" - last_chunk_of_text += text - break - if last_chunk_of_text.endswith("\r"): - # if ends with \r, we switch \r to \n - lines.append(last_chunk_of_text[:-1] + "\n") - elif last_chunk_of_text: - lines.append(last_chunk_of_text) - return lines - - -################################## CLASSES ###################################### -class _StreamContextManager(object): - def __init__(self, client, request, **kwargs): - # type: (_PipelineClient, HttpRequest, Any) -> None - self.client = client - self.request = request - self.kwargs = kwargs - - def __enter__(self): - # type: (...) -> HttpResponse - """Actually make the call only when we enter. For sync stream_response calls""" - pipeline_transport_response = self.client._pipeline.run( - self.request._internal_request, stream=True, **self.kwargs - ).http_response - self.response = HttpResponse( # pylint: disable=attribute-defined-outside-init - request=self.request, _internal_response=pipeline_transport_response - ) - return self.response - - def __exit__(self, *args): - """Close our stream connection. For sync calls""" - self.response.__exit__(*args) - - def close(self): - self.response.close() - - -class HttpRequest(object): - """Represents an HTTP request. - - :param method: HTTP method (GET, HEAD, etc.) 
- :type method: str or ~azure.core.protocol.HttpVerbs - :param str url: The url for your request - :keyword params: Query parameters to be mapped into your URL. Your input - should be a mapping or sequence of query name to query value(s). - :paramtype params: mapping or sequence - :keyword headers: HTTP headers you want in your request. Your input should - be a mapping or sequence of header name to header value. - :paramtype headers: mapping or sequence - :keyword dict data: Form data you want in your request body. Use for form-encoded data, i.e. - HTML forms. - :keyword any json: A JSON serializable object. We handle JSON-serialization for your - object, so use this for more complicated data structures than `data`. - :keyword files: Files you want to in your request body. Use for uploading files with - multipart encoding. Your input should be a mapping or sequence of file name to file content. - Use the `data` kwarg in addition if you want to include non-file data files as part of your request. - :paramtype files: mapping or sequence - :keyword content: Content you want in your request body. Think of it as the kwarg you should input - if your data doesn't fit into `json`, `data`, or `files`. Accepts a bytes type, or a generator - that yields bytes. - :paramtype content: str or bytes or iterable[bytes] or asynciterable[bytes] - :ivar str url: The URL this request is against. - :ivar str method: The method type of this request. - :ivar headers: The HTTP headers you passed in to your request - :vartype headers: mapping or sequence - :ivar bytes content: The content passed in for the request - """ - - def __init__(self, method, url, **kwargs): - # type: (str, str, Any) -> None - - data = kwargs.pop("data", None) - content = kwargs.pop("content", None) - json_body = kwargs.pop("json", None) - files = kwargs.pop("files", None) - - self._internal_request = kwargs.pop( - "_internal_request", - _PipelineTransportHttpRequest( - method=method, - url=url, - headers=kwargs.pop("headers", None), - ), - ) - params = kwargs.pop("params", None) - - if params: - self._internal_request.format_parameters(params) - - _set_body( - content=content, - data=data, - files=files, - json_body=json_body, - internal_request=self._internal_request, - ) - - if kwargs: - raise TypeError( - "You have passed in kwargs '{}' that are not valid kwargs.".format( - "', '".join(list(kwargs.keys())) - ) - ) - - def _set_content_length_header(self): - method_check = self._internal_request.method.lower() in ["put", "post", "patch"] - content_length_unset = "Content-Length" not in self._internal_request.headers - if method_check and content_length_unset: - self._internal_request.headers["Content-Length"] = str( - len(self._internal_request.data) - ) - - @property - def url(self): - # type: (...) -> str - return self._internal_request.url - - @url.setter - def url(self, val): - # type: (str) -> None - self._internal_request.url = val - - @property - def method(self): - # type: (...) -> str - return self._internal_request.method - - @property - def headers(self): - # type: (...) -> HeadersType - return self._internal_request.headers - - @property - def content(self): - # type: (...) 
-> Any - """Gets the request content.""" - return self._internal_request.data or self._internal_request.files - - def __repr__(self): - return self._internal_request.__repr__() - - def __deepcopy__(self, memo=None): - return HttpRequest( - self.method, - self.url, - _internal_request=self._internal_request.__deepcopy__(memo), - ) - - -class _HttpResponseBase(object): - """Base class for HttpResponse and AsyncHttpResponse. - - :keyword request: The request that resulted in this response. - :paramtype request: ~azure.core.rest.HttpRequest - :ivar int status_code: The status code of this response - :ivar headers: The response headers - :vartype headers: dict[str, any] - :ivar str reason: The reason phrase for this response - :ivar bytes content: The response content in bytes - :ivar str url: The URL that resulted in this response - :ivar str encoding: The response encoding. Is settable, by default - is the response Content-Type header - :ivar str text: The response body as a string. - :ivar request: The request that resulted in this response. - :vartype request: ~azure.core.rest.HttpRequest - :ivar str content_type: The content type of the response - :ivar bool is_error: Whether this response is an error. - """ - - def __init__(self, **kwargs): - # type: (Any) -> None - self._internal_response = kwargs.pop( - "_internal_response" - ) # type: _PipelineTransportHttpResponseBase - self._request = kwargs.pop("request") - self.is_closed = False - self.is_stream_consumed = False - self._num_bytes_downloaded = 0 - - @property - def status_code(self): - # type: (...) -> int - """Returns the status code of the response""" - return self._internal_response.status_code - - @status_code.setter - def status_code(self, val): - # type: (int) -> None - """Set the status code of the response""" - self._internal_response.status_code = val - - @property - def headers(self): - # type: (...) -> HeadersType - """Returns the response headers""" - return self._internal_response.headers - - @property - def reason(self): - # type: (...) -> str - """Returns the reason phrase for the response""" - return self._internal_response.reason - - @property - def content(self): - # type: (...) -> bytes - """Returns the response content in bytes""" - raise NotImplementedError() - - @property - def url(self): - # type: (...) -> str - """Returns the URL that resulted in this response""" - return self._internal_response.request.url - - @property - def encoding(self): - # type: (...) -> Optional[str] - """Returns the response encoding. By default, is specified - by the response Content-Type header. - """ - - try: - return self._encoding - except AttributeError: - return self._get_charset_encoding() - - def _get_charset_encoding(self): - content_type = self.headers.get("Content-Type") - - if not content_type: - return None - _, params = cgi.parse_header(content_type) - encoding = params.get("charset") # -> utf-8 - if encoding is None or not _lookup_encoding(encoding): - return None - return encoding - - @encoding.setter - def encoding(self, value): - # type: (str) -> None - """Sets the response encoding""" - self._encoding = value - - @property - def text(self): - # type: (...) -> str - """Returns the response body as a string""" - _ = ( - self.content - ) # access content to make sure we trigger if response not fully read in - return self._internal_response.text(encoding=self.encoding) - - @property - def request(self): - # type: (...) 
-> HttpRequest - if self._request: - return self._request - raise RuntimeError( - "You are trying to access the 'request', but there is no request associated with this HttpResponse" - ) - - @request.setter - def request(self, val): - # type: (HttpRequest) -> None - self._request = val - - @property - def content_type(self): - # type: (...) -> Optional[str] - """Content Type of the response""" - return self._internal_response.content_type or self.headers.get("Content-Type") - - @property - def num_bytes_downloaded(self): - # type: (...) -> int - """See how many bytes of your stream response have been downloaded""" - return self._num_bytes_downloaded - - @property - def is_error(self): - # type: (...) -> bool - """See whether your HttpResponse is an error. - - Use .raise_for_status() if you want to raise if this response is an error. - """ - return self.status_code < 400 - - def json(self): - # type: (...) -> Any - """Returns the whole body as a json object. - - :return: The JSON deserialized response body - :rtype: any - :raises json.decoder.JSONDecodeError or ValueError (in python 2.7) if object is not JSON decodable: - """ - return json.loads(self.text) - - def raise_for_status(self): - # type: (...) -> None - """Raises an HttpResponseError if the response has an error status code. - - If response is good, does nothing. - """ - if self.status_code >= 400: - raise HttpResponseError(response=self) - - def __repr__(self): - # type: (...) -> str - content_type_str = ( - ", Content-Type: {}".format(self.content_type) if self.content_type else "" - ) - return "<{}: {} {}{}>".format( - type(self).__name__, self.status_code, self.reason, content_type_str - ) - - def _validate_streaming_access(self): - # type: (...) -> None - if self.is_closed: - raise ResponseClosedError() - if self.is_stream_consumed: - raise StreamConsumedError() - - -class HttpResponse(_HttpResponseBase): - @property - def content(self): - # type: (...) -> bytes - try: - return self._content - except AttributeError: - raise ResponseNotReadError() - - def close(self): - # type: (...) -> None - self.is_closed = True - self._internal_response.internal_response.close() - - def __exit__(self, *args): - # type: (...) -> None - self._internal_response.internal_response.__exit__(*args) - - def read(self): - # type: (...) -> bytes - """ - Read the response's bytes. - - """ - try: - return self._content - except AttributeError: - self._validate_streaming_access() - self._content = ( # pylint: disable=attribute-defined-outside-init - self._internal_response.body() or b"".join(self.iter_raw()) - ) - self._close_stream() - return self._content - - def iter_bytes(self, chunk_size=None): - # type: (int) -> Iterator[bytes] - """Iterate over the bytes in the response stream""" - try: - chunk_size = len(self._content) if chunk_size is None else chunk_size - for i in range(0, len(self._content), chunk_size): - yield self._content[i : i + chunk_size] - - except AttributeError: - for raw_bytes in self.iter_raw(chunk_size=chunk_size): - yield raw_bytes - - def iter_text(self, chunk_size=None): - # type: (int) -> Iterator[str] - """Iterate over the response text""" - for byte in self.iter_bytes(chunk_size): - text = byte.decode(self.encoding or "utf-8") - yield text - - def iter_lines(self, chunk_size=None): - # type: (int) -> Iterator[str] - for text in self.iter_text(chunk_size): - lines = _parse_lines_from_text(text) - for line in lines: - yield line - - def _close_stream(self): - # type: (...) 
-> None - self.is_stream_consumed = True - self.close() - - def iter_raw(self, **_): - # type: (int) -> Iterator[bytes] - """Iterate over the raw response bytes""" - self._validate_streaming_access() - stream_download = self._internal_response.stream_download(None) - for raw_bytes in stream_download: - self._num_bytes_downloaded += len(raw_bytes) - yield raw_bytes - - self._close_stream() - - -########################### ERRORS SECTION ################################# - - -class StreamConsumedError(Exception): - def __init__(self): - message = ( - "You are attempting to read or stream content that has already been streamed. " - "You have likely already consumed this stream, so it can not be accessed anymore." - ) - super(StreamConsumedError, self).__init__(message) - - -class ResponseClosedError(Exception): - def __init__(self): - message = ( - "You can not try to read or stream this response's content, since the " - "response has been closed." - ) - super(ResponseClosedError, self).__init__(message) - - -class ResponseNotReadError(Exception): - def __init__(self): - message = ( - "You have not read in the response's bytes yet. Call response.read() first." - ) - super(ResponseNotReadError, self).__init__(message) diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/_rest_py3.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/_rest_py3.py deleted file mode 100644 index ed2908d6948..00000000000 --- a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/core/rest/_rest_py3.py +++ /dev/null @@ -1,739 +0,0 @@ -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. 
-# -# -------------------------------------------------------------------------- - -# pylint currently complains about typing.Union not being subscriptable -# pylint: disable=unsubscriptable-object - - -import asyncio -import codecs -import json -from enum import Enum -import xml.etree.ElementTree as ET -from typing import ( - Any, - AsyncIterable, - IO, - Iterable, - Iterator, - Optional, - Union, - Mapping, - Sequence, - Tuple, - List, -) -from abc import abstractmethod - -import cgi - -from azure.core.exceptions import HttpResponseError -from azure.core.pipeline.transport import ( - HttpRequest as _PipelineTransportHttpRequest, -) - -from azure.core.pipeline.transport._base import ( - _HttpResponseBase as _PipelineTransportHttpResponseBase, -) - -from azure.core._pipeline_client import PipelineClient as _PipelineClient -from azure.core._pipeline_client_async import ( - AsyncPipelineClient as _AsyncPipelineClient, -) - -################################### TYPES SECTION ######################### - -ByteStream = Union[Iterable[bytes], AsyncIterable[bytes]] -PrimitiveData = Optional[Union[str, int, float, bool]] - - -ParamsType = Union[ - Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], - List[Tuple[str, PrimitiveData]], -] - -HeadersType = Union[Mapping[str, str], Sequence[Tuple[str, str]]] - -ContentType = Union[str, bytes, ByteStream] - -FileContent = Union[str, bytes, IO[str], IO[bytes]] -FileType = Union[ - Tuple[Optional[str], FileContent], -] - -FilesType = Union[Mapping[str, FileType], Sequence[Tuple[str, FileType]]] - - - -class HttpVerbs(str, Enum): - GET = "GET" - PUT = "PUT" - POST = "POST" - HEAD = "HEAD" - PATCH = "PATCH" - DELETE = "DELETE" - MERGE = "MERGE" - - -########################### UTILS SECTION ################################# - - -def _is_stream_or_str_bytes(content: Any) -> bool: - return isinstance(content, (str, bytes)) or any( - hasattr(content, attr) for attr in ["read", "__iter__", "__aiter__"] - ) - - -def _lookup_encoding(encoding: str) -> bool: - # including check for whether encoding is known taken from httpx - try: - codecs.lookup(encoding) - return True - except LookupError: - return False - - -def _set_content_length_header( - header_name: str, header_value: str, internal_request: _PipelineTransportHttpRequest -) -> None: - valid_methods = ["put", "post", "patch"] - content_length_headers = ["Content-Length", "Transfer-Encoding"] - if internal_request.method.lower() in valid_methods and not any( - [c for c in content_length_headers if c in internal_request.headers] - ): - internal_request.headers[header_name] = header_value - - -def _set_content_type_header( - header_value: str, internal_request: _PipelineTransportHttpRequest -) -> None: - if not internal_request.headers.get("Content-Type"): - internal_request.headers["Content-Type"] = header_value - - -def _set_content_body( - content: ContentType, internal_request: _PipelineTransportHttpRequest -) -> None: - headers = internal_request.headers - content_type = headers.get("Content-Type") - if _is_stream_or_str_bytes(content): - # stream will be bytes / str, or iterator of bytes / str - internal_request.set_streamed_data_body(content) - if isinstance(content, str) and content: - _set_content_length_header( - "Content-Length", str(len(internal_request.data)), internal_request - ) - _set_content_type_header("text/plain", internal_request) - elif isinstance(content, bytes) and content: - _set_content_length_header( - "Content-Length", str(len(internal_request.data)), internal_request - ) - 
_set_content_type_header("application/octet-stream", internal_request) - elif isinstance(content, (Iterable, AsyncIterable)): # pylint: disable=isinstance-second-argument-not-valid-type - # _set_content_length_header("Transfer-Encoding", "chunked", internal_request) - _set_content_type_header("application/octet-stream", internal_request) - elif isinstance(content, ET.Element): - # XML body - internal_request.set_xml_body(content) - _set_content_type_header("application/xml", internal_request) - _set_content_length_header( - "Content-Length", str(len(internal_request.data)), internal_request - ) - elif content_type and content_type.startswith("text/"): - # Text body - internal_request.set_text_body(content) - _set_content_length_header( - "Content-Length", str(len(internal_request.data)), internal_request - ) - else: - # Other body - internal_request.data = content - internal_request.headers = headers - - -def _set_body( - content: ContentType, - data: dict, - files: Any, - json_body: Any, - internal_request: _PipelineTransportHttpRequest, -) -> None: - if data is not None and not isinstance(data, dict): - content = data - data = None - if content is not None: - _set_content_body(content, internal_request) - elif json_body is not None: - internal_request.set_json_body(json_body) - _set_content_type_header("application/json", internal_request) - elif files is not None: - internal_request.set_formdata_body(files) - # if you don't supply your content type, we'll create a boundary for you with multipart/form-data - # boundary = binascii.hexlify(os.urandom(16)).decode( - # "ascii" - #) # got logic from httpx, thanks httpx! - # _set_content_type_header("multipart/form-data; boundary={}".format(boundary), internal_request) - elif data: - _set_content_type_header("application/x-www-form-urlencoded", internal_request) - internal_request.set_formdata_body(data) - # need to set twice because Content-Type is being popped in set_formdata_body - # don't want to risk changing pipeline.transport, so doing twice here - _set_content_type_header("application/x-www-form-urlencoded", internal_request) - - -def _parse_lines_from_text(text): - # largely taken from httpx's LineDecoder code - lines = [] - last_chunk_of_text = "" - while text: - text_length = len(text) - for idx in range(text_length): - curr_char = text[idx] - next_char = None if idx == len(text) - 1 else text[idx + 1] - if curr_char == "\n": - lines.append(text[: idx + 1]) - text = text[idx + 1 :] - break - if curr_char == "\r" and next_char == "\n": - # if it ends with \r\n, we only do \n - lines.append(text[:idx] + "\n") - text = text[idx + 2 :] - break - if curr_char == "\r" and next_char is not None: - # if it's \r then a normal character, we switch \r to \n - lines.append(text[:idx] + "\n") - text = text[idx + 1 :] - break - if next_char is None: - text = "" - last_chunk_of_text += text - break - if last_chunk_of_text.endswith("\r"): - # if ends with \r, we switch \r to \n - lines.append(last_chunk_of_text[:-1] + "\n") - elif last_chunk_of_text: - lines.append(last_chunk_of_text) - return lines - - -class _StreamContextManagerBase: - def __init__( - self, - client: Union[_PipelineClient, _AsyncPipelineClient], - request: "HttpRequest", - **kwargs - ): - """Used so we can treat stream requests and responses as a context manager. - - In Autorest, we only return a `StreamContextManager` if users pass in `stream_response` True - - Actually sends request when we enter the context manager, closes response when we exit. 
- - Heavily inspired from httpx, we want the same behavior for it to feel consistent for users - """ - self.client = client - self.request = request - self.kwargs = kwargs - - @abstractmethod - def close(self): - ... - - -class _StreamContextManager(_StreamContextManagerBase): - def __enter__(self) -> "HttpResponse": - """Actually make the call only when we enter. For sync stream_response calls""" - pipeline_transport_response = self.client._pipeline.run( - self.request._internal_request, stream=True, **self.kwargs - ).http_response - self.response = HttpResponse( # pylint: disable=attribute-defined-outside-init - request=self.request, _internal_response=pipeline_transport_response - ) - return self.response - - def __exit__(self, *args): - """Close our stream connection. For sync calls""" - self.response.__exit__(*args) - - def close(self): - self.response.close() - - -class _AsyncStreamContextManager(_StreamContextManagerBase): - async def __aenter__(self) -> "AsyncHttpResponse": - """Actually make the call only when we enter. For async stream_response calls.""" - if not isinstance(self.client, _AsyncPipelineClient): - raise TypeError( - "Only sync calls should enter here. If you mean to do a sync call, " - "make sure to use 'with' instead." - ) - pipeline_transport_response = ( - await self.client._pipeline.run( - self.request._internal_request, stream=True, **self.kwargs - ) - ).http_response - self.response = AsyncHttpResponse( # pylint: disable=attribute-defined-outside-init - request=self.request, _internal_response=pipeline_transport_response - ) - return self.response - - async def __aexit__(self, *args): - await self.response.__aexit__(*args) - - async def close(self): # pylint: disable=invalid-overridden-method - await self.response.close() - - -################################## CLASSES ###################################### - - -class HttpRequest: - """Represents an HTTP request. - - :param method: HTTP method (GET, HEAD, etc.) - :type method: str or ~azure.core.protocol.HttpVerbs - :param str url: The url for your request - :keyword params: Query parameters to be mapped into your URL. Your input - should be a mapping or sequence of query name to query value(s). - :paramtype params: mapping or sequence - :keyword headers: HTTP headers you want in your request. Your input should - be a mapping or sequence of header name to header value. - :paramtype headers: mapping or sequence - :keyword any json: A JSON serializable object. We handle JSON-serialization for your - object, so use this for more complicated data structures than `data`. - :keyword content: Content you want in your request body. Think of it as the kwarg you should input - if your data doesn't fit into `json`, `data`, or `files`. Accepts a bytes type, or a generator - that yields bytes. - :paramtype content: str or bytes or iterable[bytes] or asynciterable[bytes] - :keyword dict data: Form data you want in your request body. Use for form-encoded data, i.e. - HTML forms. - :keyword files: Files you want to in your request body. Use for uploading files with - multipart encoding. Your input should be a mapping or sequence of file name to file content. - Use the `data` kwarg in addition if you want to include non-file data files as part of your request. - :paramtype files: mapping or sequence - :ivar str url: The URL this request is against. - :ivar str method: The method type of this request. 
- :ivar headers: The HTTP headers you passed in to your request - :vartype headers: mapping or sequence - :ivar bytes content: The content passed in for the request - """ - - def __init__( - self, - method: str, - url: str, - *, - params: Optional[ParamsType] = None, - headers: Optional[HeadersType] = None, - json: Any = None, # pylint: disable=redefined-outer-name - content: Optional[ContentType] = None, - data: Optional[dict] = None, - files: Optional[FilesType] = None, - **kwargs - ): - # type: (str, str, Any) -> None - - self._internal_request = kwargs.pop( - "_internal_request", - _PipelineTransportHttpRequest( - method=method, - url=url, - headers=headers, - ), - ) - - if params: - self._internal_request.format_parameters(params) - - _set_body( - content=content, - data=data, - files=files, - json_body=json, - internal_request=self._internal_request, - ) - - if kwargs: - raise TypeError( - "You have passed in kwargs '{}' that are not valid kwargs.".format( - "', '".join(list(kwargs.keys())) - ) - ) - - def _set_content_length_header(self) -> None: - method_check = self._internal_request.method.lower() in ["put", "post", "patch"] - content_length_unset = "Content-Length" not in self._internal_request.headers - if method_check and content_length_unset: - self._internal_request.headers["Content-Length"] = str( - len(self._internal_request.data) - ) - - @property - def url(self) -> str: - return self._internal_request.url - - @url.setter - def url(self, val: str) -> None: - self._internal_request.url = val - - @property - def method(self) -> str: - return self._internal_request.method - - @property - def headers(self) -> HeadersType: - return self._internal_request.headers - - @property - def content(self) -> Any: - """Gets the request content.""" - return self._internal_request.data or self._internal_request.files - - def __repr__(self) -> str: - return self._internal_request.__repr__() - - def __deepcopy__(self, memo=None) -> "HttpRequest": - return HttpRequest( - self.method, - self.url, - _internal_request=self._internal_request.__deepcopy__(memo), - ) - - -class _HttpResponseBase: - """Base class for HttpResponse and AsyncHttpResponse. - - :keyword request: The request that resulted in this response. - :paramtype request: ~azure.core.rest.HttpRequest - :ivar int status_code: The status code of this response - :ivar headers: The response headers - :vartype headers: dict[str, any] - :ivar str reason: The reason phrase for this response - :ivar bytes content: The response content in bytes - :ivar str url: The URL that resulted in this response - :ivar str encoding: The response encoding. Is settable, by default - is the response Content-Type header - :ivar str text: The response body as a string. - :ivar request: The request that resulted in this response. 
- :vartype request: ~azure.core.rest.HttpRequest - :ivar str content_type: The content type of the response - :ivar bool is_closed: Whether the network connection has been closed yet - :ivar bool is_stream_consumed: When getting a stream response, checks - whether the stream has been fully consumed - :ivar int num_bytes_downloaded: The number of bytes in your stream that - have been downloaded - """ - - def __init__(self, *, request: HttpRequest, **kwargs): - self._internal_response = kwargs.pop( - "_internal_response" - ) # type: _PipelineTransportHttpResponseBase - self._request = request - self.is_closed = False - self.is_stream_consumed = False - self._num_bytes_downloaded = 0 - - @property - def status_code(self) -> int: - """Returns the status code of the response""" - return self._internal_response.status_code - - @status_code.setter - def status_code(self, val: int) -> None: - """Set the status code of the response""" - self._internal_response.status_code = val - - @property - def headers(self) -> HeadersType: - """Returns the response headers""" - return self._internal_response.headers - - @property - def reason(self) -> str: - """Returns the reason phrase for the response""" - return self._internal_response.reason - - @property - def content(self) -> bytes: - """Returns the response content in bytes""" - raise NotImplementedError() - - @property - def url(self) -> str: - """Returns the URL that resulted in this response""" - return self._internal_response.request.url - - @property - def encoding(self) -> str: - """Returns the response encoding. By default, is specified - by the response Content-Type header. - """ - - try: - return self._encoding - except AttributeError: - return self._get_charset_encoding() - - def _get_charset_encoding(self) -> str: - content_type = self.headers.get("Content-Type") - - if not content_type: - return None - _, params = cgi.parse_header(content_type) - encoding = params.get("charset") # -> utf-8 - if encoding is None or not _lookup_encoding(encoding): - return None - return encoding - - @encoding.setter - def encoding(self, value: str) -> None: - # type: (str) -> None - """Sets the response encoding""" - self._encoding = value - - @property - def text(self) -> str: - """Returns the response body as a string""" - _ = self.content # access content to make sure we trigger if response not fully read in - return self._internal_response.text(encoding=self.encoding) - - @property - def request(self) -> HttpRequest: - if self._request: - return self._request - raise RuntimeError( - "You are trying to access the 'request', but there is no request associated with this HttpResponse" - ) - - @request.setter - def request(self, val: HttpRequest) -> None: - self._request = val - - @property - def content_type(self) -> Optional[str]: - """Content Type of the response""" - return self._internal_response.content_type or self.headers.get("Content-Type") - - @property - def num_bytes_downloaded(self) -> int: - """See how many bytes of your stream response have been downloaded""" - return self._num_bytes_downloaded - - def json(self) -> Any: - """Returns the whole body as a json object. - - :return: The JSON deserialized response body - :rtype: any - :raises json.decoder.JSONDecodeError or ValueError (in python 2.7) if object is not JSON decodable: - """ - return json.loads(self.text) - - def raise_for_status(self) -> None: - """Raises an HttpResponseError if the response has an error status code. - - If response is good, does nothing. 
- """ - if self.status_code >= 400: - raise HttpResponseError(response=self) - - def __repr__(self) -> str: - content_type_str = ( - ", Content-Type: {}".format(self.content_type) if self.content_type else "" - ) - return "<{}: {} {}{}>".format( - type(self).__name__, self.status_code, self.reason, content_type_str - ) - - def _validate_streaming_access(self) -> None: - if self.is_closed: - raise ResponseClosedError() - if self.is_stream_consumed: - raise StreamConsumedError() - - -class HttpResponse(_HttpResponseBase): - @property - def content(self): - # type: (...) -> bytes - try: - return self._content - except AttributeError: - raise ResponseNotReadError() - - def close(self) -> None: - self.is_closed = True - self._internal_response.internal_response.close() - - def __exit__(self, *args) -> None: - self._internal_response.internal_response.__exit__(*args) - - def read(self) -> bytes: - """ - Read the response's bytes. - - """ - try: - return self._content - except AttributeError: - self._validate_streaming_access() - self._content = (self._internal_response.body() or # pylint: disable=attribute-defined-outside-init - b"".join(self.iter_raw())) - self._close_stream() - return self._content - - def iter_bytes(self, chunk_size: int = None) -> Iterator[bytes]: - """Iterate over the bytes in the response stream""" - try: - chunk_size = len(self._content) if chunk_size is None else chunk_size - for i in range(0, len(self._content), chunk_size): - yield self._content[i : i + chunk_size] - - except AttributeError: - for raw_bytes in self.iter_raw(chunk_size=chunk_size): - yield raw_bytes - - def iter_text(self, chunk_size: int = None) -> Iterator[str]: - """Iterate over the response text""" - for byte in self.iter_bytes(chunk_size): - text = byte.decode(self.encoding or "utf-8") - yield text - - def iter_lines(self, chunk_size: int = None) -> Iterator[str]: - for text in self.iter_text(chunk_size): - lines = _parse_lines_from_text(text) - for line in lines: - yield line - - def _close_stream(self) -> None: - self.is_stream_consumed = True - self.close() - - def iter_raw(self, **_) -> Iterator[bytes]: - """Iterate over the raw response bytes""" - self._validate_streaming_access() - stream_download = self._internal_response.stream_download(None) - for raw_bytes in stream_download: - self._num_bytes_downloaded += len(raw_bytes) - yield raw_bytes - - self._close_stream() - - -class AsyncHttpResponse(_HttpResponseBase): - @property - def content(self) -> bytes: - try: - return self._content - except AttributeError: - raise ResponseNotReadError() - - async def _close_stream(self) -> None: - self.is_stream_consumed = True - await self.close() - - async def read(self) -> bytes: - """ - Read the response's bytes. 
- - """ - try: - return self._content - except AttributeError: - self._validate_streaming_access() - await self._internal_response.load_body() - self._content = self._internal_response._body # pylint: disable=protected-access,attribute-defined-outside-init - await self._close_stream() - return self._content - - async def iter_bytes(self, chunk_size: int = None) -> Iterator[bytes]: - """Iterate over the bytes in the response stream""" - try: - chunk_size = len(self._content) if chunk_size is None else chunk_size - for i in range(0, len(self._content), chunk_size): - yield self._content[i : i + chunk_size] - - except AttributeError: - async for raw_bytes in self.iter_raw(chunk_size=chunk_size): - yield raw_bytes - - async def iter_text(self, chunk_size: int = None) -> Iterator[str]: - """Iterate over the response text""" - async for byte in self.iter_bytes(chunk_size): - text = byte.decode(self.encoding or "utf-8") - yield text - - async def iter_lines(self, chunk_size: int = None) -> Iterator[str]: - async for text in self.iter_text(chunk_size): - lines = _parse_lines_from_text(text) - for line in lines: - yield line - - async def iter_raw(self, **_) -> Iterator[bytes]: - """Iterate over the raw response bytes""" - self._validate_streaming_access() - stream_download = self._internal_response.stream_download(None) - async for raw_bytes in stream_download: - self._num_bytes_downloaded += len(raw_bytes) - yield raw_bytes - - await self._close_stream() - - async def close(self) -> None: - self.is_closed = True - self._internal_response.internal_response.close() - await asyncio.sleep(0) - - async def __aexit__(self, *args) -> None: - await self._internal_response.internal_response.__aexit__(*args) - - -########################### ERRORS SECTION ################################# - - -class StreamConsumedError(Exception): - def __init__(self) -> None: - message = ( - "You are attempting to read or stream content that has already been streamed. " - "You have likely already consumed this stream, so it can not be accessed anymore." - ) - super().__init__(message) - - -class ResponseClosedError(Exception): - def __init__(self) -> None: - message = ( - "You can not try to read or stream this response's content, since the " - "response has been closed." - ) - super().__init__(message) - - -class ResponseNotReadError(Exception): - def __init__(self) -> None: - message = ( - "You have not read in the response's bytes yet. Call response.read() first." - ) - super().__init__(message) diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/py.typed b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/py.typed new file mode 100644 index 00000000000..e5aff4f83af --- /dev/null +++ b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/rest.py b/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/rest.py deleted file mode 100644 index 8da673e1a39..00000000000 --- a/src/webpubsub/azext_webpubsub/vendored_sdks/azure_messaging_webpubsubservice/rest.py +++ /dev/null @@ -1,942 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
-# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -# pylint: disable=line-too-long - -__all__ = [ - 'build_add_connection_to_group_request', - 'build_add_user_to_group_request', - 'build_connection_exists_request', - 'build_group_exists_request', - 'build_check_permission_request', - 'build_user_exists_request', - 'build_close_client_connection_request', - 'build_grant_permission_request', - 'build_healthapi_get_health_status_request', - 'build_remove_connection_from_group_request', - 'build_remove_user_from_all_groups_request', - 'build_remove_user_from_group_request', - 'build_revoke_permission_request', - 'build_send_to_all_request', - 'build_send_to_connection_request', - 'build_send_to_group_request', - 'build_send_to_user_request' -] -from typing import TYPE_CHECKING -from msrest import Serializer -from azure.core.pipeline.transport._base import _format_url_section -from .core.rest import HttpRequest - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, IO, List, Optional, Union, Dict - from typing_extensions import Literal - Permissions = Union[Literal['joinLeaveGroup'], Literal['sendToGroup']] # pylint: disable=unsubscriptable-object - -_SERIALIZER = Serializer() - - -def build_healthapi_get_health_status_request( - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Get service health status. - - Get service health status. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/health') - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="HEAD", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_send_to_all_request( - hub, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Broadcast content inside request body to all the connected client connections. - - Broadcast content inside request body to all the connected client connections. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :keyword json: The payload body. - :paramtype json: Any - :keyword content: The payload body. - :paramtype content: IO - :keyword excluded: Excluded connection Ids. - :paramtype excluded: list[str] - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. 
- See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your `json` input. - json = "Any (optional)" - """ - excluded = kwargs.pop('excluded', None) # type: Optional[List[str]] - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - content_type = kwargs.pop("content_type", None) - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/:send') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if excluded is not None: - query_parameters['excluded'] = [_SERIALIZER.query("excluded", q, 'str') if q is not None else '' for q in excluded] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - - return HttpRequest( - method="POST", - url=url, - params=query_parameters, - headers=header_parameters, - **kwargs - ) - - -def build_connection_exists_request( - hub, # type: str - connection_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Check if the connection with the given connectionId exists. - - Check if the connection with the given connectionId exists. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param connection_id: The connection Id. - :type connection_id: str - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/connections/{connectionId}') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="HEAD", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_close_client_connection_request( - hub, # type: str - connection_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Close the client connection. - - Close the client connection. 
- - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param connection_id: Target connection Id. - :type connection_id: str - :keyword reason: The reason closing the client connection. - :paramtype reason: str - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - reason = kwargs.pop('reason', None) # type: Optional[str] - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/connections/{connectionId}') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if reason is not None: - query_parameters['reason'] = _SERIALIZER.query("reason", reason, 'str') - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="DELETE", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_send_to_connection_request( - hub, # type: str - connection_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Send content inside request body to the specific connection. - - Send content inside request body to the specific connection. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param connection_id: The connection Id. - :type connection_id: str - :keyword json: The payload body. - :paramtype json: Any - :keyword content: The payload body. - :paramtype content: IO - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your `json` input. 
- json = "Any (optional)" - """ - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - content_type = kwargs.pop("content_type", None) - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/connections/{connectionId}/:send') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - - return HttpRequest( - method="POST", - url=url, - params=query_parameters, - headers=header_parameters, - **kwargs - ) - - -def build_group_exists_request( - hub, # type: str - group, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Check if there are any client connections inside the given group. - - Check if there are any client connections inside the given group. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param group: Target group name, which length should be greater than 0 and less than 1025. - :type group: str - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/groups/{group}') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="HEAD", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_send_to_group_request( - hub, # type: str - group, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Send content inside request body to a group of connections. - - Send content inside request body to a group of connections. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param group: Target group name, which length should be greater than 0 and less than 1025. 
- :type group: str - :keyword json: The payload body. - :paramtype json: Any - :keyword content: The payload body. - :paramtype content: IO - :keyword excluded: Excluded connection Ids. - :paramtype excluded: list[str] - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your `json` input. - json = "Any (optional)" - """ - excluded = kwargs.pop('excluded', None) # type: Optional[List[str]] - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - content_type = kwargs.pop("content_type", None) - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/groups/{group}/:send') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if excluded is not None: - query_parameters['excluded'] = [_SERIALIZER.query("excluded", q, 'str') if q is not None else '' for q in excluded] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - - return HttpRequest( - method="POST", - url=url, - params=query_parameters, - headers=header_parameters, - **kwargs - ) - - -def build_add_connection_to_group_request( - hub, # type: str - group, # type: str - connection_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Add a connection to the target group. - - Add a connection to the target group. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param group: Target group name, which length should be greater than 0 and less than 1025. - :type group: str - :param connection_id: Target connection Id. - :type connection_id: str - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. 
- :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/groups/{group}/connections/{connectionId}') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1), - 'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="PUT", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_remove_connection_from_group_request( - hub, # type: str - group, # type: str - connection_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Remove a connection from the target group. - - Remove a connection from the target group. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param group: Target group name, which length should be greater than 0 and less than 1025. - :type group: str - :param connection_id: Target connection Id. - :type connection_id: str - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/groups/{group}/connections/{connectionId}') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1), - 'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="DELETE", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_user_exists_request( - hub, # type: str - user_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Check if there are any client connections connected for the given user. - - Check if there are any client connections connected for the given user. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param user_id: Target user Id. - :type user_id: str - :keyword api_version: Api Version. 
- :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/users/{userId}') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'userId': _SERIALIZER.url("user_id", user_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="HEAD", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_send_to_user_request( - hub, # type: str - user_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Send content inside request body to the specific user. - - Send content inside request body to the specific user. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param user_id: The user Id. - :type user_id: str - :keyword json: The payload body. - :paramtype json: Any - :keyword content: The payload body. - :paramtype content: IO - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your `json` input. - json = "Any (optional)" - """ - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - content_type = kwargs.pop("content_type", None) - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/users/{userId}/:send') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'userId': _SERIALIZER.url("user_id", user_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - - return HttpRequest( - method="POST", - url=url, - params=query_parameters, - headers=header_parameters, - **kwargs - ) - - -def build_add_user_to_group_request( - hub, # type: str - group, # type: str - user_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Add a user to the target group. 
- - Add a user to the target group. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param group: Target group name, which length should be greater than 0 and less than 1025. - :type group: str - :param user_id: Target user Id. - :type user_id: str - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/users/{userId}/groups/{group}') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1), - 'userId': _SERIALIZER.url("user_id", user_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="PUT", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_remove_user_from_group_request( - hub, # type: str - group, # type: str - user_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Remove a user from the target group. - - Remove a user from the target group. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param group: Target group name, which length should be greater than 0 and less than 1025. - :type group: str - :param user_id: Target user Id. - :type user_id: str - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. 
- :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/users/{userId}/groups/{group}') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'group': _SERIALIZER.url("group", group, 'str', max_length=1024, min_length=1), - 'userId': _SERIALIZER.url("user_id", user_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="DELETE", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_remove_user_from_all_groups_request( - hub, # type: str - user_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Remove a user from all groups. - - Remove a user from all groups. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param user_id: Target user Id. - :type user_id: str - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/users/{userId}/groups') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'userId': _SERIALIZER.url("user_id", user_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="DELETE", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_grant_permission_request( - hub, # type: str - permission, # type: Permissions - connection_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Grant permission to the connection. - - Grant permission to the connection. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param permission: The permission: current supported actions are joinLeaveGroup and - sendToGroup. - :type permission: str or ~Permissions - :param connection_id: Target connection Id. - :type connection_id: str - :keyword target_name: Optional. If not set, grant the permission to all the targets. If set, - grant the permission to the specific target. The meaning of the target depends on the specific - permission. 
- :paramtype target_name: str - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. - :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - target_name = kwargs.pop('target_name', None) # type: Optional[str] - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/permissions/{permission}/connections/{connectionId}') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'permission': _SERIALIZER.url("permission", permission, 'str'), - 'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if target_name is not None: - query_parameters['targetName'] = _SERIALIZER.query("target_name", target_name, 'str') - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="PUT", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_revoke_permission_request( - hub, # type: str - permission, # type: Permissions - connection_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Revoke permission for the connection. - - Revoke permission for the connection. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param permission: The permission: current supported actions are joinLeaveGroup and - sendToGroup. - :type permission: str or ~Permissions - :param connection_id: Target connection Id. - :type connection_id: str - :keyword target_name: Optional. If not set, revoke the permission for all targets. If set, - revoke the permission for the specific target. The meaning of the target depends on the - specific permission. - :paramtype target_name: str - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. 
- :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - target_name = kwargs.pop('target_name', None) # type: Optional[str] - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/permissions/{permission}/connections/{connectionId}') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'permission': _SERIALIZER.url("permission", permission, 'str'), - 'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if target_name is not None: - query_parameters['targetName'] = _SERIALIZER.query("target_name", target_name, 'str') - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="DELETE", - url=url, - params=query_parameters, - **kwargs - ) - - -def build_check_permission_request( - hub, # type: str - permission, # type: Permissions - connection_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - """Check if a connection has permission to the specified action. - - Check if a connection has permission to the specified action. - - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this request builder into your code flow. - - :param hub: Target hub name, which should start with alphabetic characters and only contain - alpha-numeric characters or underscore. - :type hub: str - :param permission: The permission: current supported actions are joinLeaveGroup and - sendToGroup. - :type permission: ~Permissions - :param connection_id: Target connection Id. - :type connection_id: str - :keyword target_name: Optional. If not set, get the permission for all targets. If set, get the - permission for the specific target. The meaning of the target depends on the specific - permission. - :paramtype target_name: str - :keyword api_version: Api Version. - :paramtype api_version: str - :return: Returns an :class:`~azure.messaging.webpubsubservice.core.rest.HttpRequest` that you will pass to the client's `send_request` method. - See https://aka.ms/azsdk/python/llcwiki for how to incorporate this response into your code flow. 
- :rtype: ~azure.messaging.webpubsubservice.core.rest.HttpRequest - """ - target_name = kwargs.pop('target_name', None) # type: Optional[str] - api_version = kwargs.pop('api_version', "2021-05-01-preview") # type: Optional[str] - - # Construct URL - url = kwargs.pop("template_url", '/api/hubs/{hub}/permissions/{permission}/connections/{connectionId}') - path_format_arguments = { - 'hub': _SERIALIZER.url("hub", hub, 'str', pattern=r'^[A-Za-z][A-Za-z0-9_`,.[\]]{0,127}$'), - 'permission': _SERIALIZER.url("permission", permission, 'str'), - 'connectionId': _SERIALIZER.url("connection_id", connection_id, 'str', min_length=1), - } - url = _format_url_section(url, **path_format_arguments) - - # Construct parameters - query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - if target_name is not None: - query_parameters['targetName'] = _SERIALIZER.query("target_name", target_name, 'str') - if api_version is not None: - query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - return HttpRequest( - method="HEAD", - url=url, - params=query_parameters, - **kwargs - ) diff --git a/src/webpubsub/setup.py b/src/webpubsub/setup.py index 1dc1227bf84..5a9e2e9b70d 100644 --- a/src/webpubsub/setup.py +++ b/src/webpubsub/setup.py @@ -16,7 +16,7 @@ # TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. -VERSION = '1.7.1' +VERSION = '1.7.2' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From 3bdc9990920bab308625fb79ef8c0896b0dc1b0c Mon Sep 17 00:00:00 2001 From: Chenyang Liu Date: Wed, 23 Jul 2025 11:21:37 +1000 Subject: [PATCH 2/2] Fix after upgrade --- src/webpubsub/azext_webpubsub/service.py | 97 ++++++++---------------- 1 file changed, 31 insertions(+), 66 deletions(-) diff --git a/src/webpubsub/azext_webpubsub/service.py b/src/webpubsub/azext_webpubsub/service.py index 1e3b8901e2e..b3132363b56 100644 --- a/src/webpubsub/azext_webpubsub/service.py +++ b/src/webpubsub/azext_webpubsub/service.py @@ -7,116 +7,81 @@ from .vendored_sdks.azure_messaging_webpubsubservice import ( WebPubSubServiceClient ) -from .vendored_sdks.azure_messaging_webpubsubservice.rest import ( - build_send_to_all_request, - build_connection_exists_request, - build_close_client_connection_request, - build_send_to_connection_request, - build_add_connection_to_group_request, - build_remove_connection_from_group_request, - build_send_to_group_request, - build_user_exists_request, - build_send_to_user_request, - build_add_user_to_group_request, - build_remove_user_from_group_request, - build_remove_user_from_all_groups_request, - build_grant_permission_request, - build_check_permission_request, - build_revoke_permission_request -) def broadcast(client, resource_group_name, webpubsub_name, hub_name, payload): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_send_to_all_request(hub_name, content=payload, content_type='text/plain')) - res.raise_for_status() + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + service_client.send_to_all(message=payload, content_type='text/plain') def check_connection_exists(client, resource_group_name, webpubsub_name, hub_name, connection_id): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_connection_exists_request(hub_name, connection_id)) - return 
_get_existence_response(res.status_code == 200) + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + return service_client.connection_exists(connection_id) def close_connection(client, resource_group_name, webpubsub_name, hub_name, connection_id): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_close_client_connection_request(hub_name, connection_id)) - res.raise_for_status() + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + service_client.close_connection(connection_id) def send_connection(client, resource_group_name, webpubsub_name, hub_name, connection_id, payload): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_send_to_connection_request(hub_name, connection_id, content=payload, content_type='text/plain')) - res.raise_for_status() + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + service_client.send_to_connection(connection_id, message=payload, content_type='text/plain') def add_connection_to_group(client, resource_group_name, webpubsub_name, hub_name, connection_id, group_name): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_add_connection_to_group_request(hub_name, group_name, connection_id)) - res.raise_for_status() + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + service_client.add_connection_to_group(group_name, connection_id) def remove_connection_from_group(client, resource_group_name, webpubsub_name, hub_name, connection_id, group_name): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_remove_connection_from_group_request(hub_name, group_name, connection_id)) - res.raise_for_status() + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + service_client.remove_connection_from_group(group_name, connection_id) def send_group(client, resource_group_name, webpubsub_name, hub_name, group_name, payload): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_send_to_group_request(hub_name, group_name, content=payload, content_type='text/plain')) - res.raise_for_status() + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + service_client.send_to_group(group_name, payload, content_type='text/plain') def check_user_exists(client, resource_group_name, webpubsub_name, hub_name, user_id): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_user_exists_request(hub_name, user_id)) - return _get_existence_response(res.status_code == 200) + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + return service_client.user_exists(user_id) def send_user(client, resource_group_name, webpubsub_name, hub_name, user_id, payload): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_send_to_user_request(hub_name, user_id, content=payload, content_type='text/plain')) - res.raise_for_status() + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) 
+ service_client.send_to_user(user_id, payload, content_type='text/plain') def add_user_to_group(client, resource_group_name, webpubsub_name, hub_name, user_id, group_name): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_add_user_to_group_request(hub_name, group_name, user_id)) - res.raise_for_status() + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + service_client.add_user_to_group(group_name, user_id) def remove_user_from_group(client, resource_group_name, webpubsub_name, hub_name, user_id, group_name=None): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) if group_name: - res = service_client.send_request(build_remove_user_from_group_request(hub_name, group_name, user_id)) + service_client.remove_user_from_group(group_name, user_id) else: - res = service_client.send_request(build_remove_user_from_all_groups_request(hub_name, user_id)) - res.raise_for_status() + service_client.remove_user_from_all_groups(user_id) def grant_permission(client, resource_group_name, webpubsub_name, hub_name, connection_id, permission, group_name): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_grant_permission_request(hub_name, permission, connection_id, target_name=group_name)) - res.raise_for_status() + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + service_client.grant_permission(permission, connection_id, target_name=group_name) def revoke_permission(client, resource_group_name, webpubsub_name, hub_name, connection_id, permission, group_name): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_revoke_permission_request(hub_name, permission, connection_id, target_name=group_name)) - res.raise_for_status() + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + service_client.revoke_permission(permission, connection_id, target_name=group_name) def check_permission(client, resource_group_name, webpubsub_name, hub_name, connection_id, permission, group_name): - service_client = _get_service_client(client, resource_group_name, webpubsub_name) - res = service_client.send_request(build_check_permission_request(hub_name, permission, connection_id, target_name=group_name)) - return _get_existence_response(res.status_code == 200) + service_client = _get_service_client(client, resource_group_name, webpubsub_name, hub_name) + return service_client.has_permission(permission, connection_id, target_name=group_name) -def _get_service_client(client, resource_group_name, webpubsub_name): +def _get_service_client(client, resource_group_name, webpubsub_name, hub) -> WebPubSubServiceClient: keys = client.list_keys(resource_group_name, webpubsub_name) - return WebPubSubServiceClient.from_connection_string(keys.primary_connection_string) - - -def _get_existence_response(success): - return {"existence": success} + return WebPubSubServiceClient.from_connection_string(keys.primary_connection_string, hub)
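
Note: the second patch replaces the removed low-level pattern send_request(build_*_request(...)) with the data-plane client's operation methods and binds the hub once at construction via WebPubSubServiceClient.from_connection_string(connection_string, hub). Below is a minimal sketch of the new call pattern, written against the public azure-messaging-webpubsubservice package that the vendored copy mirrors; the connection string and hub name are placeholders, not values from this patch.

    # Minimal sketch of the call pattern introduced in service.py above.
    # Placeholders only -- substitute a real connection string and hub name.
    from azure.messaging.webpubsubservice import WebPubSubServiceClient

    CONNECTION_STRING = "Endpoint=https://<resource>.webpubsub.azure.com;AccessKey=<key>;Version=1.0;"
    HUB = "myhub"

    # The hub is bound once at construction instead of being passed to every
    # build_*_request helper as in the removed rest.py builders.
    service_client = WebPubSubServiceClient.from_connection_string(CONNECTION_STRING, HUB)

    # Operation methods replace send_request(...); the generated operations
    # raise on error responses, which is presumably why the explicit
    # res.raise_for_status() calls were dropped in the patch.
    service_client.send_to_all(message="hello", content_type="text/plain")

    # Existence checks now return a bool directly instead of a raw response
    # whose status code had to be compared against 200.
    print(service_client.connection_exists("some-connection-id"))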